/* Subroutines used for code generation on Renesas RL78 processors.
2 Copyright (C) 2011-2022 Free Software Foundation, Inc.
3 Contributed by Red Hat.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #define IN_TARGET_CODE 1
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "optabs.h"
36 #include "emit-rtl.h"
37 #include "recog.h"
38 #include "diagnostic-core.h"
39 #include "varasm.h"
40 #include "stor-layout.h"
41 #include "calls.h"
42 #include "output.h"
43 #include "insn-attr.h"
44 #include "explow.h"
45 #include "expr.h"
46 #include "reload.h"
47 #include "cfgrtl.h"
48 #include "langhooks.h"
49 #include "tree-pass.h"
50 #include "context.h"
51 #include "tm-constrs.h" /* for satisfies_constraint_*(). */
52 #include "builtins.h"
53
54 /* This file should be included last. */
55 #include "target-def.h"
56
/* Forward declarations for static helpers defined later in this file.  */
static inline bool is_interrupt_func (const_tree decl);
static inline bool is_brk_interrupt_func (const_tree decl);
static void rl78_reorg (void);
static const char *rl78_strip_name_encoding (const char *);
static const char *rl78_strip_nonasm_name_encoding (const char *);
static section * rl78_select_section (tree, int, unsigned HOST_WIDE_INT);
64
65
/* Debugging statements are tagged with DEBUG0 only so that they can
   be easily enabled individually, by replacing the '0' with '1' as
   needed.  */
#define DEBUG0 0
#define DEBUG1 1

/* REGISTER_NAMES has the names for individual 8-bit registers, but
   these have the names we need to use when referring to 16-bit
   register pairs.  */
static const char * const word_regnames[] =
{
  "ax", "AX", "bc", "BC", "de", "DE", "hl", "HL",
  "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
  "r16", "r17", "r18", "r19", "r20", "r21", "r22", "r23",
  "r24", "r25", "r26", "r27", "r28", "r29", "r30", "r31",
  "sp", "ap", "psw", "es", "cs"
};

/* used by rl78_addsi3_internal for formatting insns output */
static char fmt_buffer[1024];

/* Structure for G13 MDUC registers: the I/O address of the register
   and the access width used when saving/restoring it.  */
struct mduc_reg_type
{
  unsigned int       address;
  enum machine_mode  mode;
};

/* The MDUC (hardware multiply/divide unit) registers that interrupt
   handlers must preserve when MUST_SAVE_MDUC_REGISTERS is in effect.
   NOTE(review): addresses are raw SFR locations — verify against the
   G13 hardware manual before changing.  */
struct mduc_reg_type mduc_regs[] =
{
  {0xf00e8, E_QImode},
  {0xffff0, E_HImode},
  {0xffff2, E_HImode},
  {0xf2224, E_HImode},
  {0xf00e0, E_HImode},
  {0xf00e2, E_HImode}
};
104
/* Per-function machine-specific data, allocated by
   rl78_init_machine_status and garbage-collected with the function.  */
struct GTY(()) machine_function
{
  /* If set, the rest of the fields have been computed.  */
  int computed;
  /* Which register pairs need to be pushed in the prologue.  */
  int need_to_push [FIRST_PSEUDO_REGISTER / 2];

  /* These fields describe the frame layout...  */
  /* arg pointer */
  /* 4 bytes for saved PC */
  int framesize_regs;
  /* frame pointer */
  int framesize_locals;
  int framesize_outgoing;
  /* stack pointer */
  int framesize;

  /* If set, recog is allowed to match against the "real" patterns.  */
  int real_insns_ok;
  /* If set, recog is allowed to match against the "virtual" patterns.  */
  int virt_insns_ok;
  /* Set if the current function needs to clean up any trampolines.  */
  int trampolines_used;
  /* True if the ES register is used and hence
     needs to be saved inside interrupt handlers.  */
  bool uses_es;
};
132
133 /* This is our init_machine_status, as set in
134 rl78_option_override. */
135 static struct machine_function *
136 rl78_init_machine_status (void)
137 {
138 struct machine_function *m;
139
140 m = ggc_cleared_alloc<machine_function> ();
141 m->virt_insns_ok = 1;
142
143 return m;
144 }
145
/* This pass converts virtual instructions using virtual registers, to
   real instructions using real registers.  Rather than run it as
   reorg, we reschedule it before vartrack to help with debugging.  */
namespace
{
  const pass_data pass_data_rl78_devirt =
  {
    RTL_PASS, /* type */
    "devirt", /* name */
    OPTGROUP_NONE, /* optinfo_flags */
    TV_MACH_DEP, /* tv_id */
    0, /* properties_required */
    0, /* properties_provided */
    0, /* properties_destroyed */
    0, /* todo_flags_start */
    0, /* todo_flags_finish */
  };

  class pass_rl78_devirt : public rtl_opt_pass
  {
  public:
    pass_rl78_devirt (gcc::context *ctxt)
      : rtl_opt_pass (pass_data_rl78_devirt, ctxt)
      {
      }

    /* opt_pass methods: */
    virtual unsigned int execute (function *)
    {
      /* The actual devirtualization work is done in rl78_reorg.  */
      rl78_reorg ();
      return 0;
    }
  };
} // anon namespace
180
/* Factory for the devirt pass, used when registering it from
   rl78_asm_file_start.  */
rtl_opt_pass *
make_pass_rl78_devirt (gcc::context *ctxt)
{
  return new pass_rl78_devirt (ctxt);
}
186
/* Redundant move elimination pass.  Must be run after the basic block
   reordering pass for the best effect.  Scans the insn stream for
   adjacent single-SET insn pairs that merely swap a value back, and
   deletes the second insn of each such pair.  Returns 0 (no TODOs).  */

static unsigned int
move_elim_pass (void)
{
  rtx_insn *insn, *ninsn;
  /* SET pattern of the immediately preceding insn, or NULL_RTX when
     the previous insn cannot participate in a redundant pair.  */
  rtx prev = NULL_RTX;

  for (insn = get_insns (); insn; insn = ninsn)
    {
      rtx set;

      /* Capture the successor first, since INSN may be deleted below.  */
      ninsn = next_nonnote_nondebug_insn (insn);

      if ((set = single_set (insn)) == NULL_RTX)
	{
	  prev = NULL_RTX;
	  continue;
	}

      /* If we have two SET insns in a row (without anything
	 between them) and the source of the second one is the
	 destination of the first one, and vice versa, then we
	 can eliminate the second SET.  */
      if (prev
	  && rtx_equal_p (SET_DEST (prev), SET_SRC (set))
	  && rtx_equal_p (SET_DEST (set), SET_SRC (prev))
	  /* ... and none of the operands are volatile.  */
	  && ! volatile_refs_p (SET_SRC (prev))
	  && ! volatile_refs_p (SET_DEST (prev))
	  && ! volatile_refs_p (SET_SRC (set))
	  && ! volatile_refs_p (SET_DEST (set)))
	{
	  if (dump_file)
	    fprintf (dump_file, " Delete insn %d because it is redundant\n",
		     INSN_UID (insn));

	  delete_insn (insn);
	  prev = NULL_RTX;
	}
      else
	prev = set;
    }

  if (dump_file)
    print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);

  return 0;
}
237
/* Pass wrapper for move_elim_pass; registered after "bbro" in
   rl78_asm_file_start.  */
namespace
{
  const pass_data pass_data_rl78_move_elim =
  {
    RTL_PASS, /* type */
    "move_elim", /* name */
    OPTGROUP_NONE, /* optinfo_flags */
    TV_MACH_DEP, /* tv_id */
    0, /* properties_required */
    0, /* properties_provided */
    0, /* properties_destroyed */
    0, /* todo_flags_start */
    0, /* todo_flags_finish */
  };

  class pass_rl78_move_elim : public rtl_opt_pass
  {
  public:
    pass_rl78_move_elim (gcc::context *ctxt)
      : rtl_opt_pass (pass_data_rl78_move_elim, ctxt)
      {
      }

    /* opt_pass methods: */
    virtual unsigned int execute (function *) { return move_elim_pass (); }
  };
} // anon namespace
265
/* Factory for the move-elimination pass, used when registering it from
   rl78_asm_file_start.  */
rtl_opt_pass *
make_pass_rl78_move_elim (gcc::context *ctxt)
{
  return new pass_rl78_move_elim (ctxt);
}
271
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START rl78_asm_file_start

/* Emit assembler-level equates mapping the virtual registers r8..r31
   to fixed RAM locations, then register the two RL78 machine passes.
   NOTE(review): pass registration from an ASM_FILE_START hook is
   unusual but deliberate here — it runs once per compilation.  */
static void
rl78_asm_file_start (void)
{
  int i;

  if (TARGET_G10)
    {
      /* The memory used is 0xffec8 to 0xffedf; real registers are in
	 0xffee0 to 0xffee7.  */
      for (i = 8; i < 32; i++)
	fprintf (asm_out_file, "r%d\t=\t0x%x\n", i, 0xffec0 + i);
    }
  else
    {
      /* Non-G10: the three virtual register banks live just below the
	 hardware register file.  */
      for (i = 0; i < 8; i++)
	{
	  fprintf (asm_out_file, "r%d\t=\t0x%x\n", 8 + i, 0xffef0 + i);
	  fprintf (asm_out_file, "r%d\t=\t0x%x\n", 16 + i, 0xffee8 + i);
	  fprintf (asm_out_file, "r%d\t=\t0x%x\n", 24 + i, 0xffee0 + i);
	}
    }

  /* Run devirtualization just before prologue/epilogue generation.  */
  opt_pass *rl78_devirt_pass = make_pass_rl78_devirt (g);
  struct register_pass_info rl78_devirt_info =
    {
      rl78_devirt_pass,
      "pro_and_epilogue",
      1,
      PASS_POS_INSERT_BEFORE
    };

  /* Run redundant-move elimination just after basic block reordering.  */
  opt_pass *rl78_move_elim_pass = make_pass_rl78_move_elim (g);
  struct register_pass_info rl78_move_elim_info =
    {
      rl78_move_elim_pass,
      "bbro",
      1,
      PASS_POS_INSERT_AFTER
    };

  register_pass (& rl78_devirt_info);
  register_pass (& rl78_move_elim_info);
}
318
319 void
320 rl78_output_symbol_ref (FILE * file, rtx sym)
321 {
322 tree type = SYMBOL_REF_DECL (sym);
323 const char *str = XSTR (sym, 0);
324
325 if (str[0] == '*')
326 {
327 fputs (str + 1, file);
328 }
329 else
330 {
331 str = rl78_strip_nonasm_name_encoding (str);
332 if (type && TREE_CODE (type) == FUNCTION_DECL)
333 {
334 fprintf (file, "%%code(");
335 assemble_name (file, str);
336 fprintf (file, ")");
337 }
338 else
339 assemble_name (file, str);
340 }
341 }
342
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE rl78_option_override

/* True when the MDUC registers must be preserved: requested via
   -msave-mduc-in-interrupts, inside an interrupt handler, with the
   G13 multiply/divide peripheral in use.  */
#define MUST_SAVE_MDUC_REGISTERS			\
  (TARGET_SAVE_MDUC_REGISTERS				\
   && (is_interrupt_func (NULL_TREE)) && RL78_MUL_G13)

/* Validate and reconcile the -mcpu= / -mmul= (and related) command
   line options, and set up backend-wide defaults.  */
static void
rl78_option_override (void)
{
  /* Backend requirements: no frame pointer, no function CSE, and
     keep wide types intact for the devirtualizer.  */
  flag_omit_frame_pointer = 1;
  flag_no_function_cse = 1;
  flag_split_wide_types = 0;

  init_machine_status = rl78_init_machine_status;

  if (TARGET_ALLREGS)
    {
      int i;

      /* Make the normally-reserved upper virtual registers available.  */
      for (i = 24; i < 32; i++)
	fixed_regs[i] = 0;
    }

  if (TARGET_ES0
      && strcmp (lang_hooks.name, "GNU C")
      && strcmp (lang_hooks.name, "GNU C11")
      && strcmp (lang_hooks.name, "GNU C17")
      && strcmp (lang_hooks.name, "GNU C2X")
      && strcmp (lang_hooks.name, "GNU C89")
      && strcmp (lang_hooks.name, "GNU C99")
      /* Compiling with -flto results in a language of GNU GIMPLE being used... */
      && strcmp (lang_hooks.name, "GNU GIMPLE"))
    /* Address spaces are currently only supported by C.  */
    error ("%<-mes0%> can only be used with C");

  if (TARGET_SAVE_MDUC_REGISTERS && !(TARGET_G13 || RL78_MUL_G13))
    warning (0, "mduc registers only saved for G13 target");

  /* Pick a multiplier implementation compatible with the chosen core,
     diagnosing impossible -mcpu/-mmul combinations.  */
  switch (rl78_cpu_type)
    {
    case CPU_UNINIT:
      rl78_cpu_type = CPU_G14;
      if (rl78_mul_type == MUL_UNINIT)
	rl78_mul_type = MUL_NONE;
      break;

    case CPU_G10:
      switch (rl78_mul_type)
	{
	case MUL_UNINIT: rl78_mul_type = MUL_NONE; break;
	case MUL_NONE:   break;
	case MUL_G13:    error ("%<-mmul=g13%> cannot be used with "
				"%<-mcpu=g10%>"); break;
	case MUL_G14:    error ("%<-mmul=g14%> cannot be used with "
				"%<-mcpu=g10%>"); break;
	}
      break;

    case CPU_G13:
      switch (rl78_mul_type)
	{
	case MUL_UNINIT: rl78_mul_type = MUL_G13; break;
	case MUL_NONE:   break;
	case MUL_G13:    break;
	/* The S2 core does not have mul/div instructions.  */
	case MUL_G14:    error ("%<-mmul=g14%> cannot be used with "
				"%<-mcpu=g13%>"); break;
	}
      break;

    case CPU_G14:
      switch (rl78_mul_type)
	{
	case MUL_UNINIT: rl78_mul_type = MUL_G14; break;
	case MUL_NONE:   break;
	case MUL_G14:    break;
	/* The G14 core does not have the hardware multiply peripheral used by the
	   G13 core, hence you cannot use G13 multipliy routines on G14 hardware.  */
	case MUL_G13:    error ("%<-mmul=g13%> cannot be used with "
				"%<-mcpu=g14%>"); break;
	}
      break;
    }
}
429
/* Most registers are 8 bits.  Some are 16 bits because, for example,
   gcc doesn't like dealing with $FP as a register pair (the second
   half of $fp is also 2 to keep reload happy wrt register pairs, but
   no register class includes it).  This table maps register numbers
   to size in bytes.  */
static const int register_sizes[] =
{
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 1, 1,
  1, 1, 1, 1, 1, 1, 2, 2,
  1, 1, 1, 1, 1, 1, 1, 1,
  2, 2, 1, 1, 1
};
443
444 /* Predicates used in the MD patterns. This one is true when virtual
445 insns may be matched, which typically means before (or during) the
446 devirt pass. */
447 bool
448 rl78_virt_insns_ok (void)
449 {
450 if (cfun)
451 return cfun->machine->virt_insns_ok;
452 return true;
453 }
454
455 /* Predicates used in the MD patterns. This one is true when real
456 insns may be matched, which typically means after (or during) the
457 devirt pass. */
458 bool
459 rl78_real_insns_ok (void)
460 {
461 if (cfun)
462 return cfun->machine->real_insns_ok;
463 return false;
464 }
465
466 #undef TARGET_HARD_REGNO_NREGS
467 #define TARGET_HARD_REGNO_NREGS rl78_hard_regno_nregs
468
469 static unsigned int
470 rl78_hard_regno_nregs (unsigned int regno, machine_mode mode)
471 {
472 int rs = register_sizes[regno];
473 if (rs < 1)
474 rs = 1;
475 return ((GET_MODE_SIZE (mode) + rs - 1) / rs);
476 }
477
478 #undef TARGET_HARD_REGNO_MODE_OK
479 #define TARGET_HARD_REGNO_MODE_OK rl78_hard_regno_mode_ok
480
481 static bool
482 rl78_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
483 {
484 int s = GET_MODE_SIZE (mode);
485
486 if (s < 1)
487 return false;
488 /* These are not to be used by gcc. */
489 if (regno == 23 || regno == ES_REG || regno == CS_REG)
490 return false;
491 /* $fp can always be accessed as a 16-bit value. */
492 if (regno == FP_REG && s == 2)
493 return true;
494 if (regno < SP_REG)
495 {
496 /* Since a reg-reg move is really a reg-mem move, we must
497 enforce alignment. */
498 if (s > 1 && (regno % 2))
499 return false;
500 return true;
501 }
502 if (s == CC_REGNUM)
503 return (mode == BImode);
504 /* All other registers must be accessed in their natural sizes. */
505 if (s == register_sizes [regno])
506 return true;
507 return false;
508 }
509
510 #undef TARGET_MODES_TIEABLE_P
511 #define TARGET_MODES_TIEABLE_P rl78_modes_tieable_p
512
513 static bool
514 rl78_modes_tieable_p (machine_mode mode1, machine_mode mode2)
515 {
516 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
517 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
518 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
519 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
520 }
521
522 /* Simplify_gen_subreg() doesn't handle memory references the way we
523 need it to below, so we use this function for when we must get a
524 valid subreg in a "natural" state. */
525 static rtx
526 rl78_subreg (machine_mode mode, rtx r, machine_mode omode, int byte)
527 {
528 if (GET_CODE (r) == MEM)
529 return adjust_address (r, mode, byte);
530 else
531 return simplify_gen_subreg (mode, r, omode, byte);
532 }
533
/* Used by movsi.  Split SImode moves into two HImode moves, using
   appropriate patterns for the upper and lower halves of symbols.  */
void
rl78_expand_movsi (rtx *operands)
{
  rtx op00, op02, op10, op12;

  op00 = rl78_subreg (HImode, operands[0], SImode, 0);
  op02 = rl78_subreg (HImode, operands[0], SImode, 2);
  if (GET_CODE (operands[1]) == CONST
      || GET_CODE (operands[1]) == SYMBOL_REF)
    {
      /* Symbolic sources cannot be subreg'd; extract the low and high
	 16 bits with ZERO_EXTRACT wrapped in CONST instead.  */
      op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
      op10 = gen_rtx_CONST (HImode, op10);
      op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
      op12 = gen_rtx_CONST (HImode, op12);
    }
  else
    {
      op10 = rl78_subreg (HImode, operands[1], SImode, 0);
      op12 = rl78_subreg (HImode, operands[1], SImode, 2);
    }

  if (rtx_equal_p (operands[0], operands[1]))
    ;
  /* If the low destination half overlaps the high source half, emit
     the high move first so the overlap is not clobbered too early.  */
  else if (rtx_equal_p (op00, op12))
    {
      emit_move_insn (op02, op12);
      emit_move_insn (op00, op10);
    }
  else
    {
      emit_move_insn (op00, op10);
      emit_move_insn (op02, op12);
    }
}
570
/* Generate code to move an SImode value.  Fills operands[2..5] with
   the two destination/source half-pairs, ordered so that an
   overlapping low-destination/high-source pair is moved safely.  */
void
rl78_split_movsi (rtx *operands, machine_mode omode)
{
  rtx op00, op02, op10, op12;

  op00 = rl78_subreg (HImode, operands[0], omode, 0);
  op02 = rl78_subreg (HImode, operands[0], omode, 2);

  if (GET_CODE (operands[1]) == CONST
      || GET_CODE (operands[1]) == SYMBOL_REF)
    {
      /* Symbolic sources: extract halves via CONST (ZERO_EXTRACT).  */
      op10 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (0));
      op10 = gen_rtx_CONST (HImode, op10);
      op12 = gen_rtx_ZERO_EXTRACT (HImode, operands[1], GEN_INT (16), GEN_INT (16));
      op12 = gen_rtx_CONST (HImode, op12);
    }
  else
    {
      op10 = rl78_subreg (HImode, operands[1], omode, 0);
      op12 = rl78_subreg (HImode, operands[1], omode, 2);
    }

  if (rtx_equal_p (operands[0], operands[1]))
    ;
  /* Overlap: move the high half first (operands[2]/[4] pair).  */
  else if (rtx_equal_p (op00, op12))
    {
      operands[2] = op02;
      operands[4] = op12;
      operands[3] = op00;
      operands[5] = op10;
    }
  else
    {
      operands[2] = op00;
      operands[4] = op10;
      operands[3] = op02;
      operands[5] = op12;
    }
}
611
612 void
613 rl78_split_movdi (rtx *operands, enum machine_mode omode)
614 {
615 rtx op00, op04, op10, op14;
616 op00 = rl78_subreg (SImode, operands[0], omode, 0);
617 op04 = rl78_subreg (SImode, operands[0], omode, 4);
618 op10 = rl78_subreg (SImode, operands[1], omode, 0);
619 op14 = rl78_subreg (SImode, operands[1], omode, 4);
620 emit_insn (gen_movsi (op00, op10));
621 emit_insn (gen_movsi (op04, op14));
622 }
623
624 /* Used by various two-operand expanders which cannot accept all
625 operands in the "far" namespace. Force some such operands into
626 registers so that each pattern has at most one far operand. */
627 int
628 rl78_force_nonfar_2 (rtx *operands, rtx (*gen)(rtx,rtx))
629 {
630 int did = 0;
631 rtx temp_reg = NULL;
632
633 /* FIXME: in the future, be smarter about only doing this if the
634 other operand is also far, assuming the devirtualizer can also
635 handle that. */
636 if (rl78_far_p (operands[0]))
637 {
638 temp_reg = operands[0];
639 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
640 did = 1;
641 }
642 if (!did)
643 return 0;
644
645 emit_insn (gen (operands[0], operands[1]));
646 if (temp_reg)
647 emit_move_insn (temp_reg, operands[0]);
648 return 1;
649 }
650
651 /* Likewise, but for three-operand expanders. */
652 int
653 rl78_force_nonfar_3 (rtx *operands, rtx (*gen)(rtx,rtx,rtx))
654 {
655 int did = 0;
656 rtx temp_reg = NULL;
657
658 /* FIXME: Likewise. */
659 if (rl78_far_p (operands[1]))
660 {
661 rtx temp_reg = gen_reg_rtx (GET_MODE (operands[1]));
662 emit_move_insn (temp_reg, operands[1]);
663 operands[1] = temp_reg;
664 did = 1;
665 }
666 if (rl78_far_p (operands[0]))
667 {
668 temp_reg = operands[0];
669 operands[0] = gen_reg_rtx (GET_MODE (operands[0]));
670 did = 1;
671 }
672 if (!did)
673 return 0;
674
675 emit_insn (gen (operands[0], operands[1], operands[2]));
676 if (temp_reg)
677 emit_move_insn (temp_reg, operands[0]);
678 return 1;
679 }
680
681 int
682 rl78_one_far_p (rtx *operands, int n)
683 {
684 rtx which = NULL;
685 int i, c = 0;
686
687 for (i = 0; i < n; i ++)
688 if (rl78_far_p (operands[i]))
689 {
690 if (which == NULL)
691 which = operands[i];
692 else if (rtx_equal_p (operands[i], which))
693 continue;
694 c ++;
695 }
696 return c <= 1;
697 }
698
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE rl78_can_eliminate

/* All register eliminations listed in ELIMINABLE_REGS are always
   permitted on this target.  */
static bool
rl78_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to ATTRIBUTE_UNUSED)
{
  return true;
}
707
/* Returns true if the given register needs to be saved by the
   current function.  Interrupt handlers have stricter requirements
   than ordinary functions since they may interrupt anything.  */
static bool
need_to_save (unsigned int regno)
{
  if (is_interrupt_func (cfun->decl))
    {
      /* We don't know what devirt will need */
      if (regno < 8)
	return true;

      /* We don't need to save registers that have
	 been reserved for interrupt handlers.  */
      if (regno > 23)
	return false;

      /* If the handler is a non-leaf function then it may call
	 non-interrupt aware routines which will happily clobber
	 any call_used registers, so we have to preserve them.
	 We do not have to worry about the frame pointer register
	 though, as that is handled below.  */
      if (!crtl->is_leaf && call_used_or_fixed_reg_p (regno) && regno < 22)
	return true;

      /* Otherwise we only have to save a register, call_used
	 or not, if it is used by this handler.  */
      return df_regs_ever_live_p (regno);
    }

  /* Ordinary functions: save the frame pointer when needed or live.  */
  if (regno == FRAME_POINTER_REGNUM
      && (frame_pointer_needed || df_regs_ever_live_p (regno)))
    return true;
  if (fixed_regs[regno])
    return false;
  /* EH return paths may clobber anything; be conservative.  */
  if (crtl->calls_eh_return)
    return true;
  /* Call-saved registers that are actually used must be preserved.  */
  if (df_regs_ever_live_p (regno)
      && !call_used_or_fixed_reg_p (regno))
    return true;
  return false;
}
749
/* We use this to wrap all emitted insns in the prologue: marking an
   insn frame-related makes the DWARF CFI machinery track it.  */
static rtx
F (rtx x)
{
  RTX_FRAME_RELATED_P (x) = 1;
  return x;
}
757
/* Compute all the frame-related fields in our machine_function
   structure: which register pairs to push and the sizes of the
   register-save, locals and outgoing-argument areas.  */
static void
rl78_compute_frame_info (void)
{
  int i;

  cfun->machine->computed = 1;
  cfun->machine->framesize_regs = 0;
  cfun->machine->framesize_locals = get_frame_size ();
  cfun->machine->framesize_outgoing = crtl->outgoing_args_size;

  /* Registers are pushed in 16-bit pairs; a pair is pushed if either
     half needs saving.  */
  for (i = 0; i < 16; i ++)
    if (need_to_save (i * 2) || need_to_save (i * 2 + 1))
      {
	cfun->machine->need_to_push [i] = 1;
	cfun->machine->framesize_regs += 2;
      }
    else
      cfun->machine->need_to_push [i] = 0;

  /* Keep the stack 2-byte aligned by padding the locals area.  */
  if ((cfun->machine->framesize_locals + cfun->machine->framesize_outgoing) & 1)
    cfun->machine->framesize_locals ++;

  cfun->machine->framesize = (cfun->machine->framesize_regs
			      + cfun->machine->framesize_locals
			      + cfun->machine->framesize_outgoing);
}
786
787 /* Returns true if the provided function has the specified attribute. */
789 static inline bool
790 has_func_attr (const_tree decl, const char * func_attr)
791 {
792 if (decl == NULL_TREE)
793 decl = current_function_decl;
794
795 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
796 }
797
/* Returns true if the provided function has the "interrupt" attribute
   (a "brk_interrupt" handler counts as an interrupt handler too).  */
static inline bool
is_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "interrupt") || has_func_attr (decl, "brk_interrupt");
}
804
/* Returns true if the provided function has the "brk_interrupt"
   attribute (returns via RETB rather than RETI).  */
static inline bool
is_brk_interrupt_func (const_tree decl)
{
  return has_func_attr (decl, "brk_interrupt");
}
811
812 /* Check "interrupt" attributes. */
813 static tree
814 rl78_handle_func_attribute (tree * node,
815 tree name,
816 tree args ATTRIBUTE_UNUSED,
817 int flags ATTRIBUTE_UNUSED,
818 bool * no_add_attrs)
819 {
820 gcc_assert (DECL_P (* node));
821
822 if (TREE_CODE (* node) != FUNCTION_DECL)
823 {
824 warning (OPT_Wattributes, "%qE attribute only applies to functions",
825 name);
826 * no_add_attrs = true;
827 }
828
829 /* FIXME: We ought to check that the interrupt and exception
830 handler attributes have been applied to void functions. */
831 return NULL_TREE;
832 }
833
/* Check "naked" attributes: no arguments, functions only.  Also
   suppresses warnings on the function body, since naked functions are
   written entirely in inline asm.  */
static tree
rl78_handle_naked_attribute (tree * node,
			     tree name ATTRIBUTE_UNUSED,
			     tree args,
			     int flags ATTRIBUTE_UNUSED,
			     bool * no_add_attrs)
{
  gcc_assert (DECL_P (* node));
  gcc_assert (args == NULL_TREE);

  if (TREE_CODE (* node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "naked attribute only applies to functions");
      * no_add_attrs = true;
    }

  /* Disable warnings about this function - eg reaching the end without
     seeing a return statement - because the programmer is doing things
     that gcc does not know about.  */
  TREE_NO_WARNING (* node) = 1;

  return NULL_TREE;
}
858
859 /* Check "saddr" attributes. */
860 static tree
861 rl78_handle_saddr_attribute (tree * node,
862 tree name,
863 tree args ATTRIBUTE_UNUSED,
864 int flags ATTRIBUTE_UNUSED,
865 bool * no_add_attrs)
866 {
867 gcc_assert (DECL_P (* node));
868
869 if (TREE_CODE (* node) == FUNCTION_DECL)
870 {
871 warning (OPT_Wattributes, "%qE attribute doesn%'t apply to functions",
872 name);
873 * no_add_attrs = true;
874 }
875
876 return NULL_TREE;
877 }
878
/* Check "vector" attribute: requires at least one argument (the
   vector number(s)) and applies only to functions.  */

static tree
rl78_handle_vector_attribute (tree * node,
			      tree name,
			      tree args,
			      int flags ATTRIBUTE_UNUSED,
			      bool * no_add_attrs)
{
  gcc_assert (DECL_P (* node));
  gcc_assert (args != NULL_TREE);

  if (TREE_CODE (* node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute only applies to functions",
	       name);
      * no_add_attrs = true;
    }

  return NULL_TREE;
}
900
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE rl78_attribute_table

/* Table of RL78-specific attributes.  Terminated by the all-NULL
   sentinel entry.  */
const struct attribute_spec rl78_attribute_table[] =
{
  /* Name, min_len, max_len, decl_req, type_req, fn_type_req,
     affects_type_identity, handler, exclude.  */
  { "interrupt",      0, -1, true, false, false, false,
    rl78_handle_func_attribute, NULL },
  { "brk_interrupt",  0, 0,  true, false, false, false,
    rl78_handle_func_attribute, NULL },
  { "naked",          0, 0,  true, false, false, false,
    rl78_handle_naked_attribute, NULL },
  { "saddr",          0, 0,  true, false, false, false,
    rl78_handle_saddr_attribute, NULL },
  { "vector",         1, -1, true, false, false, false,
    rl78_handle_vector_attribute, NULL },
  { NULL,             0, 0,  false, false, false, false, NULL, NULL }
};
921
922
923
924 /* Break down an address RTX into its component base/index/addend
926 portions and return TRUE if the address is of a valid form, else
927 FALSE. */
928 static bool
929 characterize_address (rtx x, rtx *base, rtx *index, rtx *addend)
930 {
931 *base = NULL_RTX;
932 *index = NULL_RTX;
933 *addend = NULL_RTX;
934
935 if (GET_CODE (x) == UNSPEC
936 && XINT (x, 1) == UNS_ES_ADDR)
937 x = XVECEXP (x, 0, 1);
938
939 if (GET_CODE (x) == REG)
940 {
941 *base = x;
942 return true;
943 }
944
945 /* We sometimes get these without the CONST wrapper */
946 if (GET_CODE (x) == PLUS
947 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
948 && GET_CODE (XEXP (x, 1)) == CONST_INT)
949 {
950 *addend = x;
951 return true;
952 }
953
954 if (GET_CODE (x) == PLUS)
955 {
956 *base = XEXP (x, 0);
957 x = XEXP (x, 1);
958
959 if (GET_CODE (*base) == SUBREG)
960 {
961 if (GET_MODE (*base) == HImode
962 && GET_MODE (XEXP (*base, 0)) == SImode
963 && GET_CODE (XEXP (*base, 0)) == REG)
964 {
965 /* This is a throw-away rtx just to tell everyone
966 else what effective register we're using. */
967 *base = gen_rtx_REG (HImode, REGNO (XEXP (*base, 0)));
968 }
969 }
970
971 if (GET_CODE (*base) != REG
972 && GET_CODE (x) == REG)
973 {
974 rtx tmp = *base;
975 *base = x;
976 x = tmp;
977 }
978
979 if (GET_CODE (*base) != REG)
980 return false;
981
982 if (GET_CODE (x) == ZERO_EXTEND
983 && GET_CODE (XEXP (x, 0)) == REG)
984 {
985 *index = XEXP (x, 0);
986 return false;
987 }
988 }
989
990 switch (GET_CODE (x))
991 {
992 case PLUS:
993 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
994 && GET_CODE (XEXP (x, 0)) == CONST_INT)
995 {
996 *addend = x;
997 return true;
998 }
999 /* fall through */
1000 case MEM:
1001 case REG:
1002 return false;
1003
1004 case SUBREG:
1005 switch (GET_CODE (XEXP (x, 0)))
1006 {
1007 case CONST:
1008 case SYMBOL_REF:
1009 case CONST_INT:
1010 *addend = x;
1011 return true;
1012 default:
1013 return false;
1014 }
1015
1016 case CONST:
1017 case SYMBOL_REF:
1018 case CONST_INT:
1019 *addend = x;
1020 return true;
1021
1022 default:
1023 return false;
1024 }
1025
1026 return false;
1027 }
1028
1029 /* Used by the Whb constraint. Match addresses that use HL+B or HL+C
1030 addressing. */
1031 bool
1032 rl78_hl_b_c_addr_p (rtx op)
1033 {
1034 rtx hl, bc;
1035
1036 if (GET_CODE (op) != PLUS)
1037 return false;
1038 hl = XEXP (op, 0);
1039 bc = XEXP (op, 1);
1040 if (GET_CODE (hl) == ZERO_EXTEND)
1041 {
1042 rtx tmp = hl;
1043 hl = bc;
1044 bc = tmp;
1045 }
1046 if (GET_CODE (hl) != REG)
1047 return false;
1048 if (GET_CODE (bc) != ZERO_EXTEND)
1049 return false;
1050 bc = XEXP (bc, 0);
1051 if (GET_CODE (bc) != REG)
1052 return false;
1053 if (REGNO (hl) != HL_REG)
1054 return false;
1055 if (REGNO (bc) != B_REG && REGNO (bc) != C_REG)
1056 return false;
1057
1058 return true;
1059 }
1060
/* True when register number R is REGNO, or R is a pseudo and we are not
   checking strictly (a pseudo may later be allocated to any hard reg).
   Relies on a variable named `strict' being in scope at the use site.  */
#define REG_IS(r, regno) (((r) == (regno)) || ((r) >= FIRST_PSEUDO_REGISTER && !(strict)))
1062
1063 /* Return the appropriate mode for a named address address. */
1064
1065 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
1066 #define TARGET_ADDR_SPACE_ADDRESS_MODE rl78_addr_space_address_mode
1067
1068 static scalar_int_mode
1069 rl78_addr_space_address_mode (addr_space_t addrspace)
1070 {
1071 switch (addrspace)
1072 {
1073 case ADDR_SPACE_GENERIC:
1074 return HImode;
1075 case ADDR_SPACE_NEAR:
1076 return HImode;
1077 case ADDR_SPACE_FAR:
1078 return SImode;
1079 default:
1080 gcc_unreachable ();
1081 }
1082 }
1083
/* Used in various constraints and predicates to match operands in the
   "far" address space.  Returns nonzero only for a legitimate MEM
   whose address space uses 32-bit addresses.  */
int
rl78_far_p (rtx x)
{
  if (! MEM_P (x))
    return 0;
#if DEBUG0
  fprintf (stderr, "\033[35mrl78_far_p: "); debug_rtx (x);
  fprintf (stderr, " = %d\033[0m\n", MEM_ADDR_SPACE (x) == ADDR_SPACE_FAR);
#endif

  /* Not all far addresses are legitimate, because the devirtualizer
     can't handle them.  */
  if (! rl78_as_legitimate_address (GET_MODE (x), XEXP (x, 0), false, ADDR_SPACE_FAR))
    return 0;

  return GET_MODE_BITSIZE (rl78_addr_space_address_mode (MEM_ADDR_SPACE (x))) == 32;
}
1103
1104 /* Return the appropriate mode for a named address pointer. */
1105 #undef TARGET_ADDR_SPACE_POINTER_MODE
1106 #define TARGET_ADDR_SPACE_POINTER_MODE rl78_addr_space_pointer_mode
1107
1108 static scalar_int_mode
1109 rl78_addr_space_pointer_mode (addr_space_t addrspace)
1110 {
1111 switch (addrspace)
1112 {
1113 case ADDR_SPACE_GENERIC:
1114 return HImode;
1115 case ADDR_SPACE_NEAR:
1116 return HImode;
1117 case ADDR_SPACE_FAR:
1118 return SImode;
1119 default:
1120 gcc_unreachable ();
1121 }
1122 }
1123
1124 /* Returns TRUE for valid addresses. */
1125 #undef TARGET_VALID_POINTER_MODE
1126 #define TARGET_VALID_POINTER_MODE rl78_valid_pointer_mode
1127
1128 static bool
1129 rl78_valid_pointer_mode (scalar_int_mode m)
1130 {
1131 return (m == HImode || m == SImode);
1132 }
1133
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P rl78_is_legitimate_constant

/* Every constant is legitimate on this target.  */
static bool
rl78_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED)
{
  return true;
}
1142
#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P rl78_as_legitimate_address

/* Return true if X is a legitimate address for MODE in address space
   AS.  Non-static because rl78_far_p also calls it directly.  */
bool
rl78_as_legitimate_address (machine_mode mode ATTRIBUTE_UNUSED, rtx x,
			    bool strict ATTRIBUTE_UNUSED, addr_space_t as ATTRIBUTE_UNUSED)
{
  rtx base, index, addend;
  bool is_far_addr = false;
  int as_bits;

  as_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (as));

  /* Unwrap an ES: override; remember that we saw one.  */
  if (GET_CODE (x) == UNSPEC
      && XINT (x, 1) == UNS_ES_ADDR)
    {
      x = XVECEXP (x, 0, 1);
      is_far_addr = true;
    }

  /* An ES: override only makes sense for 32-bit address spaces.  */
  if (as_bits == 16 && is_far_addr)
    return false;

  if (! characterize_address (x, &base, &index, &addend))
    return false;

  /* We can't extract the high/low portions of a PLUS address
     involving a register during devirtualization, so make sure all
     such __far addresses do not have addends.  This forces GCC to do
     the sum separately.  */
  if (addend && base && as_bits == 32 && GET_MODE (base) == SImode)
    return false;

  if (base && index)
    {
      int ir = REGNO (index);
      int br = REGNO (base);

#define OK(test, debug) if (test) { /*fprintf(stderr, "%d: OK %s\n", __LINE__, debug);*/ return true; }
      /* The only base+index forms the hardware has are [hl+b] and [hl+c].  */
      OK (REG_IS (br, HL_REG) && REG_IS (ir, B_REG), "[hl+b]");
      OK (REG_IS (br, HL_REG) && REG_IS (ir, C_REG), "[hl+c]");
      return false;
    }

  /* Strict checking: pseudos are not acceptable as bases.  */
  if (strict && base && GET_CODE (base) == REG && REGNO (base) >= FIRST_PSEUDO_REGISTER)
    return false;

  /* Once devirtualized, the virtual registers r8..r31 may no longer
     serve as address bases.  */
  if (! cfun->machine->virt_insns_ok && base && GET_CODE (base) == REG
      && REGNO (base) >= 8 && REGNO (base) <= 31)
    return false;

  return true;
}
1199
1200 /* Determine if one named address space is a subset of another. */
1201 #undef TARGET_ADDR_SPACE_SUBSET_P
1202 #define TARGET_ADDR_SPACE_SUBSET_P rl78_addr_space_subset_p
1203
1204 static bool
1205 rl78_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1206 {
1207 int subset_bits;
1208 int superset_bits;
1209
1210 subset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (subset));
1211 superset_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (superset));
1212
1213 return (subset_bits <= superset_bits);
1214 }
1215
1216 #undef TARGET_ADDR_SPACE_CONVERT
1217 #define TARGET_ADDR_SPACE_CONVERT rl78_addr_space_convert
1218
1219 /* Convert from one address space to another. */
1220 static rtx
1221 rl78_addr_space_convert (rtx op, tree from_type, tree to_type)
1222 {
1223 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (from_type));
1224 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (to_type));
1225 rtx result;
1226 int to_bits;
1227 int from_bits;
1228
1229 to_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (to_as));
1230 from_bits = GET_MODE_BITSIZE (rl78_addr_space_address_mode (from_as));
1231
1232 if (to_bits < from_bits)
1233 {
1234 rtx tmp;
1235 /* This is unpredictable, as we're truncating off usable address
1236 bits. */
1237
1238 warning (OPT_Waddress, "converting far pointer to near pointer");
1239 result = gen_reg_rtx (HImode);
1240 if (GET_CODE (op) == SYMBOL_REF
1241 || (GET_CODE (op) == REG && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1242 tmp = gen_rtx_raw_SUBREG (HImode, op, 0);
1243 else
1244 tmp = simplify_subreg (HImode, op, SImode, 0);
1245 gcc_assert (tmp != NULL_RTX);
1246 emit_move_insn (result, tmp);
1247 return result;
1248 }
1249 else if (to_bits > from_bits)
1250 {
1251 /* This always works. */
1252 result = gen_reg_rtx (SImode);
1253 emit_move_insn (rl78_subreg (HImode, result, SImode, 0), op);
1254 if (TREE_CODE (from_type) == POINTER_TYPE
1255 && TREE_CODE (TREE_TYPE (from_type)) == FUNCTION_TYPE)
1256 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), const0_rtx);
1257 else
1258 emit_move_insn (rl78_subreg (HImode, result, SImode, 2), GEN_INT (0x0f));
1259 return result;
1260 }
1261 else
1262 return op;
1263 gcc_unreachable ();
1264 }
1265
1266 /* Implements REGNO_MODE_CODE_OK_FOR_BASE_P. */
1267 bool
1268 rl78_regno_mode_code_ok_for_base_p (int regno, machine_mode mode ATTRIBUTE_UNUSED,
1269 addr_space_t address_space ATTRIBUTE_UNUSED,
1270 int outer_code ATTRIBUTE_UNUSED, int index_code)
1271 {
1272 if (regno <= SP_REG && regno >= 16)
1273 return true;
1274 if (index_code == REG)
1275 return (regno == HL_REG);
1276 if (regno == C_REG || regno == B_REG || regno == E_REG || regno == L_REG)
1277 return true;
1278 return false;
1279 }
1280
1281 /* Implements MODE_CODE_BASE_REG_CLASS. */
1282 enum reg_class
1283 rl78_mode_code_base_reg_class (machine_mode mode ATTRIBUTE_UNUSED,
1284 addr_space_t address_space ATTRIBUTE_UNUSED,
1285 int outer_code ATTRIBUTE_UNUSED,
1286 int index_code ATTRIBUTE_UNUSED)
1287 {
1288 return V_REGS;
1289 }
1290
1291 /* Typical stack layout should looks like this after the function's prologue:
1292
1293 | |
1294 -- ^
1295 | | \ |
1296 | | arguments saved | Increasing
1297 | | on the stack | addresses
1298 PARENT arg pointer -> | | /
1299 -------------------------- ---- -------------------
1300 CHILD |ret | return address
1301 --
1302 | | \
1303 | | call saved
1304 | | registers
1305 frame pointer -> | | /
1306 --
1307 | | \
1308 | | local
1309 | | variables
1310 | | /
1311 --
1312 | | \
1313 | | outgoing | Decreasing
1314 | | arguments | addresses
1315 current stack pointer -> | | / |
1316 -------------------------- ---- ------------------ V
1317 | | */
1318
/* Implements INITIAL_ELIMINATION_OFFSET.  The frame layout is
   described in the machine_function struct definition, above.
   Returns the offset to add when eliminating register FROM in
   favour of register TO; computed as (offset of FROM relative to
   the arg pointer) minus (offset of TO), per the diagram above.  */
int
rl78_initial_elimination_offset (int from, int to)
{
  int rv = 0; /* as if arg to arg */

  rl78_compute_frame_info ();

  /* First walk down from the arg pointer to TO.  */
  switch (to)
    {
    case STACK_POINTER_REGNUM:
      rv += cfun->machine->framesize_outgoing;
      rv += cfun->machine->framesize_locals;
      /* Fall through.  */
    case FRAME_POINTER_REGNUM:
      rv += cfun->machine->framesize_regs;
      /* +4 presumably covers the saved return address slot shown in
	 the frame diagram above — confirm against rl78_compute_frame_info.  */
      rv += 4;
      break;
    default:
      gcc_unreachable ();
    }

  /* Then back up from FROM to the arg pointer.  */
  switch (from)
    {
    case FRAME_POINTER_REGNUM:
      rv -= 4;
      rv -= cfun->machine->framesize_regs;
      /* Fall through.  */
    case ARG_POINTER_REGNUM:
      break;
    default:
      gcc_unreachable ();
    }

  return rv;
}
1355
1356 static bool
1357 rl78_is_naked_func (void)
1358 {
1359 return (lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE);
1360 }
1361
1362 /* Check if the block uses mul/div insns for G13 target. */
1363
1364 static bool
1365 check_mduc_usage (void)
1366 {
1367 rtx_insn * insn;
1368 basic_block bb;
1369
1370 FOR_EACH_BB_FN (bb, cfun)
1371 {
1372 FOR_BB_INSNS (bb, insn)
1373 {
1374 if (INSN_P (insn)
1375 && (get_attr_is_g13_muldiv_insn (insn) == IS_G13_MULDIV_INSN_YES))
1376 return true;
1377 }
1378 }
1379 return false;
1380 }
1381
/* Expand the function prologue (from the prologue pattern).  Emits,
   in order: bank-0 select for interrupt functions, callee register
   pushes (with bank switching), ES save, MDUC register saves, frame
   pointer setup, and the stack adjustment for locals/outgoing args.
   NOTE(review): F () is defined earlier in this file; it appears to
   mark an insn as frame-related — confirm against its definition.  */

void
rl78_expand_prologue (void)
{
  int i, fs;
  rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
  rtx ax = gen_rtx_REG (HImode, AX_REG);
  int rb = 0;  /* Currently-selected register bank.  */

  /* Naked functions get no prologue at all.  */
  if (rl78_is_naked_func ())
    return;

  /* Always re-compute the frame info - the register usage may have changed.  */
  rl78_compute_frame_info ();

  /* Account for the MDUC save area (2 bytes per register) when those
     registers must be preserved.  */
  if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
    cfun->machine->framesize += ARRAY_SIZE (mduc_regs) * 2;

  if (flag_stack_usage_info)
    current_function_static_stack_size = cfun->machine->framesize;

  if (is_interrupt_func (cfun->decl) && !TARGET_G10)
    for (i = 0; i < 4; i++)
      if (cfun->machine->need_to_push [i])
	{
	  /* Select Bank 0 if we are using any registers from Bank 0.   */
	  emit_insn (gen_sel_rb (GEN_INT (0)));
	  break;
	}

  /* Push each live call-saved register pair.  */
  for (i = 0; i < 16; i++)
    if (cfun->machine->need_to_push [i])
      {
	int reg = i * 2;

	if (TARGET_G10)
	  {
	    /* G10 has only one bank; registers above r7 are saved by
	       copying them through AX first.  */
	    if (reg >= 8)
	      {
		emit_move_insn (ax, gen_rtx_REG (HImode, reg));
		reg = AX_REG;
	      }
	  }
	else
	  {
	    /* Switch to the bank that owns this register pair.  */
	    int need_bank = i/4;

	    if (need_bank != rb)
	      {
		emit_insn (gen_sel_rb (GEN_INT (need_bank)));
		rb = need_bank;
	      }
	  }

	F (emit_insn (gen_push (gen_rtx_REG (HImode, reg))));
      }

  /* Return to bank 0 if the pushes left us elsewhere.  */
  if (rb != 0)
    emit_insn (gen_sel_rb (GEN_INT (0)));

  /* Save ES register inside interrupt functions if it is used.  */
  if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
    {
      emit_insn (gen_movqi_from_es (gen_rtx_REG (QImode, A_REG)));
      F (emit_insn (gen_push (ax)));
    }

  /* Save MDUC registers inside interrupt routine.  */
  if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
    {
      for (unsigned i = 0; i < ARRAY_SIZE (mduc_regs); i++)
	{
	  mduc_reg_type *reg = mduc_regs + i;
	  rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));

	  MEM_VOLATILE_P (mem_mduc) = 1;
	  if (reg->mode == QImode)
	    emit_insn (gen_movqi (gen_rtx_REG (QImode, A_REG), mem_mduc));
	  else
	    emit_insn (gen_movhi (gen_rtx_REG (HImode, AX_REG), mem_mduc));

	  emit_insn (gen_push (gen_rtx_REG (HImode, AX_REG)));
	}
    }

  /* Establish the frame pointer, going through AX since SP cannot be
     moved directly to another register.  */
  if (frame_pointer_needed)
    {
      F (emit_move_insn (ax, sp));
      F (emit_move_insn (gen_rtx_REG (HImode, FRAME_POINTER_REGNUM), ax));
    }

  fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
  if (fs > 0)
    {
      /* If we need to subtract more than 254*3 then it is faster and
	 smaller to move SP into AX and perform the subtraction there.  */
      if (fs > 254 * 3)
	{
	  rtx insn;

	  emit_move_insn (ax, sp);
	  emit_insn (gen_subhi3 (ax, ax, GEN_INT (fs)));
	  insn = F (emit_move_insn (sp, ax));
	  /* Record the net SP adjustment for the unwinder, since the
	     actual arithmetic happened in AX.  */
	  add_reg_note (insn, REG_FRAME_RELATED_EXPR,
			gen_rtx_SET (sp, gen_rtx_PLUS (HImode, sp,
						       GEN_INT (-fs))));
	}
      else
	{
	  /* Otherwise subtract in chunks of at most 254 bytes.  */
	  while (fs > 0)
	    {
	      int fs_byte = (fs > 254) ? 254 : fs;

	      F (emit_insn (gen_subhi3 (sp, sp, GEN_INT (fs_byte))));
	      fs -= fs_byte;
	    }
	}
    }
}
1502
/* Expand the function epilogue (from the epilogue pattern).  Undoes
   the prologue in reverse order: stack/frame restore, MDUC restores,
   ES restore, register pops (with bank switching), trampoline
   cleanup, then the appropriate return insn.  */
void
rl78_expand_epilogue (void)
{
  int i, fs;
  rtx sp = gen_rtx_REG (HImode, STACK_POINTER_REGNUM);
  rtx ax = gen_rtx_REG (HImode, AX_REG);
  int rb = 0;  /* Currently-selected register bank.  */

  /* Naked functions get no epilogue at all.  */
  if (rl78_is_naked_func ())
    return;

  if (frame_pointer_needed)
    {
      /* Restore SP from the frame pointer, going through AX.  */
      emit_move_insn (ax, gen_rtx_REG (HImode, FRAME_POINTER_REGNUM));
      emit_move_insn (sp, ax);
    }
  else
    {
      /* Deallocate locals/outgoing area; mirror of the prologue's
	 subtraction strategy.  */
      fs = cfun->machine->framesize_locals + cfun->machine->framesize_outgoing;
      if (fs > 254 * 3)
	{
	  emit_move_insn (ax, sp);
	  emit_insn (gen_addhi3 (ax, ax, GEN_INT (fs)));
	  emit_move_insn (sp, ax);
	}
      else
	{
	  while (fs > 0)
	    {
	      int fs_byte = (fs > 254) ? 254 : fs;

	      emit_insn (gen_addhi3 (sp, sp, GEN_INT (fs_byte)));
	      fs -= fs_byte;
	    }
	}
    }

  /* Restore MDUC registers from interrupt routine.  Popped in the
     reverse of the order the prologue pushed them.  */
  if (MUST_SAVE_MDUC_REGISTERS && (!crtl->is_leaf || check_mduc_usage ()))
    {
      for (int i = ARRAY_SIZE (mduc_regs) - 1; i >= 0; i--)
	{
	  mduc_reg_type *reg = mduc_regs + i;
	  rtx mem_mduc = gen_rtx_MEM (reg->mode, GEN_INT (reg->address));

	  emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
	  MEM_VOLATILE_P (mem_mduc) = 1;
	  if (reg->mode == QImode)
	    emit_insn (gen_movqi (mem_mduc, gen_rtx_REG (QImode, A_REG)));
	  else
	    emit_insn (gen_movhi (mem_mduc, gen_rtx_REG (HImode, AX_REG)));
	}
    }

  /* Restore ES register inside interrupt functions if it was saved.  */
  if (is_interrupt_func (cfun->decl) && cfun->machine->uses_es)
    {
      emit_insn (gen_pop (gen_rtx_REG (HImode, AX_REG)));
      emit_insn (gen_movqi_to_es (gen_rtx_REG (QImode, A_REG)));
    }

  /* Pop saved registers in reverse order of the prologue pushes.  */
  for (i = 15; i >= 0; i--)
    if (cfun->machine->need_to_push [i])
      {
	rtx dest = gen_rtx_REG (HImode, i * 2);

	if (TARGET_G10)
	  {
	    if (i < 8)
	      emit_insn (gen_pop (dest));
	    else
	      {
		/* G10: registers above r15 are restored through AX.  */
		emit_insn (gen_pop (ax));
		emit_move_insn (dest, ax);
		/* Generate a USE of the pop'd register so that DCE will not eliminate the move.  */
		emit_insn (gen_use (dest));
	      }
	  }
	else
	  {
	    /* Switch to the bank that owns this register pair.  */
	    int need_bank = i / 4;

	    if (need_bank != rb)
	      {
		emit_insn (gen_sel_rb (GEN_INT (need_bank)));
		rb = need_bank;
	      }
	    emit_insn (gen_pop (dest));
	  }
      }

  /* Return to bank 0 if the pops left us elsewhere.  */
  if (rb != 0)
    emit_insn (gen_sel_rb (GEN_INT (0)));

  if (cfun->machine->trampolines_used)
    emit_insn (gen_trampoline_uninit ());

  /* Emit the flavour of return this function needs.  */
  if (is_brk_interrupt_func (cfun->decl))
    emit_jump_insn (gen_brk_interrupt_return ());
  else if (is_interrupt_func (cfun->decl))
    emit_jump_insn (gen_interrupt_return ());
  else
    emit_jump_insn (gen_rl78_return ());
}
1607
/* Likewise, for exception handlers.  Currently just emits a plain
   return; the EH stack adjustment carried in X is ignored.  */
void
rl78_expand_eh_epilogue (rtx x ATTRIBUTE_UNUSED)
{
  /* FIXME - replace this with an indirect jump with stack adjust.  */
  emit_jump_insn (gen_rl78_return ());
}
1615
1616 #undef TARGET_ASM_FUNCTION_PROLOGUE
1617 #define TARGET_ASM_FUNCTION_PROLOGUE rl78_start_function
1618
/* Emit global "$tableentry$..." labels for each vector number given in
   the ANAME ("interrupt" or "vector") attribute of the current
   function.  String/identifier arguments change the vector table name
   (default "vect") or, for "$default", emit the default-entry label;
   integer arguments emit a numbered entry label into FILE.  */
static void
add_vector_labels (FILE *file, const char *aname)
{
  tree vec_attr;
  tree val_attr;
  const char *vname = "vect";
  const char *s;
  int vnum;

  /* This node is for the vector/interrupt tag itself */
  vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
  if (!vec_attr)
    return;

  /* Now point it at the first argument */
  vec_attr = TREE_VALUE (vec_attr);

  /* Iterate through the arguments.  */
  while (vec_attr)
    {
      val_attr = TREE_VALUE (vec_attr);
      switch (TREE_CODE (val_attr))
	{
	case STRING_CST:
	  s = TREE_STRING_POINTER (val_attr);
	  goto string_id_common;

	case IDENTIFIER_NODE:
	  s = IDENTIFIER_POINTER (val_attr);

	  /* Shared handling for string and identifier arguments.  */
	string_id_common:
	  if (strcmp (s, "$default") == 0)
	    {
	      fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
	      fprintf (file, "$tableentry$default$%s:\n", vname);
	    }
	  else
	    vname = s;
	  break;

	case INTEGER_CST:
	  vnum = TREE_INT_CST_LOW (val_attr);

	  fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
	  fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
	  break;

	default:
	  /* Other argument kinds are silently ignored.  */
	  ;
	}

      vec_attr = TREE_CHAIN (vec_attr);
    }

}
1674
1675 /* We don't use this to actually emit the function prologue. We use
1676 this to insert a comment in the asm file describing the
1677 function. */
1678 static void
1679 rl78_start_function (FILE *file)
1680 {
1681 int i;
1682
1683 add_vector_labels (file, "interrupt");
1684 add_vector_labels (file, "vector");
1685
1686 if (cfun->machine->framesize == 0)
1687 return;
1688 fprintf (file, "\t; start of function\n");
1689
1690 if (cfun->machine->framesize_regs)
1691 {
1692 fprintf (file, "\t; push %d:", cfun->machine->framesize_regs);
1693 for (i = 0; i < 16; i ++)
1694 if (cfun->machine->need_to_push[i])
1695 fprintf (file, " %s", word_regnames[i*2]);
1696 fprintf (file, "\n");
1697 }
1698
1699 if (frame_pointer_needed)
1700 fprintf (file, "\t; $fp points here (r22)\n");
1701
1702 if (cfun->machine->framesize_locals)
1703 fprintf (file, "\t; locals: %d byte%s\n", cfun->machine->framesize_locals,
1704 cfun->machine->framesize_locals == 1 ? "" : "s");
1705
1706 if (cfun->machine->framesize_outgoing)
1707 fprintf (file, "\t; outgoing: %d byte%s\n", cfun->machine->framesize_outgoing,
1708 cfun->machine->framesize_outgoing == 1 ? "" : "s");
1709
1710 if (cfun->machine->uses_es)
1711 fprintf (file, "\t; uses ES register\n");
1712
1713 if (MUST_SAVE_MDUC_REGISTERS)
1714 fprintf (file, "\t; preserves MDUC registers\n");
1715 }
1716
1717 /* Return an RTL describing where a function return value of type RET_TYPE
1718 is held. */
1719
1720 #undef TARGET_FUNCTION_VALUE
1721 #define TARGET_FUNCTION_VALUE rl78_function_value
1722
1723 static rtx
1724 rl78_function_value (const_tree ret_type,
1725 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1726 bool outgoing ATTRIBUTE_UNUSED)
1727 {
1728 machine_mode mode = TYPE_MODE (ret_type);
1729
1730 return gen_rtx_REG (mode, 8);
1731 }
1732
1733 #undef TARGET_PROMOTE_FUNCTION_MODE
1734 #define TARGET_PROMOTE_FUNCTION_MODE rl78_promote_function_mode
1735
1736 static machine_mode
1737 rl78_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
1738 machine_mode mode,
1739 int *punsignedp ATTRIBUTE_UNUSED,
1740 const_tree funtype ATTRIBUTE_UNUSED, int for_return ATTRIBUTE_UNUSED)
1741 {
1742 return mode;
1743 }
1744
1745 #undef TARGET_FUNCTION_ARG
1746 #define TARGET_FUNCTION_ARG rl78_function_arg
1747
1748 static rtx
1749 rl78_function_arg (cumulative_args_t, const function_arg_info &)
1750 {
1751 return NULL_RTX;
1752 }
1753
1754 #undef TARGET_FUNCTION_ARG_ADVANCE
1755 #define TARGET_FUNCTION_ARG_ADVANCE rl78_function_arg_advance
1756
1757 static void
1758 rl78_function_arg_advance (cumulative_args_t cum_v,
1759 const function_arg_info &arg)
1760 {
1761 int rounded_size;
1762 CUMULATIVE_ARGS * cum = get_cumulative_args (cum_v);
1763
1764 rounded_size = arg.promoted_size_in_bytes ();
1765 if (rounded_size & 1)
1766 rounded_size ++;
1767 (*cum) += rounded_size;
1768 }
1769
1770 #undef TARGET_FUNCTION_ARG_BOUNDARY
1771 #define TARGET_FUNCTION_ARG_BOUNDARY rl78_function_arg_boundary
1772
1773 static unsigned int
1774 rl78_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
1775 const_tree type ATTRIBUTE_UNUSED)
1776 {
1777 return 16;
1778 }
1779
1780 /* Supported modifier letters:
1781
1782 A - address of a MEM
1783 S - SADDR form of a real register
1784 v - real register corresponding to a virtual register
1785 m - minus - negative of CONST_INT value.
1786 C - inverse of a conditional (NE vs EQ for example)
1787 C - complement of an integer
1788 z - collapsed conditional
1789 s - shift count mod 8
1790 S - shift count mod 16
1791 r - reverse shift count (8-(count mod 8))
1792 B - bit position
1793
1794 h - bottom HI of an SI
1795 H - top HI of an SI
1796 q - bottom QI of an HI
1797 Q - top QI of an HI
1798 e - third QI of an SI (i.e. where the ES register gets values from)
1799 E - fourth QI of an SI (i.e. MSB)
1800
1801 p - Add +0 to a zero-indexed HL based address.
1802 */
1803
/* Implements the bulk of rl78_print_operand, below.  We do it this
   way because we need to test for a constant at the top level and
   insert the '#', but not test for it anywhere else as we recurse
   down into the operand.  LETTER is the modifier character (see the
   legend above), or 0 for none.  */
static void
rl78_print_operand_1 (FILE * file, rtx op, int letter)
{
  int need_paren;

  switch (GET_CODE (op))
    {
    case MEM:
      /* 'A': print only the address expression of the MEM.  */
      if (letter == 'A')
	rl78_print_operand_1 (file, XEXP (op, 0), letter);
      else
	{
	  /* Far accesses get an "es:" segment prefix; unwrap the
	     ES-address unspec to reach the real address.  */
	  if (rl78_far_p (op))
	    {
	      fprintf (file, "es:");
	      if (GET_CODE (XEXP (op, 0)) == UNSPEC)
		op = gen_rtx_MEM (GET_MODE (op), XVECEXP (XEXP (op, 0), 0, 1));
	    }
	  /* Part-selection modifiers become a byte offset into the MEM,
	     after which no modifier remains to process.  */
	  if (letter == 'H')
	    {
	      op = adjust_address (op, HImode, 2);
	      letter = 0;
	    }
	  if (letter == 'h')
	    {
	      op = adjust_address (op, HImode, 0);
	      letter = 0;
	    }
	  if (letter == 'Q')
	    {
	      op = adjust_address (op, QImode, 1);
	      letter = 0;
	    }
	  if (letter == 'q')
	    {
	      op = adjust_address (op, QImode, 0);
	      letter = 0;
	    }
	  if (letter == 'e')
	    {
	      op = adjust_address (op, QImode, 2);
	      letter = 0;
	    }
	  if (letter == 'E')
	    {
	      op = adjust_address (op, QImode, 3);
	      letter = 0;
	    }
	  /* Absolute addresses get a '!' prefix unless in SADDR form.  */
	  if (CONSTANT_P (XEXP (op, 0)))
	    {
	      if (!rl78_saddr_p (op))
		fprintf (file, "!");
	      rl78_print_operand_1 (file, XEXP (op, 0), letter);
	    }
	  else if (GET_CODE (XEXP (op, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF)
	    {
	      if (!rl78_saddr_p (op))
		fprintf (file, "!");
	      rl78_print_operand_1 (file, XEXP (op, 0), letter);
	    }
	  else if (GET_CODE (XEXP (op, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (op, 0), 0)) == REG
		   && REGNO (XEXP (XEXP (op, 0), 0)) == 2)
	    {
	      /* reg2-based address: emit "disp[reg]" with the
		 displacement first (modifier 'u' suppresses '#').  */
	      rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 1), 'u');
	      fprintf (file, "[");
	      rl78_print_operand_1 (file, XEXP (XEXP (op, 0), 0), 0);
	      if (letter == 'p' && GET_CODE (XEXP (op, 0)) == REG)
		fprintf (file, "+0");
	      fprintf (file, "]");
	    }
	  else
	    {
	      /* Plain register-indirect: "[reg]", with "+0" appended
		 for 'p' on a zero-indexed HL (reg 6) address.  */
	      op = XEXP (op, 0);
	      fprintf (file, "[");
	      rl78_print_operand_1 (file, op, letter);
	      if (letter == 'p' && REG_P (op) && REGNO (op) == 6)
		fprintf (file, "+0");
	      fprintf (file, "]");
	    }
	}
      break;

    case REG:
      /* Part-selection on registers maps to adjacent register numbers;
	 'S' prints the register's SADDR-space address instead.  */
      if (letter == 'Q')
	fprintf (file, "%s", reg_names [REGNO (op) | 1]);
      else if (letter == 'H')
	fprintf (file, "%s", reg_names [REGNO (op) + 2]);
      else if (letter == 'q')
	fprintf (file, "%s", reg_names [REGNO (op) & ~1]);
      else if (letter == 'e')
	fprintf (file, "%s", reg_names [REGNO (op) + 2]);
      else if (letter == 'E')
	fprintf (file, "%s", reg_names [REGNO (op) + 3]);
      else if (letter == 'S')
	fprintf (file, "0x%x", 0xffef8 + REGNO (op));
      else if (GET_MODE (op) == HImode
	       && ! (REGNO (op) & ~0xfe))
	{
	  /* Even hard register in HImode: print the word name; 'v'
	     folds it down to the bank-0 (real) register name.  */
	  if (letter == 'v')
	    fprintf (file, "%s", word_regnames [REGNO (op) % 8]);
	  else
	    fprintf (file, "%s", word_regnames [REGNO (op)]);
	}
      else
	fprintf (file, "%s", reg_names [REGNO (op)]);
      break;

    case CONST_INT:
      /* Modifiers select bytes/halves of the value or transform it;
	 see the legend above the function.  */
      if (letter == 'Q')
	fprintf (file, "%ld", INTVAL (op) >> 8);
      else if (letter == 'H')
	fprintf (file, "%ld", INTVAL (op) >> 16);
      else if (letter == 'q')
	fprintf (file, "%ld", INTVAL (op) & 0xff);
      else if (letter == 'h')
	fprintf (file, "%ld", INTVAL (op) & 0xffff);
      else if (letter == 'e')
	fprintf (file, "%ld", (INTVAL (op) >> 16) & 0xff);
      else if (letter == 'B')
	{
	  /* Bit position: log2 of the (single-bit) mask, or of its
	     byte-complement for inverted masks.  */
	  int ival = INTVAL (op);
	  if (ival == -128)
	    ival = 0x80;
	  if (exact_log2 (ival) >= 0)
	    fprintf (file, "%d", exact_log2 (ival));
	  else
	    fprintf (file, "%d", exact_log2 (~ival & 0xff));
	}
      else if (letter == 'E')
	fprintf (file, "%ld", (INTVAL (op) >> 24) & 0xff);
      else if (letter == 'm')
	fprintf (file, "%ld", - INTVAL (op));
      else if (letter == 's')
	fprintf (file, "%ld", INTVAL (op) % 8);
      else if (letter == 'S')
	fprintf (file, "%ld", INTVAL (op) % 16);
      else if (letter == 'r')
	fprintf (file, "%ld", 8 - (INTVAL (op) % 8));
      else if (letter == 'C')
	/* Flips bit 15 of the 16-bit value — presumably the signed/
	   unsigned comparison bias; confirm against the compare
	   patterns that use %C on constants.  */
	fprintf (file, "%ld", (INTVAL (op) ^ 0x8000) & 0xffff);
      else
	fprintf (file, "%ld", INTVAL (op));
      break;

    case CONST:
      rl78_print_operand_1 (file, XEXP (op, 0), letter);
      break;

    case ZERO_EXTRACT:
      /* Relocation-style part selection on a symbolic value.  */
      {
	int bits = INTVAL (XEXP (op, 1));
	int ofs = INTVAL (XEXP (op, 2));
	if (bits == 16 && ofs == 0)
	  fprintf (file, "%%lo16(");
	else if (bits == 16 && ofs == 16)
	  fprintf (file, "%%hi16(");
	else if (bits == 8 && ofs == 16)
	  fprintf (file, "%%hi8(");
	else
	  gcc_unreachable ();
	rl78_print_operand_1 (file, XEXP (op, 0), 0);
	fprintf (file, ")");
      }
      break;

    case ZERO_EXTEND:
      if (GET_CODE (XEXP (op, 0)) == REG)
	fprintf (file, "%s", reg_names [REGNO (XEXP (op, 0))]);
      else
	print_rtl (file, op);
      break;

    case PLUS:
      /* Symbolic sums: wrap in the appropriate %lo16/%hi16/%hi8
	 relocation when a part-modifier was given, and use %code()
	 for function symbols.  */
      need_paren = 0;
      if (letter == 'H')
	{
	  fprintf (file, "%%hi16(");
	  need_paren = 1;
	  letter = 0;
	}
      if (letter == 'h')
	{
	  fprintf (file, "%%lo16(");
	  need_paren = 1;
	  letter = 0;
	}
      if (letter == 'e')
	{
	  fprintf (file, "%%hi8(");
	  need_paren = 1;
	  letter = 0;
	}
      if (letter == 'q' || letter == 'Q')
	output_operand_lossage ("q/Q modifiers invalid for symbol references");

      if (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND)
	{
	  if (GET_CODE (XEXP (op, 1)) == SYMBOL_REF
	      && SYMBOL_REF_DECL (XEXP (op, 1))
	      && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 1))) == FUNCTION_DECL)
	    {
	      fprintf (file, "%%code(");
	      assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 1), 0)));
	      fprintf (file, "+");
	      rl78_print_operand_1 (file, XEXP (op, 0), letter);
	      fprintf (file, ")");
	    }
	  else
	    {
	      rl78_print_operand_1 (file, XEXP (op, 1), letter);
	      fprintf (file, "+");
	      rl78_print_operand_1 (file, XEXP (op, 0), letter);
	    }
	}
      else
	{
	  if (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	      && SYMBOL_REF_DECL (XEXP (op, 0))
	      && TREE_CODE (SYMBOL_REF_DECL (XEXP (op, 0))) == FUNCTION_DECL)
	    {
	      fprintf (file, "%%code(");
	      assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (XEXP (op, 0), 0)));
	      fprintf (file, "+");
	      rl78_print_operand_1 (file, XEXP (op, 1), letter);
	      fprintf (file, ")");
	    }
	  else
	    {
	      rl78_print_operand_1 (file, XEXP (op, 0), letter);
	      fprintf (file, "+");
	      rl78_print_operand_1 (file, XEXP (op, 1), letter);
	    }
	}
      if (need_paren)
	fprintf (file, ")");
      break;

    case SUBREG:
      /* HImode subregs of a wider value print as relocated halves.  */
      if (GET_MODE (op) == HImode
	  && SUBREG_BYTE (op) == 0)
	{
	  fprintf (file, "%%lo16(");
	  rl78_print_operand_1 (file, SUBREG_REG (op), 0);
	  fprintf (file, ")");
	}
      else if (GET_MODE (op) == HImode
	       && SUBREG_BYTE (op) == 2)
	{
	  fprintf (file, "%%hi16(");
	  rl78_print_operand_1 (file, SUBREG_REG (op), 0);
	  fprintf (file, ")");
	}
      else
	{
	  fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
	}
      break;

    case SYMBOL_REF:
      /* Same relocation wrapping as the PLUS case, for a lone symbol.  */
      need_paren = 0;
      if (letter == 'H')
	{
	  fprintf (file, "%%hi16(");
	  need_paren = 1;
	  letter = 0;
	}
      if (letter == 'h')
	{
	  fprintf (file, "%%lo16(");
	  need_paren = 1;
	  letter = 0;
	}
      if (letter == 'e')
	{
	  fprintf (file, "%%hi8(");
	  need_paren = 1;
	  letter = 0;
	}
      if (letter == 'q' || letter == 'Q')
	output_operand_lossage ("q/Q modifiers invalid for symbol references");

      if (SYMBOL_REF_DECL (op) && TREE_CODE (SYMBOL_REF_DECL (op)) == FUNCTION_DECL)
	{
	  fprintf (file, "%%code(");
	  assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
	  fprintf (file, ")");
	}
      else
	assemble_name (file, rl78_strip_nonasm_name_encoding (XSTR (op, 0)));
      if (need_paren)
	fprintf (file, ")");
      break;

    case CODE_LABEL:
    case LABEL_REF:
      output_asm_label (op);
      break;

    /* Condition codes: print the branch condition mnemonic, its
       inverse for 'C', or the collapsed form for 'z'.  */
    case LTU:
      if (letter == 'z')
	fprintf (file, "#comparison eliminated");
      else
	fprintf (file, letter == 'C' ? "nc" : "c");
      break;
    case LEU:
      if (letter == 'z')
	fprintf (file, "br");
      else
	fprintf (file, letter == 'C' ? "h" : "nh");
      break;
    case GEU:
      if (letter == 'z')
	fprintf (file, "br");
      else
	fprintf (file, letter == 'C' ? "c" : "nc");
      break;
    case GTU:
      if (letter == 'z')
	fprintf (file, "#comparison eliminated");
      else
	fprintf (file, letter == 'C' ? "nh" : "h");
      break;
    case EQ:
      if (letter == 'z')
	fprintf (file, "br");
      else
	fprintf (file, letter == 'C' ? "nz" : "z");
      break;
    case NE:
      if (letter == 'z')
	fprintf (file, "#comparison eliminated");
      else
	fprintf (file, letter == 'C' ? "z" : "nz");
      break;

    /* Note: these assume appropriate adjustments were made so that
       unsigned comparisons, which is all this chip has, will
       work.  */
    case LT:
      if (letter == 'z')
	fprintf (file, "#comparison eliminated");
      else
	fprintf (file, letter == 'C' ? "nc" : "c");
      break;
    case LE:
      if (letter == 'z')
	fprintf (file, "br");
      else
	fprintf (file, letter == 'C' ? "h" : "nh");
      break;
    case GE:
      if (letter == 'z')
	fprintf (file, "br");
      else
	fprintf (file, letter == 'C' ? "c" : "nc");
      break;
    case GT:
      if (letter == 'z')
	fprintf (file, "#comparison eliminated");
      else
	fprintf (file, letter == 'C' ? "nh" : "h");
      break;

    default:
      fprintf (file, "(%s)", GET_RTX_NAME (GET_CODE (op)));
      break;
    }
}
2178
2179 #undef TARGET_PRINT_OPERAND
2180 #define TARGET_PRINT_OPERAND rl78_print_operand
2181
2182 static void
2183 rl78_print_operand (FILE * file, rtx op, int letter)
2184 {
2185 if (CONSTANT_P (op) && letter != 'u' && letter != 's' && letter != 'r' && letter != 'S' && letter != 'B')
2186 fprintf (file, "#");
2187 rl78_print_operand_1 (file, op, letter);
2188 }
2189
2190 #undef TARGET_TRAMPOLINE_INIT
2191 #define TARGET_TRAMPOLINE_INIT rl78_trampoline_init
2192
/* Note that the RL78's addressing makes it very difficult to do
   trampolines on the stack.  So, libgcc has a small pool of
   trampolines from which one is allocated to this task.  */
static void
rl78_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx mov_addr, thunk_addr;
  rtx function = XEXP (DECL_RTL (fndecl), 0);

  /* The trampoline slot's first word will receive the address of the
     allocated thunk.  */
  mov_addr = adjust_address (m_tramp, HImode, 0);
  thunk_addr = gen_reg_rtx (HImode);

  function = force_reg (HImode, function);
  static_chain = force_reg (HImode, static_chain);

  /* Ask libgcc (via the trampoline_init pattern) for a pool
     trampoline bound to FUNCTION/STATIC_CHAIN, then record its
     address in the slot.  */
  emit_insn (gen_trampoline_init (thunk_addr, function, static_chain));
  emit_move_insn (mov_addr, thunk_addr);

  /* Remembered so the epilogue emits trampoline_uninit.  */
  cfun->machine->trampolines_used = 1;
}
2213
2214 #undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
2215 #define TARGET_TRAMPOLINE_ADJUST_ADDRESS rl78_trampoline_adjust_address
2216
2217 static rtx
2218 rl78_trampoline_adjust_address (rtx m_tramp)
2219 {
2220 rtx x = gen_rtx_MEM (HImode, m_tramp);
2221 return x;
2222 }
2223
2224 /* Expander for cbranchqi4 and cbranchhi4. RL78 is missing some of
2226 the "normal" compares, specifically, it only has unsigned compares,
2227 so we must synthesize the missing ones. */
2228 void
2229 rl78_expand_compare (rtx *operands)
2230 {
2231 if (GET_CODE (operands[2]) == MEM)
2232 operands[2] = copy_to_mode_reg (GET_MODE (operands[2]), operands[2]);
2233 }
2234
2235
2236
2238 /* Define this to 1 if you are debugging the peephole optimizers. */
2239 #define DEBUG_PEEP 0
2240
/* Predicate used to enable the peephole2 patterns in rl78-virt.md.
   The default "word" size is a byte so we can effectively use all the
   registers, but we want to do 16-bit moves whenever possible.  This
   function determines when such a move is an option.  Returns true
   when the two QImode moves (op0 <- op1, op2 <- op3) can be merged
   into a single HImode move.  */
bool
rl78_peep_movhi_p (rtx *operands)
{
  int i;
  rtx m, a;

  /* (set (op0) (op1))
     (set (op2) (op3)) */

  if (! rl78_virt_insns_ok ())
    return false;

#if DEBUG_PEEP
  fprintf (stderr, "\033[33m");
  debug_rtx (operands[0]);
  debug_rtx (operands[1]);
  debug_rtx (operands[2]);
  debug_rtx (operands[3]);
  fprintf (stderr, "\033[0m");
#endif

  /* You can move a constant to memory as QImode, but not HImode.  */
  if (GET_CODE (operands[0]) == MEM
      && GET_CODE (operands[1]) != REG)
    {
#if DEBUG_PEEP
      fprintf (stderr, "no peep: move constant to memory\n");
#endif
      return false;
    }

  /* The second move must not depend on the first one's result.  */
  if (rtx_equal_p (operands[0], operands[3]))
    {
#if DEBUG_PEEP
      fprintf (stderr, "no peep: overlapping\n");
#endif
      return false;
    }

  /* Check the destination pair (i == 0) and the source pair (i == 1):
     each pair must be the same kind of operand and form adjacent
     halves of one HImode value.  */
  for (i = 0; i < 2; i ++)
    {
      if (GET_CODE (operands[i]) != GET_CODE (operands[i+2]))
	{
#if DEBUG_PEEP
	  fprintf (stderr, "no peep: different codes\n");
#endif
	  return false;
	}
      if (GET_MODE (operands[i]) != GET_MODE (operands[i+2]))
	{
#if DEBUG_PEEP
	  fprintf (stderr, "no peep: different modes\n");
#endif
	  return false;
	}

      switch (GET_CODE (operands[i]))
	{
	case REG:
	  /* LSB MSB */
	  if (REGNO (operands[i]) + 1 != REGNO (operands[i+2])
	      || GET_MODE (operands[i]) != QImode)
	    {
#if DEBUG_PEEP
	      fprintf (stderr, "no peep: wrong regnos %d %d %d\n",
		       REGNO (operands[i]), REGNO (operands[i+2]),
		       i);
#endif
	      return false;
	    }
	  /* The lower register must also be valid as an HImode reg.  */
	  if (! rl78_hard_regno_mode_ok (REGNO (operands[i]), HImode))
	    {
#if DEBUG_PEEP
	      fprintf (stderr, "no peep: reg %d not HI\n", REGNO (operands[i]));
#endif
	      return false;
	    }
	  break;

	case CONST_INT:
	  break;

	case MEM:
	  /* Memory operands must be QImode, 16-bit aligned, at an even
	     offset, and exactly one byte apart.  */
	  if (GET_MODE (operands[i]) != QImode)
	    return false;
	  if (MEM_ALIGN (operands[i]) < 16)
	    return false;
	  a = XEXP (operands[i], 0);
	  if (GET_CODE (a) == CONST)
	    a = XEXP (a, 0);
	  if (GET_CODE (a) == PLUS)
	    a = XEXP (a, 1);
	  if (GET_CODE (a) == CONST_INT
	      && INTVAL (a) & 1)
	    {
#if DEBUG_PEEP
	      fprintf (stderr, "no peep: misaligned mem %d\n", i);
	      debug_rtx (operands[i]);
#endif
	      return false;
	    }
	  m = adjust_address (operands[i], QImode, 1);
	  if (! rtx_equal_p (m, operands[i+2]))
	    {
#if DEBUG_PEEP
	      fprintf (stderr, "no peep: wrong mem %d\n", i);
	      debug_rtx (m);
	      debug_rtx (operands[i+2]);
#endif
	      return false;
	    }
	  break;

	default:
#if DEBUG_PEEP
	  fprintf (stderr, "no peep: wrong rtx %d\n", i);
#endif
	  return false;
	}
    }
#if DEBUG_PEEP
  fprintf (stderr, "\033[32mpeep!\033[0m\n");
#endif
  return true;
}
2370
2371 /* Likewise, when a peephole is activated, this function helps compute
2372 the new operands. */
2373 void
2374 rl78_setup_peep_movhi (rtx *operands)
2375 {
2376 int i;
2377
2378 for (i = 0; i < 2; i ++)
2379 {
2380 switch (GET_CODE (operands[i]))
2381 {
2382 case REG:
2383 operands[i+4] = gen_rtx_REG (HImode, REGNO (operands[i]));
2384 break;
2385
2386 case CONST_INT:
2387 operands[i+4] = GEN_INT ((INTVAL (operands[i]) & 0xff) + ((char) INTVAL (operands[i+2])) * 256);
2388 break;
2389
2390 case MEM:
2391 operands[i+4] = adjust_address (operands[i], HImode, 0);
2392 break;
2393
2394 default:
2395 break;
2396 }
2397 }
2398 }
2399
2400 /*
2402 How Devirtualization works in the RL78 GCC port
2403
2404 Background
2405
2406 The RL78 is an 8-bit port with some 16-bit operations. It has 32
2407 bytes of register space, in four banks, memory-mapped. One bank is
2408 the "selected" bank and holds the registers used for primary
2409 operations. Since the registers are memory mapped, often you can
2410 still refer to the unselected banks via memory accesses.
2411
2412 Virtual Registers
2413
2414 The GCC port uses bank 0 as the "selected" registers (A, X, BC, etc)
2415 and refers to the other banks via their memory addresses, although
2416 they're treated as regular registers internally. These "virtual"
2417 registers are R8 through R23 (bank3 is reserved for asm-based
2418 interrupt handlers).
2419
2420 There are four machine description files:
2421
2422 rl78.md - common register-independent patterns and definitions
2423 rl78-expand.md - expanders
2424 rl78-virt.md - patterns that match BEFORE devirtualization
2425 rl78-real.md - patterns that match AFTER devirtualization
2426
2427 At least through register allocation and reload, gcc is told that it
2428 can do pretty much anything - but may only use the virtual registers.
2429 GCC cannot properly create the varying addressing modes that the RL78
2430 supports in an efficient way.
2431
2432 Sometime after reload, the RL78 backend "devirtualizes" the RTL. It
2433 uses the "valloc" attribute in rl78-virt.md for determining the rules
2434 by which it will replace virtual registers with real registers (or
2435 not) and how to make up addressing modes. For example, insns tagged
2436 with "ro1" have a single read-only parameter, which may need to be
2437 moved from memory/constant/vreg to a suitable real register. As part
2438 of devirtualization, a flag is toggled, disabling the rl78-virt.md
2439 patterns and enabling the rl78-real.md patterns. The new patterns'
2440 constraints are used to determine the real registers used. NOTE:
   patterns in rl78-virt.md essentially ignore the constraints and rely on
2442 predicates, where the rl78-real.md ones essentially ignore the
2443 predicates and rely on the constraints.
2444
2445 The devirtualization pass is scheduled via the pass manager (despite
2446 being called "rl78_reorg") so it can be scheduled prior to var-track
2447 (the idea is to let gdb know about the new registers). Ideally, it
2448 would be scheduled right after pro/epilogue generation, so the
2449 post-reload optimizers could operate on the real registers, but when I
2450 tried that there were some issues building the target libraries.
2451
2452 During devirtualization, a simple register move optimizer is run. It
   would be better to run a full CSE/propagation pass on it though, but
2454 that has not yet been attempted.
2455
2456 */
/* Set to 1 for verbose stderr tracing of the devirtualization pass.  */
#define DEBUG_ALLOC 0

/* Shorthand for operand X of the insn currently held in recog_data.  */
#define OP(x) (*recog_data.operand_loc[x])

/* This array is used to hold knowledge about the contents of the
   real registers (A ... H), the memory-based registers (r8 ... r31)
   and the first NUM_STACK_LOCS words on the stack.  We use this to
   avoid generating redundant move instructions.

   A value in the range 0 .. 31 indicates register A .. r31.
   A value in the range 32 .. 63 indicates stack slot (value - 32).
   A value of NOT_KNOWN indicates that the contents of that location
   are not known.  */

#define NUM_STACK_LOCS 32
#define NOT_KNOWN 127

/* Tracked contents of registers and low stack slots; see above.  */
static unsigned char content_memory [32 + NUM_STACK_LOCS];

/* A single postponed content update (see add_postponed_content_update).
   saved_update_index == NOT_KNOWN means no update is pending.  */
static unsigned char saved_update_index = NOT_KNOWN;
static unsigned char saved_update_value;
static machine_mode saved_update_mode;
2479
2480
2481 static inline void
2482 clear_content_memory (void)
2483 {
2484 memset (content_memory, NOT_KNOWN, sizeof content_memory);
2485 if (dump_file)
2486 fprintf (dump_file, " clear content memory\n");
2487 saved_update_index = NOT_KNOWN;
2488 }
2489
2490 /* Convert LOC into an index into the content_memory array.
2491 If LOC cannot be converted, return NOT_KNOWN. */
2492
2493 static unsigned char
2494 get_content_index (rtx loc)
2495 {
2496 machine_mode mode;
2497
2498 if (loc == NULL_RTX)
2499 return NOT_KNOWN;
2500
2501 if (REG_P (loc))
2502 {
2503 if (REGNO (loc) < 32)
2504 return REGNO (loc);
2505 return NOT_KNOWN;
2506 }
2507
2508 mode = GET_MODE (loc);
2509
2510 if (! rl78_stack_based_mem (loc, mode))
2511 return NOT_KNOWN;
2512
2513 loc = XEXP (loc, 0);
2514
2515 if (REG_P (loc))
2516 /* loc = MEM (SP) */
2517 return 32;
2518
2519 /* loc = MEM (PLUS (SP, INT)). */
2520 loc = XEXP (loc, 1);
2521
2522 if (INTVAL (loc) < NUM_STACK_LOCS)
2523 return 32 + INTVAL (loc);
2524
2525 return NOT_KNOWN;
2526 }
2527
2528 /* Return a string describing content INDEX in mode MODE.
2529 WARNING: Can return a pointer to a static buffer. */
2530 static const char *
2531 get_content_name (unsigned char index, machine_mode mode)
2532 {
2533 static char buffer [128];
2534
2535 if (index == NOT_KNOWN)
2536 return "Unknown";
2537
2538 if (index > 31)
2539 sprintf (buffer, "stack slot %d", index - 32);
2540 else if (mode == HImode)
2541 sprintf (buffer, "%s%s",
2542 reg_names [index + 1], reg_names [index]);
2543 else
2544 return reg_names [index];
2545
2546 return buffer;
2547 }
2548
#if DEBUG_ALLOC

/* Debug helper: print every tracked location whose contents are known.  */
static void
display_content_memory (FILE * file)
{
  fprintf (file, " Known memory contents:\n");

  for (unsigned int i = 0; i < sizeof content_memory; i++)
    {
      if (content_memory[i] == NOT_KNOWN)
	continue;

      fprintf (file, "  %s contains a copy of ", get_content_name (i, QImode));
      fprintf (file, "%s\n", get_content_name (content_memory [i], QImode));
    }
}
#endif
2566
/* Record that location INDEX now holds a copy of location VAL (and vice
   versa, since a copy relationship is symmetric).  For HImode, also link
   the following byte of each location.  Finally, invalidate any other
   tracked location that previously claimed to hold INDEX or VAL.  */
static void
update_content (unsigned char index, unsigned char val, machine_mode mode)
{
  unsigned int i;

  gcc_assert (index < sizeof content_memory);

  content_memory [index] = val;
  if (val != NOT_KNOWN)
    content_memory [val] = index;

  /* Make the entry in dump_file *before* VAL is increased below.  */
  if (dump_file)
    {
      fprintf (dump_file, " %s now contains ", get_content_name (index, mode));
      if (val == NOT_KNOWN)
	fprintf (dump_file, "Unknown\n");
      else
	fprintf (dump_file, "%s and vice versa\n", get_content_name (val, mode));
    }

  if (mode == HImode)
    {
      /* Link the second bytes of the pair too.  VAL is bumped to the
	 high byte, then restored, so the invalidation loop below sees
	 the low-byte value again.  */
      val = val == NOT_KNOWN ? val : val + 1;

      content_memory [index + 1] = val;
      if (val != NOT_KNOWN)
	{
	  content_memory [val] = index + 1;
	  -- val;
	}
    }

  /* Any other places that had INDEX recorded as their contents are now invalid.  */
  for (i = 0; i < sizeof content_memory; i++)
    {
      /* Skip the location(s) just updated — two bytes each in HImode.  */
      if (i == index
	  || (val != NOT_KNOWN && i == val))
	{
	  if (mode == HImode)
	    ++ i;
	  continue;
	}

      if (content_memory[i] == index
	  || (val != NOT_KNOWN && content_memory[i] == val))
	{
	  content_memory[i] = NOT_KNOWN;

	  if (dump_file)
	    fprintf (dump_file, " %s cleared\n", get_content_name (i, mode));

	  /* Clear the partner byte of an HImode pairing as well.  */
	  if (mode == HImode)
	    content_memory[++ i] = NOT_KNOWN;
	}
    }
}
2624
/* Record that LOC contains VALUE.
   For HImode locations record that LOC+1 contains VALUE+1.
   If LOC is not a register or stack slot, do nothing.
   If VALUE is not a register or stack slot, clear the recorded content.  */

static void
record_content (rtx loc, rtx value)
{
  machine_mode mode;
  unsigned char index;
  unsigned char val;

  /* Untrackable destination: nothing to record.  */
  if ((index = get_content_index (loc)) == NOT_KNOWN)
    return;

  /* Untrackable source yields NOT_KNOWN, which clears the entry.  */
  val = get_content_index (value);

  mode = GET_MODE (loc);

  if (val == index)
    {
      if (! optimize)
	return;

      /* This should not happen when optimizing.  */
#if 1
      /* NOTE(review): when this does trigger, the message goes to raw
	 stderr rather than dump_file — presumably intentional upstream
	 debugging aid; confirm before changing.  */
      fprintf (stderr, "ASSIGNMENT of location to itself detected! [%s]\n",
	       get_content_name (val, mode));
      return;
#else
      gcc_unreachable ();
#endif
    }

  update_content (index, val, mode);
}
2661
2662 /* Returns TRUE if LOC already contains a copy of VALUE. */
2663
2664 static bool
2665 already_contains (rtx loc, rtx value)
2666 {
2667 unsigned char index;
2668 unsigned char val;
2669
2670 if ((index = get_content_index (loc)) == NOT_KNOWN)
2671 return false;
2672
2673 if ((val = get_content_index (value)) == NOT_KNOWN)
2674 return false;
2675
2676 if (content_memory [index] != val)
2677 return false;
2678
2679 if (GET_MODE (loc) == HImode)
2680 return content_memory [index + 1] == val + 1;
2681
2682 return true;
2683 }
2684
2685 bool
2686 rl78_es_addr (rtx addr)
2687 {
2688 if (GET_CODE (addr) == MEM)
2689 addr = XEXP (addr, 0);
2690 if (GET_CODE (addr) != UNSPEC)
2691 return false;
2692 if (XINT (addr, 1) != UNS_ES_ADDR)
2693 return false;
2694 return true;
2695 }
2696
2697 rtx
2698 rl78_es_base (rtx addr)
2699 {
2700 if (GET_CODE (addr) == MEM)
2701 addr = XEXP (addr, 0);
2702 addr = XVECEXP (addr, 0, 1);
2703 if (GET_CODE (addr) == CONST
2704 && GET_CODE (XEXP (addr, 0)) == ZERO_EXTRACT)
2705 addr = XEXP (XEXP (addr, 0), 0);
2706 /* Mode doesn't matter here. */
2707 return gen_rtx_MEM (HImode, addr);
2708 }
2709
/* Rescans an insn to see if it's recognized again.  This is done
   carefully to ensure that all the constraint information is accurate
   for the newly matched insn.  Returns true if INSN matches a real
   (post-devirtualization) pattern and satisfies its constraints.  */
static bool
insn_ok_now (rtx_insn * insn)
{
  rtx pattern = PATTERN (insn);
  int i;

  /* Force re-recognition: the pattern may have been rewritten.  */
  INSN_CODE (insn) = -1;

  if (recog (pattern, insn, 0) > -1)
    {
      extract_insn (insn);
      if (constrain_operands (1, get_preferred_alternatives (insn)))
	{
#if DEBUG_PEEP
	  fprintf (stderr, "\033[32m");
	  debug_rtx (insn);
	  fprintf (stderr, "\033[0m");
#endif
	  /* Track simple moves so redundant later moves can be elided.  */
	  if (SET_P (pattern))
	    record_content (SET_DEST (pattern), SET_SRC (pattern));

	  /* We need to detect far addresses that haven't been
	     converted to es/lo16 format.  */
	  for (i=0; i<recog_data.n_operands; i++)
	    if (GET_CODE (OP (i)) == MEM
		&& GET_MODE (XEXP (OP (i), 0)) == SImode
		&& GET_CODE (XEXP (OP (i), 0)) != UNSPEC)
	      goto not_ok;

	  return true;
	}
    }

  /* INSN is not OK as-is.  It may not be recognized in real mode or
     it might not have satisfied its constraints in real mode.  Either
     way it will require fixups.

     It is vital we always re-recognize at this point as some insns
     have fewer operands in real mode than virtual mode.  If we do
     not re-recognize, then the recog_data will refer to real mode
     operands and we may read invalid data.  Usually this isn't a
     problem, but once in a while the data we read is bogus enough
     to cause a segfault or other undesirable behavior.  */
 not_ok:

  /* We need to re-recog the insn with virtual registers to get
     the operands.  */
  INSN_CODE (insn) = -1;
  cfun->machine->virt_insns_ok = 1;
  if (recog (pattern, insn, 0) > -1)
    {
      extract_insn (insn);
      /* In theory this should always be true.  */
      if (constrain_operands (0, get_preferred_alternatives (insn)))
	{
	  cfun->machine->virt_insns_ok = 0;
	  return false;
	}
    }

#if DEBUG_ALLOC
  fprintf (stderr, "\033[41;30m Unrecognized *virtual* insn \033[0m\n");
  debug_rtx (insn);
#endif
  /* Not recognizable even as a virtual insn: this is a bug.  */
  gcc_unreachable ();
  return false;
}
2780
/* Control-flow helpers for the alloc routines below.  MAYBE_OK returns
   from the caller when INSN now matches a real pattern; MUST_BE_OK
   additionally aborts if it does not.  The DEBUG_ALLOC variants also
   trace progress to stderr.  */
#if DEBUG_ALLOC
#define WORKED fprintf (stderr, "\033[48;5;22m Worked at line %d \033[0m\n", __LINE__)
#define FAILEDSOFAR fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__)
#define FAILED fprintf (stderr, "\033[48;5;52m FAILED at line %d \033[0m\n", __LINE__), gcc_unreachable ()
#define MAYBE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } else { FAILEDSOFAR; }
#define MUST_BE_OK(insn) if (insn_ok_now (insn)) { WORKED; return; } FAILED
#else
#define FAILED gcc_unreachable ()
#define MAYBE_OK(insn) if (insn_ok_now (insn)) return;
#define MUST_BE_OK(insn) if (insn_ok_now (insn)) return; FAILED
#endif

/* Registers into which we move the contents of virtual registers.  */
/* Byte (QImode) registers.  */
#define X gen_rtx_REG (QImode, X_REG)
#define A gen_rtx_REG (QImode, A_REG)
#define C gen_rtx_REG (QImode, C_REG)
#define B gen_rtx_REG (QImode, B_REG)
#define E gen_rtx_REG (QImode, E_REG)
#define D gen_rtx_REG (QImode, D_REG)
#define L gen_rtx_REG (QImode, L_REG)
#define H gen_rtx_REG (QImode, H_REG)

/* Word (HImode) register pairs.  */
#define AX gen_rtx_REG (HImode, AX_REG)
#define BC gen_rtx_REG (HImode, BC_REG)
#define DE gen_rtx_REG (HImode, DE_REG)
#define HL gen_rtx_REG (HImode, HL_REG)
2807
2808 /* Returns TRUE if R is a virtual register. */
2809 static inline bool
2810 is_virtual_register (rtx r)
2811 {
2812 return (GET_CODE (r) == REG
2813 && REGNO (r) >= 8
2814 && REGNO (r) < 32);
2815 }
2816
/* In all these alloc routines, we expect the following: the insn
   pattern is unshared, the insn was previously recognized and failed
   due to predicates or constraints, and the operand data is in
   recog_data.  */

/* Non-zero while devirtualizing a frame-related insn; EM2 propagates
   the flag onto every replacement insn it sees.  */
static int virt_insn_was_frame;

/* Hook for all insns we emit.  Re-mark them as FRAME_RELATED if
   needed.  */
static rtx
EM2 (int line ATTRIBUTE_UNUSED, rtx r)
{
#if DEBUG_ALLOC
  fprintf (stderr, "\033[36m%d: ", line);
  debug_rtx (r);
  fprintf (stderr, "\033[0m");
#endif
  /*SCHED_GROUP_P (r) = 1;*/
  if (virt_insn_was_frame)
    RTX_FRAME_RELATED_P (r) = 1;
  return r;
}

/* Wrapper that records the source line for DEBUG_ALLOC tracing.  */
#define EM(x) EM2 (__LINE__, x)
2841
2842 /* Return a suitable RTX for the low half of a __far address. */
2843 static rtx
2844 rl78_lo16 (rtx addr)
2845 {
2846 rtx r;
2847
2848 if (GET_CODE (addr) == SYMBOL_REF
2849 || GET_CODE (addr) == CONST)
2850 {
2851 r = gen_rtx_ZERO_EXTRACT (HImode, addr, GEN_INT (16), GEN_INT (0));
2852 r = gen_rtx_CONST (HImode, r);
2853 }
2854 else
2855 r = rl78_subreg (HImode, addr, SImode, 0);
2856
2857 r = gen_es_addr (r);
2858 cfun->machine->uses_es = true;
2859
2860 return r;
2861 }
2862
2863 /* Return a suitable RTX for the high half's lower byte of a __far address. */
2864 static rtx
2865 rl78_hi8 (rtx addr)
2866 {
2867 if (GET_CODE (addr) == SYMBOL_REF
2868 || GET_CODE (addr) == CONST)
2869 {
2870 rtx r = gen_rtx_ZERO_EXTRACT (QImode, addr, GEN_INT (8), GEN_INT (16));
2871 r = gen_rtx_CONST (QImode, r);
2872 return r;
2873 }
2874 return rl78_subreg (QImode, addr, SImode, 2);
2875 }
2876
2877 static void
2878 add_postponed_content_update (rtx to, rtx value)
2879 {
2880 unsigned char index;
2881
2882 if ((index = get_content_index (to)) == NOT_KNOWN)
2883 return;
2884
2885 gcc_assert (saved_update_index == NOT_KNOWN);
2886 saved_update_index = index;
2887 saved_update_value = get_content_index (value);
2888 saved_update_mode = GET_MODE (to);
2889 }
2890
2891 static void
2892 process_postponed_content_update (void)
2893 {
2894 if (saved_update_index != NOT_KNOWN)
2895 {
2896 update_content (saved_update_index, saved_update_value, saved_update_mode);
2897 saved_update_index = NOT_KNOWN;
2898 }
2899 }
2900
/* Generate and emit a move of (register) FROM into TO.  if WHERE is not NULL
   then if BEFORE is true then emit the insn before WHERE, otherwise emit it
   after WHERE.  If TO already contains FROM then do nothing.  Returns TO if
   BEFORE is true, FROM otherwise.  */
static rtx
gen_and_emit_move (rtx to, rtx from, rtx_insn *where, bool before)
{
  machine_mode mode = GET_MODE (to);

  /* Elide the move if the tracker proves TO already holds FROM.  Only
     safe for "before" emission — an "after" move must still happen.  */
  if (optimize && before && already_contains (to, from))
    {
#if DEBUG_ALLOC
      display_content_memory (stderr);
#endif
      if (dump_file)
	{
	  fprintf (dump_file, " Omit move of %s into ",
		   get_content_name (get_content_index (from), mode));
	  fprintf (dump_file, "%s as it already contains this value\n",
		   get_content_name (get_content_index (to), mode));
	}
    }
  else
    {
      rtx move = mode == QImode ? gen_movqi (to, from) : gen_movhi (to, from);

      EM (move);

      if (where == NULL_RTX)
	emit_insn (move);
      else if (before)
	emit_insn_before (move, where);
      else
	{
	  rtx note = find_reg_note (where, REG_EH_REGION, NULL_RTX);

	  /* If necessary move REG_EH_REGION notes forward.
	     cf. compiling gcc.dg/pr44545.c.  */
	  if (note != NULL_RTX)
	    {
	      add_reg_note (move, REG_EH_REGION, XEXP (note, 0));
	      remove_note (where, note);
	    }

	  emit_insn_after (move, where);
	}

      if (before)
	record_content (to, from);
      else
	/* WHERE has not been processed yet, so defer the tracker
	   update until afterwards.  */
	add_postponed_content_update (to, from);
    }

  return before ? to : from;
}
2956
/* If M is MEM(REG) or MEM(PLUS(REG,INT)) and REG is virtual then
   copy it into NEWBASE and return the updated MEM.  Otherwise just
   return M.  Any needed insns are emitted before BEFORE.  */
static rtx
transcode_memory_rtx (rtx m, rtx newbase, rtx_insn *before)
{
  rtx base, index, addendr;
  int addend = 0;
  int need_es = 0;

  if (! MEM_P (m))
    return m;

  /* An SImode address is a __far access: load the segment byte into
     ES and reduce the address to its 16-bit low part.  */
  if (GET_MODE (XEXP (m, 0)) == SImode)
    {
      rtx new_m;
      rtx seg = rl78_hi8 (XEXP (m, 0));

      if (!TARGET_ES0)
	{
	  emit_insn_before (EM (gen_movqi (A, seg)), before);
	  emit_insn_before (EM (gen_movqi_to_es (A)), before);
	}

      record_content (A, NULL_RTX);

      new_m = gen_rtx_MEM (GET_MODE (m), rl78_lo16 (XEXP (m, 0)));
      MEM_COPY_ATTRIBUTES (new_m, m);
      m = new_m;
      need_es = 1;
    }

  characterize_address (XEXP (m, 0), & base, & index, & addendr);
  gcc_assert (index == NULL_RTX);

  if (base == NULL_RTX)
    return m;

  if (addendr && GET_CODE (addendr) == CONST_INT)
    addend = INTVAL (addendr);

  gcc_assert (REG_P (base));
  gcc_assert (REG_P (newbase));

  /* Largest offset that still keeps the whole access within the
     256-byte short-addressing range.  */
  int limit = 256 - GET_MODE_SIZE (GET_MODE (m));

  if (REGNO (base) == SP_REG)
    {
      /* In-range SP-relative accesses are fine as they are.  */
      if (addend >= 0 && addend <= limit)
	return m;
    }

  /* BASE should be a virtual register.  We copy it to NEWBASE.  If
     the addend is out of range for DE/HL, we use AX to compute the full
     address.  */

  if (addend < 0
      || (addend > limit && REGNO (newbase) != BC_REG)
      || (addendr
	  && (GET_CODE (addendr) != CONST_INT)
	  && ((REGNO (newbase) != BC_REG))
	  ))
    {
      /* mov ax, vreg
	 add ax, #imm
	 mov hl, ax	*/
      EM (emit_insn_before (gen_movhi (AX, base), before));
      EM (emit_insn_before (gen_addhi3 (AX, AX, addendr), before));
      EM (emit_insn_before (gen_movhi (newbase, AX), before));
      record_content (AX, NULL_RTX);
      record_content (newbase, NULL_RTX);

      base = newbase;
      addend = 0;
      addendr = 0;
    }
  else
    {
      /* Simple case: copy the virtual base into the real register.  */
      base = gen_and_emit_move (newbase, base, before, true);
    }

  /* Re-attach any remaining offset to the new base register.  */
  if (addend)
    {
      record_content (base, NULL_RTX);
      base = gen_rtx_PLUS (HImode, base, GEN_INT (addend));
    }
  else if (addendr)
    {
      record_content (base, NULL_RTX);
      base = gen_rtx_PLUS (HImode, base, addendr);
    }

  if (need_es)
    {
      m = change_address (m, GET_MODE (m), gen_es_addr (base));
      cfun->machine->uses_es = true;
    }
  else
    m = change_address (m, GET_MODE (m), base);
  return m;
}
3058
3059 /* Copy SRC to accumulator (A or AX), placing any generated insns
3060 before BEFORE. Returns accumulator RTX. */
3061 static rtx
3062 move_to_acc (int opno, rtx_insn *before)
3063 {
3064 rtx src = OP (opno);
3065 machine_mode mode = GET_MODE (src);
3066
3067 if (REG_P (src) && REGNO (src) < 2)
3068 return src;
3069
3070 if (mode == VOIDmode)
3071 mode = recog_data.operand_mode[opno];
3072
3073 return gen_and_emit_move (mode == QImode ? A : AX, src, before, true);
3074 }
3075
3076 static void
3077 force_into_acc (rtx src, rtx_insn *before)
3078 {
3079 machine_mode mode = GET_MODE (src);
3080 rtx move;
3081
3082 if (REG_P (src) && REGNO (src) < 2)
3083 return;
3084
3085 move = mode == QImode ? gen_movqi (A, src) : gen_movhi (AX, src);
3086
3087 EM (move);
3088
3089 emit_insn_before (move, before);
3090 record_content (AX, NULL_RTX);
3091 }
3092
3093 /* Copy accumulator (A or AX) to DEST, placing any generated insns
3094 after AFTER. Returns accumulator RTX. */
3095 static rtx
3096 move_from_acc (unsigned int opno, rtx_insn *after)
3097 {
3098 rtx dest = OP (opno);
3099 machine_mode mode = GET_MODE (dest);
3100
3101 if (REG_P (dest) && REGNO (dest) < 2)
3102 return dest;
3103
3104 return gen_and_emit_move (dest, mode == QImode ? A : AX, after, false);
3105 }
3106
3107 /* Copy accumulator (A or AX) to REGNO, placing any generated insns
3108 before BEFORE. Returns reg RTX. */
3109 static rtx
3110 move_acc_to_reg (rtx acc, int regno, rtx_insn *before)
3111 {
3112 machine_mode mode = GET_MODE (acc);
3113 rtx reg;
3114
3115 reg = gen_rtx_REG (mode, regno);
3116
3117 return gen_and_emit_move (reg, acc, before, true);
3118 }
3119
3120 /* Copy SRC to X, placing any generated insns before BEFORE.
3121 Returns X RTX. */
3122 static rtx
3123 move_to_x (int opno, rtx_insn *before)
3124 {
3125 rtx src = OP (opno);
3126 machine_mode mode = GET_MODE (src);
3127 rtx reg;
3128
3129 if (mode == VOIDmode)
3130 mode = recog_data.operand_mode[opno];
3131 reg = (mode == QImode) ? X : AX;
3132
3133 if (mode == QImode || ! is_virtual_register (OP (opno)))
3134 {
3135 OP (opno) = move_to_acc (opno, before);
3136 OP (opno) = move_acc_to_reg (OP (opno), X_REG, before);
3137 return reg;
3138 }
3139
3140 return gen_and_emit_move (reg, src, before, true);
3141 }
3142
3143 /* Copy OP (opno) to H or HL, placing any generated insns before BEFORE.
3144 Returns H/HL RTX. */
3145 static rtx
3146 move_to_hl (int opno, rtx_insn *before)
3147 {
3148 rtx src = OP (opno);
3149 machine_mode mode = GET_MODE (src);
3150 rtx reg;
3151
3152 if (mode == VOIDmode)
3153 mode = recog_data.operand_mode[opno];
3154 reg = (mode == QImode) ? L : HL;
3155
3156 if (mode == QImode || ! is_virtual_register (OP (opno)))
3157 {
3158 OP (opno) = move_to_acc (opno, before);
3159 OP (opno) = move_acc_to_reg (OP (opno), L_REG, before);
3160 return reg;
3161 }
3162
3163 return gen_and_emit_move (reg, src, before, true);
3164 }
3165
3166 /* Copy OP (opno) to E or DE, placing any generated insns before BEFORE.
3167 Returns E/DE RTX. */
3168 static rtx
3169 move_to_de (int opno, rtx_insn *before)
3170 {
3171 rtx src = OP (opno);
3172 machine_mode mode = GET_MODE (src);
3173 rtx reg;
3174
3175 if (mode == VOIDmode)
3176 mode = recog_data.operand_mode[opno];
3177
3178 reg = (mode == QImode) ? E : DE;
3179
3180 if (mode == QImode || ! is_virtual_register (OP (opno)))
3181 {
3182 OP (opno) = move_to_acc (opno, before);
3183 OP (opno) = move_acc_to_reg (OP (opno), E_REG, before);
3184 }
3185 else
3186 {
3187 gen_and_emit_move (reg, src, before, true);
3188 }
3189
3190 return reg;
3191 }
3192
/* Devirtualize an insn of the form (SET (op) (unop (op))).  */
static void
rl78_alloc_physical_registers_op1 (rtx_insn * insn)
{
  /* op[0] = func op[1] */

  /* We first try using A as the destination, then copying it
     back.  */
  if (rtx_equal_p (OP (0), OP (1)))
    {
      /* Destination and source are the same; one base register (DE)
	 covers both.  */
      OP (0) =
      OP (1) = transcode_memory_rtx (OP (1), DE, insn);
    }
  else
    {
      /* If necessary, load the operands into BC and HL.
	 Check to see if we already have OP (0) in HL
	 and if so, swap the order.

	 It is tempting to perform this optimization when OP(0) does
	 not hold a MEM, but this leads to bigger code in general.
	 The problem is that if OP(1) holds a MEM then swapping it
	 into BC means a BC-relative load is used and these are 3
	 bytes long vs 1 byte for an HL load.  */
      if (MEM_P (OP (0))
	  && already_contains (HL, XEXP (OP (0), 0)))
	{
	  OP (0) = transcode_memory_rtx (OP (0), HL, insn);
	  OP (1) = transcode_memory_rtx (OP (1), BC, insn);
	}
      else
	{
	  OP (0) = transcode_memory_rtx (OP (0), BC, insn);
	  OP (1) = transcode_memory_rtx (OP (1), HL, insn);
	}
    }

  MAYBE_OK (insn);

  /* Not matched yet: route the destination through the accumulator.  */
  OP (0) = move_from_acc (0, insn);

  MAYBE_OK (insn);

  /* Try copying the src to acc first, then.  This is for, for
     example, ZERO_EXTEND or NOT.  */
  OP (1) = move_to_acc (1, insn);

  MUST_BE_OK (insn);
}
3242
3243 /* Returns true if operand OPNUM contains a constraint of type CONSTRAINT.
3244 Assumes that the current insn has already been recognised and hence the
3245 constraint data has been filled in. */
3246 static bool
3247 has_constraint (unsigned int opnum, enum constraint_num constraint)
3248 {
3249 const char * p = recog_data.constraints[opnum];
3250
3251 /* No constraints means anything is accepted. */
3252 if (p == NULL || *p == 0 || *p == ',')
3253 return true;
3254
3255 do
3256 {
3257 char c;
3258 unsigned int len;
3259
3260 c = *p;
3261 len = CONSTRAINT_LEN (c, p);
3262 gcc_assert (len > 0);
3263
3264 switch (c)
3265 {
3266 case 0:
3267 case ',':
3268 return false;
3269 default:
3270 if (lookup_constraint (p) == constraint)
3271 return true;
3272 }
3273 p += len;
3274 }
3275 while (1);
3276 }
3277
/* Devirtualize an insn of the form (SET (op) (binop (op) (op))).  */
static void
rl78_alloc_physical_registers_op2 (rtx_insn * insn)
{
  rtx_insn *prev;
  rtx_insn *first;
  bool hl_used;
  int tmp_id;
  rtx saved_op1;

  /* Give each memory operand a real base register, chosen so that
     aliased dest/source operands share the same one.  */
  if (rtx_equal_p (OP (0), OP (1)))
    {
      if (MEM_P (OP (2)))
	{
	  OP (0) =
	  OP (1) = transcode_memory_rtx (OP (1), DE, insn);
	  OP (2) = transcode_memory_rtx (OP (2), HL, insn);
	}
      else
	{
	  OP (0) =
	  OP (1) = transcode_memory_rtx (OP (1), HL, insn);
	  OP (2) = transcode_memory_rtx (OP (2), DE, insn);
	}
    }
  else if (rtx_equal_p (OP (0), OP (2)))
    {
      OP (1) = transcode_memory_rtx (OP (1), DE, insn);
      OP (0) =
      OP (2) = transcode_memory_rtx (OP (2), HL, insn);
    }
  else
    {
      OP (0) = transcode_memory_rtx (OP (0), BC, insn);
      OP (1) = transcode_memory_rtx (OP (1), DE, insn);
      OP (2) = transcode_memory_rtx (OP (2), HL, insn);
    }

  MAYBE_OK (insn);

  prev = prev_nonnote_nondebug_insn (insn);
  /* For a commutative operator ('%' in the constraint string), prefer
     having the harder-to-reload operand in the OP (1) slot.  */
  if (recog_data.constraints[1][0] == '%'
      && is_virtual_register (OP (1))
      && ! is_virtual_register (OP (2))
      && ! CONSTANT_P (OP (2)))
    {
      rtx tmp = OP (1);
      OP (1) = OP (2);
      OP (2) = tmp;
    }

  /* Make a note of whether (H)L is being used.  It matters
     because if OP (2) also needs reloading, then we must take
     care not to corrupt HL.  */
  hl_used = reg_mentioned_p (L, OP (0)) || reg_mentioned_p (L, OP (1));

  /* If HL is not currently being used and dest == op1 then there are
     some possible optimizations available by reloading one of the
     operands into HL, before trying to use the accumulator.  */
  if (optimize
      && ! hl_used
      && rtx_equal_p (OP (0), OP (1)))
    {
      /* If op0 is a Ws1 type memory address then switching the base
	 address register to HL might allow us to perform an in-memory
	 operation.  (eg for the INCW instruction).

	 FIXME: Adding the move into HL is costly if this optimization is not
	 going to work, so for now, make sure that we know that the new insn will
	 match the requirements of the addhi3_real pattern.  Really we ought to
	 generate a candidate sequence, test that, and then install it if the
	 results are good.  */
      if (satisfies_constraint_Ws1 (OP (0))
	  && has_constraint (0, CONSTRAINT_Wh1)
	  && (satisfies_constraint_K (OP (2)) || satisfies_constraint_L (OP (2))))
	{
	  rtx base, index, addend, newbase;

	  characterize_address (XEXP (OP (0), 0), & base, & index, & addend);
	  gcc_assert (index == NULL_RTX);
	  gcc_assert (REG_P (base) && REGNO (base) == SP_REG);

	  /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset.  */
	  if (addend != NULL_RTX)
	    {
	      newbase = gen_and_emit_move (HL, base, insn, true);
	      record_content (newbase, NULL_RTX);
	      newbase = gen_rtx_PLUS (HImode, newbase, addend);

	      OP (0) = OP (1) = change_address (OP (0), VOIDmode, newbase);

	      /* We do not want to fail here as this means that
		 we have inserted useless insns into the stream.  */
	      MUST_BE_OK (insn);
	    }
	}
      else if (REG_P (OP (0))
	       && satisfies_constraint_Ws1 (OP (2))
	       && has_constraint (2, CONSTRAINT_Wh1))
	{
	  rtx base, index, addend, newbase;

	  characterize_address (XEXP (OP (2), 0), & base, & index, & addend);
	  gcc_assert (index == NULL_RTX);
	  gcc_assert (REG_P (base) && REGNO (base) == SP_REG);

	  /* Ws1 addressing allows an offset of 0, Wh1 addressing requires a non-zero offset.  */
	  if (addend != NULL_RTX)
	    {
	      gen_and_emit_move (HL, base, insn, true);

	      if (REGNO (OP (0)) != X_REG)
		{
		  OP (1) = move_to_acc (1, insn);
		  OP (0) = move_from_acc (0, insn);
		}

	      record_content (HL, NULL_RTX);
	      newbase = gen_rtx_PLUS (HImode, HL, addend);

	      OP (2) = change_address (OP (2), VOIDmode, newbase);

	      /* We do not want to fail here as this means that
		 we have inserted useless insns into the stream.  */
	      MUST_BE_OK (insn);
	    }
	}
    }

  OP (0) = move_from_acc (0, insn);

  /* Remember the insn count so we can tell whether move_to_acc below
     actually emitted anything or was elided.  */
  tmp_id = get_max_insn_count ();
  saved_op1 = OP (1);

  if (rtx_equal_p (OP (1), OP (2)))
    OP (2) = OP (1) = move_to_acc (1, insn);
  else
    OP (1) = move_to_acc (1, insn);

  MAYBE_OK (insn);

  /* If we omitted the move of OP1 into the accumulator (because
     it was already there from a previous insn), then force the
     generation of the move instruction now.  We know that we
     are about to emit a move into HL (or DE) via AX, and hence
     our optimization to remove the load of OP1 is no longer valid.  */
  if (tmp_id == get_max_insn_count ())
    force_into_acc (saved_op1, insn);

  /* We have to copy op2 to HL (or DE), but that involves AX, which
     already has a live value.  Emit it before those insns.  */

  if (prev)
    first = next_nonnote_nondebug_insn (prev);
  else
    /* No previous insn: walk back to the first insn in the stream.  */
    for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
      ;

  OP (2) = hl_used ? move_to_de (2, first) : move_to_hl (2, first);

  MUST_BE_OK (insn);
}
3440
/* Devirtualize an insn of the form SET (PC) (MEM/REG).  */
static void
rl78_alloc_physical_registers_ro1 (rtx_insn * insn)
{
  /* Give the single read-only operand a real base register first.  */
  OP (0) = transcode_memory_rtx (OP (0), BC, insn);

  MAYBE_OK (insn);

  /* Still no match: load the operand into the accumulator.  */
  OP (0) = move_to_acc (0, insn);

  MUST_BE_OK (insn);
}
3453
/* Devirtualize a compare insn.  */
static void
rl78_alloc_physical_registers_cmp (rtx_insn * insn)
{
  int tmp_id;
  rtx saved_op1;
  rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
  rtx_insn *first;

  /* Rewrite virtual MEM operands so they are addressable through the
     DE and HL pointer registers.  */
  OP (1) = transcode_memory_rtx (OP (1), DE, insn);
  OP (2) = transcode_memory_rtx (OP (2), HL, insn);

  /* HI compares have to have OP (1) in AX, but QI
     compares do not, so it is worth checking here.  */
  MAYBE_OK (insn);

  /* For an HImode compare, OP (1) must always be in AX.
     But if OP (1) is a REG (and not AX), then we can avoid
     a reload of OP (1) if we reload OP (2) into AX and invert
     the comparison.  */
  if (REG_P (OP (1))
      && REGNO (OP (1)) != AX_REG
      && GET_MODE (OP (1)) == HImode
      && MEM_P (OP (2)))
    {
      rtx cmp = XEXP (SET_SRC (PATTERN (insn)), 0);

      OP (2) = move_to_acc (2, insn);

      /* Swap the comparison code to match the swapped operands;
	 EQ and NE are symmetric and need no change.  */
      switch (GET_CODE (cmp))
	{
	case EQ:
	case NE:
	  break;
	case LTU: cmp = gen_rtx_GTU (HImode, OP (2), OP (1)); break;
	case GTU: cmp = gen_rtx_LTU (HImode, OP (2), OP (1)); break;
	case LEU: cmp = gen_rtx_GEU (HImode, OP (2), OP (1)); break;
	case GEU: cmp = gen_rtx_LEU (HImode, OP (2), OP (1)); break;

	/* Signed comparisons are not expected to reach this point.  */
	case LT:
	case GT:
	case LE:
	case GE:
#if DEBUG_ALLOC
	  debug_rtx (insn);
#endif
	default:
	  gcc_unreachable ();
	}

      if (GET_CODE (cmp) == EQ || GET_CODE (cmp) == NE)
	PATTERN (insn) = gen_cbranchhi4_real (cmp, OP (2), OP (1), OP (3));
      else
	PATTERN (insn) = gen_cbranchhi4_real_inverted (cmp, OP (2), OP (1), OP (3));

      MUST_BE_OK (insn);
    }

  /* Surprisingly, gcc can generate a comparison of a register with itself, but this
     should be handled by the second alternative of the cbranchhi_real pattern.  */
  if (rtx_equal_p (OP (1), OP (2)))
    {
      OP (1) = OP (2) = BC;
      MUST_BE_OK (insn);
    }

  /* Remember the insn count so we can tell below whether move_to_acc
     actually emitted anything.  */
  tmp_id = get_max_insn_count ();
  saved_op1 = OP (1);

  OP (1) = move_to_acc (1, insn);

  MAYBE_OK (insn);

  /* If we omitted the move of OP1 into the accumulator (because
     it was already there from a previous insn), then force the
     generation of the move instruction now.  We know that we
     are about to emit a move into HL via AX, and hence our
     optimization to remove the load of OP1 is no longer valid.  */
  if (tmp_id == get_max_insn_count ())
    force_into_acc (saved_op1, insn);

  /* We have to copy op2 to HL, but that involves the acc, which
     already has a live value.  Emit it before those insns.  */
  if (prev)
    first = next_nonnote_nondebug_insn (prev);
  else
    /* INSN had no predecessor; walk back to the very first insn.  */
    for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
      ;
  OP (2) = move_to_hl (2, first);

  MUST_BE_OK (insn);
}
3546
/* Like op2, but AX = A * X.  */
static void
rl78_alloc_physical_registers_umul (rtx_insn * insn)
{
  rtx_insn *prev = prev_nonnote_nondebug_insn (insn);
  rtx_insn *first;
  int tmp_id;
  rtx saved_op1;

  /* Rewrite virtual MEM operands through the BC/DE/HL pointer regs.  */
  OP (0) = transcode_memory_rtx (OP (0), BC, insn);
  OP (1) = transcode_memory_rtx (OP (1), DE, insn);
  OP (2) = transcode_memory_rtx (OP (2), HL, insn);

  MAYBE_OK (insn);

  /* The first input is commutative ('%' constraint); prefer the
     non-virtual, non-constant operand as OP (1) so the accumulator
     load below comes from a real register.  */
  if (recog_data.constraints[1][0] == '%'
      && is_virtual_register (OP (1))
      && !is_virtual_register (OP (2))
      && !CONSTANT_P (OP (2)))
    {
      rtx tmp = OP (1);
      OP (1) = OP (2);
      OP (2) = tmp;
    }

  OP (0) = move_from_acc (0, insn);

  /* Remember the insn count so we can tell below whether move_to_acc
     actually emitted anything.  */
  tmp_id = get_max_insn_count ();
  saved_op1 = OP (1);

  if (rtx_equal_p (OP (1), OP (2)))
    {
      gcc_assert (GET_MODE (OP (2)) == QImode);
      /* The MULU instruction does not support duplicate arguments
	 but we know that if we copy OP (2) to X it will do so via
	 A and thus OP (1) will already be loaded into A.  */
      OP (2) = move_to_x (2, insn);
      OP (1) = A;
    }
  else
    OP (1) = move_to_acc (1, insn);

  MAYBE_OK (insn);

  /* If we omitted the move of OP1 into the accumulator (because
     it was already there from a previous insn), then force the
     generation of the move instruction now.  We know that we
     are about to emit a move into HL (or DE) via AX, and hence
     our optimization to remove the load of OP1 is no longer valid.  */
  if (tmp_id == get_max_insn_count ())
    force_into_acc (saved_op1, insn);

  /* We have to copy op2 to X, but that involves the acc, which
     already has a live value.  Emit it before those insns.  */

  if (prev)
    first = next_nonnote_nondebug_insn (prev);
  else
    for (first = insn; prev_nonnote_nondebug_insn (first); first = prev_nonnote_nondebug_insn (first))
      ;
  OP (2) = move_to_x (2, first);

  MUST_BE_OK (insn);
}
3611
/* Devirtualize the address registers of a MAC-style insn.  Up to three
   MEM operands may appear; each distinct MEM is rewritten to use a
   different pointer register (HL, DE, BC in order of discovery), while
   duplicate MEMs are made to share the same rewritten address.  */
static void
rl78_alloc_address_registers_macax (rtx_insn * insn)
{
  int which, op;
  bool replace_in_op0 = false;
  bool replace_in_op1 = false;

  MAYBE_OK (insn);

  /* Two different MEMs are not allowed.  */
  which = 0;
  for (op = 2; op >= 0; op --)
    {
      if (MEM_P (OP (op)))
	{
	  /* Skip operands already rewritten as duplicates of a
	     later-visited MEM.  */
	  if (op == 0 && replace_in_op0)
	    continue;
	  if (op == 1 && replace_in_op1)
	    continue;

	  switch (which)
	    {
	    case 0:
	      /* If we replace a MEM, make sure that we replace it for all
		 occurrences of the same MEM in the insn.  */
	      replace_in_op0 = (op > 0 && rtx_equal_p (OP (op), OP (0)));
	      replace_in_op1 = (op > 1 && rtx_equal_p (OP (op), OP (1)));

	      OP (op) = transcode_memory_rtx (OP (op), HL, insn);
	      /* SP-relative addresses cannot be used here; copy SP into
		 HL first and rewrite the address to use HL instead.  */
	      if (op == 2
		  && MEM_P (OP (op))
		  && ((GET_CODE (XEXP (OP (op), 0)) == REG
		       && REGNO (XEXP (OP (op), 0)) == SP_REG)
		      || (GET_CODE (XEXP (OP (op), 0)) == PLUS
			  && REGNO (XEXP (XEXP (OP (op), 0), 0)) == SP_REG)))
		{
		  emit_insn_before (gen_movhi (HL, gen_rtx_REG (HImode, SP_REG)), insn);
		  OP (op) = replace_rtx (OP (op), gen_rtx_REG (HImode, SP_REG), HL);
		}
	      if (replace_in_op0)
		OP (0) = OP (op);
	      if (replace_in_op1)
		OP (1) = OP (op);
	      break;
	    case 1:
	      OP (op) = transcode_memory_rtx (OP (op), DE, insn);
	      break;
	    case 2:
	      OP (op) = transcode_memory_rtx (OP (op), BC, insn);
	      break;
	    }
	  which ++;
	}
    }

  MUST_BE_OK (insn);
}
3669
/* Devirtualize a hardware-divide insn.  The divide patterns use fixed
   registers, so there is nothing to rewrite here; the insn is simply
   required to match as-is.  */
static void
rl78_alloc_address_registers_div (rtx_insn * insn)
{
  MUST_BE_OK (insn);
}
3675
/* Scan all insns and devirtualize them.  */
static void
rl78_alloc_physical_registers (void)
{
  /* During most of the compile, gcc is dealing with virtual
     registers.  At this point, we need to assign physical registers
     to the vitual ones, and copy in/out as needed.  */

  rtx_insn *insn, *curr;
  enum attr_valloc valloc_method;

  /* First pass: make sure every SET/CALL insn is recognized, so that
     get_attr_valloc can be queried below.  */
  for (insn = get_insns (); insn; insn = curr)
    {
      int i;

      curr = next_nonnote_nondebug_insn (insn);

      if (INSN_P (insn)
	  && (GET_CODE (PATTERN (insn)) == SET
	      || GET_CODE (PATTERN (insn)) == CALL)
	  && INSN_CODE (insn) == -1)
	{
	  if (GET_CODE (SET_SRC (PATTERN (insn))) == ASM_OPERANDS)
	    continue;
	  i = recog (PATTERN (insn), insn, 0);
	  if (i == -1)
	    {
	      debug_rtx (insn);
	      gcc_unreachable ();
	    }
	  INSN_CODE (insn) = i;
	}
    }

  /* From here on only real (physical-register) insns may be emitted.  */
  cfun->machine->virt_insns_ok = 0;
  cfun->machine->real_insns_ok = 1;

  clear_content_memory ();

  /* Second pass: rewrite each virtual insn into real-register form,
     tracking known register contents to elide redundant moves.  */
  for (insn = get_insns (); insn; insn = curr)
    {
      rtx pattern;

      curr = insn ? next_nonnote_nondebug_insn (insn) : NULL;

      if (!INSN_P (insn))
	{
	  /* Control can reach a label from elsewhere, so any tracked
	     register contents are no longer trustworthy.  */
	  if (LABEL_P (insn))
	    clear_content_memory ();

	  continue;
	}

      if (dump_file)
	fprintf (dump_file, "Converting insn %d\n", INSN_UID (insn));

      pattern = PATTERN (insn);
      if (GET_CODE (pattern) == PARALLEL)
	pattern = XVECEXP (pattern, 0, 0);
      if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
	clear_content_memory ();
      if (GET_CODE (pattern) != SET
	  && GET_CODE (pattern) != CALL)
	continue;
      if (GET_CODE (pattern) == SET
	  && GET_CODE (SET_SRC (pattern)) == ASM_OPERANDS)
	continue;

      valloc_method = get_attr_valloc (insn);

      PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

      /* Invalidate tracked contents for registers the pattern
	 implicitly clobbers.  */
      if (valloc_method == VALLOC_MACAX)
	{
	  record_content (AX, NULL_RTX);
	  record_content (BC, NULL_RTX);
	  record_content (DE, NULL_RTX);
	}
      else if (valloc_method == VALLOC_DIVHI)
	{
	  record_content (AX, NULL_RTX);
	  record_content (BC, NULL_RTX);
	}
      else if (valloc_method == VALLOC_DIVSI)
	{
	  record_content (AX, NULL_RTX);
	  record_content (BC, NULL_RTX);
	  record_content (DE, NULL_RTX);
	  record_content (HL, NULL_RTX);
	}

      if (insn_ok_now (insn))
	continue;

      INSN_CODE (insn) = -1;

      /* Moves emitted while devirtualizing a frame-related insn must
	 themselves be marked frame-related.  */
      if (RTX_FRAME_RELATED_P (insn))
	virt_insn_was_frame = 1;
      else
	virt_insn_was_frame = 0;

      /* Dispatch on the insn's register-allocation strategy.  */
      switch (valloc_method)
	{
	case VALLOC_OP1:
	  rl78_alloc_physical_registers_op1 (insn);
	  break;
	case VALLOC_OP2:
	  rl78_alloc_physical_registers_op2 (insn);
	  break;
	case VALLOC_RO1:
	  rl78_alloc_physical_registers_ro1 (insn);
	  break;
	case VALLOC_CMP:
	  rl78_alloc_physical_registers_cmp (insn);
	  break;
	case VALLOC_UMUL:
	  rl78_alloc_physical_registers_umul (insn);
	  record_content (AX, NULL_RTX);
	  break;
	case VALLOC_MACAX:
	  /* Macro that clobbers AX.  */
	  rl78_alloc_address_registers_macax (insn);
	  record_content (AX, NULL_RTX);
	  record_content (BC, NULL_RTX);
	  record_content (DE, NULL_RTX);
	  break;
	case VALLOC_DIVSI:
	  rl78_alloc_address_registers_div (insn);
	  record_content (AX, NULL_RTX);
	  record_content (BC, NULL_RTX);
	  record_content (DE, NULL_RTX);
	  record_content (HL, NULL_RTX);
	  break;
	case VALLOC_DIVHI:
	  rl78_alloc_address_registers_div (insn);
	  record_content (AX, NULL_RTX);
	  record_content (BC, NULL_RTX);
	  break;
	default:
	  gcc_unreachable ();
	}

      if (JUMP_P (insn) || CALL_P (insn) || GET_CODE (pattern) == CALL)
	clear_content_memory ();
      else
	process_postponed_content_update ();
    }

#if DEBUG_ALLOC
  fprintf (stderr, "\033[0m");
#endif
}
3828
/* Add REG_DEAD notes using DEAD[reg] for rtx S which is part of INSN.
   This function scans for uses of registers; the last use (i.e. first
   encounter when scanning backwards) triggers a REG_DEAD note if the
   reg was previously in DEAD[].  */
static void
rl78_note_reg_uses (char *dead, rtx s, rtx insn)
{
  const char *fmt;
  int i, r;
  enum rtx_code code;

  if (!s)
    return;

  code = GET_CODE (s);

  switch (code)
    {
    /* Compare registers by number.  */
    case REG:
      r = REGNO (s);
      if (dump_file)
	{
	  fprintf (dump_file, "note use reg %d size %d on insn %d\n",
		   r, GET_MODE_SIZE (GET_MODE (s)), INSN_UID (insn));
	  print_rtl_single (dump_file, s);
	}
      /* We scan backwards, so this is the last use seen so far: if the
	 register was dead after this insn, it dies here.  */
      if (dead [r])
	add_reg_note (insn, REG_DEAD, gen_rtx_REG (GET_MODE (s), r));
      /* Mark every byte of the (possibly multi-byte) register live.  */
      for (i = 0; i < GET_MODE_SIZE (GET_MODE (s)); i ++)
	dead [r + i] = 0;
      return;

    /* These codes have no constituent expressions
       and are unique.  */
    case SCRATCH:
    case PC:
      return;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
    case CONST_FIXED:
      /* These are kept unique for a given value.  */
      return;

    default:
      break;
    }

  /* Recurse over every sub-expression of S.  */
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  int j;
	  for (j = XVECLEN (s, i) - 1; j >= 0; j--)
	    rl78_note_reg_uses (dead, XVECEXP (s, i, j), insn);
	}
      else if (fmt[i] == 'e')
	rl78_note_reg_uses (dead, XEXP (s, i), insn);
    }
}
3893
/* Like the previous function, but scan for SETs instead.  */
static void
rl78_note_reg_set (char *dead, rtx d, rtx insn)
{
  int r, i;
  bool is_dead;

  /* A store's address computation is a use of its registers.  */
  if (GET_CODE (d) == MEM)
    rl78_note_reg_uses (dead, XEXP (d, 0), insn);

  if (GET_CODE (d) != REG)
    return;

  /* Do not mark the reg unused unless all QImode parts of it are dead.  */
  r = REGNO (d);
  is_dead = true;
  for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
    if (!dead [r + i])
      is_dead = false;
  if(is_dead)
    add_reg_note (insn, REG_UNUSED, gen_rtx_REG (GET_MODE (d), r));
  if (dump_file)
    fprintf (dump_file, "note set reg %d size %d\n", r, GET_MODE_SIZE (GET_MODE (d)));
  /* We scan backwards, so before this insn the register's previous
     value is dead — it is overwritten here.  */
  for (i = 0; i < GET_MODE_SIZE (GET_MODE (d)); i ++)
    dead [r + i] = 1;
}
3919
/* This is a rather crude register death pass.  Death status is reset
   at every jump or call insn.  */
static void
rl78_calculate_death_notes (void)
{
  /* dead[r] is nonzero when byte-register R is dead at the current
     scan position (scanning from the last insn backwards).  */
  char dead[FIRST_PSEUDO_REGISTER];
  rtx p, s, d;
  rtx_insn *insn;
  int i;

  memset (dead, 0, sizeof (dead));

  for (insn = get_last_insn ();
       insn;
       insn = prev_nonnote_nondebug_insn (insn))
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\n--------------------------------------------------");
	  fprintf (dump_file, "\nDead:");
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
	    if (dead[i])
	      fprintf (dump_file, " %s", reg_names[i]);
	  fprintf (dump_file, "\n");
	  print_rtl_single (dump_file, insn);
	}

      switch (GET_CODE (insn))
	{
	case INSN:
	  p = PATTERN (insn);
	  if (GET_CODE (p) == PARALLEL)
	    {
	      rtx q = XVECEXP (p, 0 ,1);

	      /* This happens with the DIV patterns.  */
	      if (GET_CODE (q) == SET)
		{
		  s = SET_SRC (q);
		  d = SET_DEST (q);
		  rl78_note_reg_set (dead, d, insn);
		  rl78_note_reg_uses (dead, s, insn);

		}
	      p = XVECEXP (p, 0, 0);
	    }

	  switch (GET_CODE (p))
	    {
	    case SET:
	      /* Process the destination first so a reg both set and
		 used in the same insn ends up marked as used.  */
	      s = SET_SRC (p);
	      d = SET_DEST (p);
	      rl78_note_reg_set (dead, d, insn);
	      rl78_note_reg_uses (dead, s, insn);
	      break;

	    case USE:
	      rl78_note_reg_uses (dead, p, insn);
	      break;

	    default:
	      break;
	    }
	  break;

	case JUMP_INSN:
	  if (INSN_CODE (insn) == CODE_FOR_rl78_return)
	    {
	      memset (dead, 1, sizeof (dead));
	      /* We expect a USE just prior to this, which will mark
		 the actual return registers.  The USE will have a
		 death note, but we aren't going to be modifying it
		 after this pass.  */
	      break;
	    }
	  /* FALLTHRU */
	case CALL_INSN:
	  /* Control-flow boundary: assume everything is live.  */
	  memset (dead, 0, sizeof (dead));
	  break;

	default:
	  break;
	}
      if (dump_file)
	print_rtl_single (dump_file, insn);
    }
}
4007
4008 /* Helper function to reset the origins in RP and the age in AGE for
4009 all registers. */
4010 static void
4011 reset_origins (int *rp, int *age)
4012 {
4013 int i;
4014 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4015 {
4016 rp[i] = i;
4017 age[i] = 0;
4018 }
4019 }
4020
/* Update the register-origin tracking state for the SET rtx PAT, which
   is part of INSN.  ORIGINS[r] records which register r's value was
   copied from (r itself if it was computed); AGE[r] records how many
   copies removed from the origin that value is.  May delete INSN if it
   is a copy whose destination already holds the right value, or rewrite
   its source to a cheaper bank-0 register.  */
static void
set_origin (rtx pat, rtx_insn * insn, int * origins, int * age)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  int mb = GET_MODE_SIZE (GET_MODE (dest));
  int i;

  if (GET_CODE (dest) == REG)
    {
      int dr = REGNO (dest);

      if (GET_CODE (src) == REG)
	{
	  int sr = REGNO (src);
	  bool same = true;
	  int best_age, best_reg;

	  /* See if the copy is not needed.  */
	  for (i = 0; i < mb; i ++)
	    if (origins[dr + i] != origins[sr + i])
	      same = false;

	  if (same)
	    {
	      if (dump_file)
		fprintf (dump_file, "deleting because dest already has correct value\n");
	      delete_insn (insn);
	      return;
	    }

	  if (dr < 8 || sr >= 8)
	    {
	      int ar;

	      best_age = -1;
	      best_reg = -1;

	      /* See if the copy can be made from another
		 bank 0 register instead, instead of the
		 virtual src register.  */
	      for (ar = 0; ar < 8; ar += mb)
		{
		  same = true;

		  for (i = 0; i < mb; i ++)
		    if (origins[ar + i] != origins[sr + i])
		      same = false;

		  /* The chip has some reg-reg move limitations.  */
		  if (mb == 1 && dr > 3)
		    same = false;

		  if (same)
		    {
		      /* NOTE(review): at this point I == MB (the loop
			 above ran to completion), so AGE[sr + i] reads
			 one byte past the value just compared, and
			 BEST_REG is set to SR rather than the candidate
			 AR — this looks like it was meant to use AR;
			 confirm against upstream history before
			 changing.  */
		      if (best_age == -1 || best_age > age[sr + i])
			{
			  best_age = age[sr + i];
			  best_reg = sr;
			}
		    }
		}

	      if (best_reg != -1)
		{
		  /* FIXME: copy debug info too.  */
		  SET_SRC (pat) = gen_rtx_REG (GET_MODE (src), best_reg);
		  sr = best_reg;
		}
	    }

	  /* The destination inherits the source's origin, one copy
	     further removed.  */
	  for (i = 0; i < mb; i++)
	    {
	      origins[dr + i] = origins[sr + i];
	      age[dr + i] = age[sr + i] + 1;
	    }
	}
      else
	{
	  /* The destination is computed, its origin is itself.  */
	  if (dump_file)
	    fprintf (dump_file, "resetting origin of r%d for %d byte%s\n",
		     dr, mb, mb == 1 ? "" : "s");

	  for (i = 0; i < mb; i ++)
	    {
	      origins[dr + i] = dr + i;
	      age[dr + i] = 0;
	    }
	}

      /* Any registers marked with that reg as an origin are reset.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (origins[i] >= dr && origins[i] < dr + mb)
	  {
	    origins[i] = i;
	    age[i] = 0;
	  }
    }

  /* Special case - our MUL patterns uses AX and sometimes BC.  */
  if (get_attr_valloc (insn) == VALLOC_MACAX)
    {
      if (dump_file)
	fprintf (dump_file, "Resetting origin of AX/BC for MUL pattern.\n");

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (i <= 3 || origins[i] <= 3)
	  {
	    origins[i] = i;
	    age[i] = 0;
	  }
    }
  else if (get_attr_valloc (insn) == VALLOC_DIVHI)
    {
      if (dump_file)
	fprintf (dump_file, "Resetting origin of AX/DE for DIVHI pattern.\n");

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (i == A_REG
	    || i == X_REG
	    || i == D_REG
	    || i == E_REG
	    || origins[i] == A_REG
	    || origins[i] == X_REG
	    || origins[i] == D_REG
	    || origins[i] == E_REG)
	  {
	    origins[i] = i;
	    age[i] = 0;
	  }
    }
  else if (get_attr_valloc (insn) == VALLOC_DIVSI)
    {
      if (dump_file)
	fprintf (dump_file, "Resetting origin of AX/BC/DE/HL for DIVSI pattern.\n");

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (i <= 7 || origins[i] <= 7)
	  {
	    origins[i] = i;
	    age[i] = 0;
	  }
    }

  if (GET_CODE (src) == ASHIFT
      || GET_CODE (src) == ASHIFTRT
      || GET_CODE (src) == LSHIFTRT)
    {
      rtx count = XEXP (src, 1);

      if (GET_CODE (count) == REG)
	{
	  /* Special case - our pattern clobbers the count register.  */
	  int r = REGNO (count);

	  if (dump_file)
	    fprintf (dump_file, "Resetting origin of r%d for shift.\n", r);

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (i == r || origins[i] == r)
	      {
		origins[i] = i;
		age[i] = 0;
	      }
	}
    }
}
4189
/* The idea behind this optimization is to look for cases where we
   move data from A to B to C, and instead move from A to B, and A to
   C.  If B is a virtual register or memory, this is a big win on its
   own.  If B turns out to be unneeded after this, it's a bigger win.
   For each register, we try to determine where it's value originally
   came from, if it's propogated purely through moves (and not
   computes).  The ORIGINS[] array has the regno for the "origin" of
   the value in the [regno] it's indexed by.  */
static void
rl78_propogate_register_origins (void)
{
  int origins[FIRST_PSEUDO_REGISTER];
  int age[FIRST_PSEUDO_REGISTER];
  int i;
  rtx_insn *insn, *ninsn = NULL;
  rtx pat;

  reset_origins (origins, age);

  for (insn = get_insns (); insn; insn = ninsn)
    {
      ninsn = next_nonnote_nondebug_insn (insn);

      if (dump_file)
	{
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "Origins:");
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i ++)
	    if (origins[i] != i)
	      fprintf (dump_file, " r%d=r%d", i, origins[i]);
	  fprintf (dump_file, "\n");
	  print_rtl_single (dump_file, insn);
	}

      switch (GET_CODE (insn))
	{
	/* Any control-flow boundary invalidates all tracked origins.  */
	case CODE_LABEL:
	case BARRIER:
	case CALL_INSN:
	case JUMP_INSN:
	  reset_origins (origins, age);
	  break;

	default:
	  break;

	case INSN:
	  pat = PATTERN (insn);

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      rtx clobber = XVECEXP (pat, 0, 1);
	      pat = XVECEXP (pat, 0, 0);
	      if (GET_CODE (clobber) == CLOBBER
		  && GET_CODE (XEXP (clobber, 0)) == REG)
		{
		  /* A clobbered register's origin is reset.  */
		  int cr = REGNO (XEXP (clobber, 0));
		  int mb = GET_MODE_SIZE (GET_MODE (XEXP (clobber, 0)));
		  if (dump_file)
		    fprintf (dump_file, "reset origins of %d regs at %d\n", mb, cr);
		  for (i = 0; i < mb; i++)
		    {
		      origins[cr + i] = cr + i;
		      age[cr + i] = 0;
		    }
		}
	      /* This happens with the DIV patterns.  */
	      else if (GET_CODE (clobber) == SET)
		{
		  set_origin (clobber, insn, origins, age);
		}
	      else
		break;
	    }

	  if (GET_CODE (pat) == SET)
	    {
	      set_origin (pat, insn, origins, age);
	    }
	  else if (GET_CODE (pat) == CLOBBER
		   && GET_CODE (XEXP (pat, 0)) == REG)
	    {
	      if (REG_P (XEXP (pat, 0)))
		{
		  unsigned int reg = REGNO (XEXP (pat, 0));

		  origins[reg] = reg;
		  age[reg] = 0;
		}
	    }
	}
    }
}
4283
4284 /* Remove any SETs where the destination is unneeded. */
4285 static void
4286 rl78_remove_unused_sets (void)
4287 {
4288 rtx_insn *insn, *ninsn = NULL;
4289 rtx dest;
4290
4291 for (insn = get_insns (); insn; insn = ninsn)
4292 {
4293 ninsn = next_nonnote_nondebug_insn (insn);
4294
4295 rtx set = single_set (insn);
4296 if (set == NULL)
4297 continue;
4298
4299 dest = SET_DEST (set);
4300
4301 if (GET_CODE (dest) != REG || REGNO (dest) > 23)
4302 continue;
4303
4304 if (find_regno_note (insn, REG_UNUSED, REGNO (dest)))
4305 {
4306 if (dump_file)
4307 fprintf (dump_file, "deleting because the set register is never used.\n");
4308 delete_insn (insn);
4309 }
4310 }
4311 }
4312
/* This is the top of the devritualization pass.  */
static void
rl78_reorg (void)
{
  /* split2 only happens when optimizing, but we need all movSIs to be
     split now.  */
  if (optimize <= 0)
    split_all_insns ();

  /* Replace virtual registers with physical ones.  */
  rl78_alloc_physical_registers ();

  if (dump_file)
    {
      fprintf (dump_file, "\n================DEVIRT:=AFTER=ALLOC=PHYSICAL=REGISTERS================\n");
      print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
    }

  /* Clean up redundant copies introduced by devirtualization, then
     recompute death notes for the rewritten insn stream.  */
  rl78_propogate_register_origins ();
  rl78_calculate_death_notes ();

  if (dump_file)
    {
      fprintf (dump_file, "\n================DEVIRT:=AFTER=PROPOGATION=============================\n");
      print_rtl_with_bb (dump_file, get_insns (), TDF_NONE);
      fprintf (dump_file, "\n======================================================================\n");
    }

  rl78_remove_unused_sets ();

  /* The code after devirtualizing has changed so much that at this point
     we might as well just rescan everything.  Note that
     df_rescan_all_insns is not going to help here because it does not
     touch the artificial uses and defs.  */
  df_finish_pass (true);
  if (optimize > 1)
    df_live_add_problem ();
  df_scan_alloc (NULL);
  df_scan_blocks ();

  if (optimize)
    df_analyze ();
}
4355
4356 #undef TARGET_RETURN_IN_MEMORY
4357 #define TARGET_RETURN_IN_MEMORY rl78_return_in_memory
4358
4359 static bool
4360 rl78_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
4361 {
4362 const HOST_WIDE_INT size = int_size_in_bytes (type);
4363 return (size == -1 || size > 8);
4364 }
4365
4366
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS rl78_rtx_costs

/* Implement TARGET_RTX_COSTS.  Set *TOTAL to the cost of expression X
   in MODE and return true when the cost is final (subexpressions need
   not be costed separately); return false to fall back to the generic
   costing of X.  */
static bool
rl78_rtx_costs (rtx   x,
		machine_mode mode,
		int   outer_code ATTRIBUTE_UNUSED,
		int   opno ATTRIBUTE_UNUSED,
		int * total,
		bool  speed ATTRIBUTE_UNUSED)
{
  int code = GET_CODE (x);

  /* Conditional expressions are always expensive on this target.  */
  if (code == IF_THEN_ELSE)
    {
      *total = COSTS_N_INSNS (10);
      return true;
    }

  if (mode == HImode)
    {
      /* Discourage HImode multiply when optimizing for size.  */
      if (code == MULT && ! speed)
	{
	  * total = COSTS_N_INSNS (8);
	  return true;
	}
      return false;
    }

  if (mode == SImode)
    {
      switch (code)
	{
	case MULT:
	  if (! speed)
	    /* If we are compiling for space then we do not want to use the
	       inline SImode multiplication patterns or shift sequences.
	       The cost is not set to 1 or 5 however as we have to allow for
	       the possibility that we might be converting a leaf function
	       into a non-leaf function.  (There is no way to tell here).
	       A value of 13 seems to be a reasonable compromise for the
	       moment.  */
	    * total = COSTS_N_INSNS (13);
	  else if (RL78_MUL_G14)
	    *total = COSTS_N_INSNS (14);
	  else if (RL78_MUL_G13)
	    *total = COSTS_N_INSNS (29);
	  else
	    /* No hardware multiplier: a library call is very costly.  */
	    *total = COSTS_N_INSNS (500);
	  return true;

	case PLUS:
	  *total = COSTS_N_INSNS (8);
	  return true;

	case ASHIFT:
	case ASHIFTRT:
	case LSHIFTRT:
	  if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	    {
	      /* Costs reflect the byte-shuffling shortcuts available
		 at multiples of 8.  NOTE(review): counts outside 0..31
		 leave *TOTAL untouched; presumably such shift amounts
		 never reach this hook — confirm.  */
	      switch (INTVAL (XEXP (x, 1)))
		{
		case 0:  *total = COSTS_N_INSNS (0); break;
		case 1:  *total = COSTS_N_INSNS (6); break;
		case 2: case 3: case 4: case 5: case 6: case 7:
		  *total = COSTS_N_INSNS (10); break;
		case 8:  *total = COSTS_N_INSNS (6); break;
		case 9: case 10: case 11: case 12: case 13: case 14: case 15:
		  *total = COSTS_N_INSNS (10); break;
		case 16: *total = COSTS_N_INSNS (3); break;
		case 17: case 18: case 19: case 20: case 21: case 22: case 23:
		  *total = COSTS_N_INSNS (4); break;
		case 24: *total = COSTS_N_INSNS (4); break;
		case 25: case 26: case 27: case 28: case 29: case 30: case 31:
		  *total = COSTS_N_INSNS (5); break;
		}
	    }
	  else
	    /* Variable shift: assume the worst-case loop.  */
	    *total = COSTS_N_INSNS (10+4*16);
	  return true;

	default:
	  break;
	}
    }
  return false;
}
4455
4456
/* Output sections for saddr-encoded data (".saddr") and far read-only
   data (".frodata"), created by rl78_asm_init_sections below.
   GTY-marked so the garbage collector keeps them alive.  */
static GTY(()) section * saddr_section;
static GTY(()) section * frodata_section;
4460
4461 int
4462 rl78_saddr_p (rtx x)
4463 {
4464 const char * c;
4465
4466 if (MEM_P (x))
4467 x = XEXP (x, 0);
4468 if (GET_CODE (x) == PLUS)
4469 x = XEXP (x, 0);
4470 if (GET_CODE (x) != SYMBOL_REF)
4471 return 0;
4472
4473 c = XSTR (x, 0);
4474 if (memcmp (c, "@s.", 3) == 0)
4475 return 1;
4476
4477 return 0;
4478 }
4479
4480 int
4481 rl78_sfr_p (rtx x)
4482 {
4483 if (MEM_P (x))
4484 x = XEXP (x, 0);
4485 if (GET_CODE (x) != CONST_INT)
4486 return 0;
4487
4488 if ((INTVAL (x) & 0xFF00) != 0xFF00)
4489 return 0;
4490
4491 return 1;
4492 }
4493
#undef  TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING rl78_strip_name_encoding

/* Implement TARGET_STRIP_NAME_ENCODING.  Remove RL78 name encodings
   from SYM: any leading asterisks and any number of two-character
   "@X." section-encoding prefixes (e.g. "@s." for saddr data).
   Returns a pointer into SYM just past the encoding.  */
static const char *
rl78_strip_name_encoding (const char * sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      /* Check sym[1] first so we never read past the terminating NUL
	 of a short string such as "@".  */
      else if (sym[0] == '@' && sym[1] != 0 && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4510
/* Like rl78_strip_name_encoding, but does not strip leading asterisks.  This
   is important if the stripped name is going to be passed to assemble_name()
   as that handles asterisk prefixed names in a special manner.  */

static const char *
rl78_strip_nonasm_name_encoding (const char * sym)
{
  while (1)
    {
      /* Check sym[1] first so we never read past the terminating NUL
	 of a short string such as "@".  */
      if (sym[0] == '@' && sym[1] != 0 && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4526
4527
/* Return the single-character name encoding implied by attribute LIST
   ('s' for the "saddr" attribute), or 0 if no encoding applies.  */
static int
rl78_attrlist_to_encoding (tree list, tree decl ATTRIBUTE_UNUSED)
{
  while (list)
    {
      if (is_attribute_p ("saddr", TREE_PURPOSE (list)))
	return 's';
      list = TREE_CHAIN (list);
    }

  return 0;
}
4540
/* Fetch the attribute list for DECL: the type attributes if DECL is a
   type, otherwise the decl's own attributes, falling back to the
   attributes of the decl's type.  */
#define RL78_ATTRIBUTES(decl)				\
  (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl)		\
    : DECL_ATTRIBUTES (decl)				\
    ? (DECL_ATTRIBUTES (decl))				\
    : TYPE_ATTRIBUTES (TREE_TYPE (decl))
4546
#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO rl78_encode_section_info

/* Implement TARGET_ENCODE_SECTION_INFO.  On the first call for DECL,
   prefix its assembler name with "@X." (where X comes from the decl's
   RL78 attributes, e.g. 's' for saddr) so later passes can recognize
   specially-placed objects from the name alone.  */
static void
rl78_encode_section_info (tree decl, rtx rtl, int first)
{
  rtx rtlname;
  const char * oldname;
  char encoding;
  char * newname;
  tree idp;
  tree type;
  tree rl78_attributes;

  /* Only encode once, when the rtl is first created.  */
  if (!first)
    return;

  rtlname = XEXP (rtl, 0);

  if (GET_CODE (rtlname) == SYMBOL_REF)
    oldname = XSTR (rtlname, 0);
  else if (GET_CODE (rtlname) == MEM
	   && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
    oldname = XSTR (XEXP (rtlname, 0), 0);
  else
    gcc_unreachable ();

  type = TREE_TYPE (decl);
  if (type == error_mark_node)
    return;
  if (! DECL_P (decl))
    return;
  rl78_attributes = RL78_ATTRIBUTES (decl);

  encoding = rl78_attrlist_to_encoding (rl78_attributes, decl);

  if (encoding)
    {
      /* "@X." plus the old name plus the NUL is strlen + 4 bytes.  */
      newname = (char *) alloca (strlen (oldname) + 4);
      sprintf (newname, "@%c.%s", encoding, oldname);
      idp = get_identifier (newname);
      XEXP (rtl, 0) =
	gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
      /* Preserve the decl linkage info on the new SYMBOL_REF.  */
      SYMBOL_REF_WEAK (XEXP (rtl, 0)) = DECL_WEAK (decl);
      SET_SYMBOL_REF_DECL (XEXP (rtl, 0), decl);
    }
}
4594
#undef  TARGET_ASM_INIT_SECTIONS
#define TARGET_ASM_INIT_SECTIONS rl78_asm_init_sections

/* Implement TARGET_ASM_INIT_SECTIONS.  Create the RL78-specific output
   sections: .saddr for short-address data and .frodata for far
   read-only data.  */
static void
rl78_asm_init_sections (void)
{
  saddr_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .saddr,\"aw\",@progbits");
  frodata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .frodata,\"aw\",@progbits");
}
4608
#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION rl78_select_section

/* Implement TARGET_ASM_SELECT_SECTION.  Choose the output section for
   DECL: @s.-encoded variables go to .saddr, far read-only variables to
   .frodata, and everything else falls back to the generic
   categorization.  */
static section *
rl78_select_section (tree decl,
		     int reloc,
		     unsigned HOST_WIDE_INT align)
{
  int readonly = 1;

  /* Decide whether DECL is genuinely read-only data.  */
  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      if (!TREE_READONLY (decl)
	  || TREE_SIDE_EFFECTS (decl)
	  || !DECL_INITIAL (decl)
	  || (DECL_INITIAL (decl) != error_mark_node
	      && !TREE_CONSTANT (DECL_INITIAL (decl))))
	readonly = 0;
      break;
    case CONSTRUCTOR:
      if (! TREE_CONSTANT (decl))
	readonly = 0;
      break;

    default:
      break;
    }

  if (TREE_CODE (decl) == VAR_DECL)
    {
      const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);

      /* Honor the "@X." name encoding added by
	 rl78_encode_section_info; only 's' (saddr) exists today.  */
      if (name[0] == '@' && name[2] == '.')
	switch (name[1])
	  {
	  case 's':
	    return saddr_section;
	  }

      if (TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_FAR
	  && readonly)
	{
	  return frodata_section;
	}
    }

  if (readonly)
    return TARGET_ES0 ? frodata_section : readonly_data_section;

  switch (categorize_decl_for_section (decl, reloc))
    {
    case SECCAT_TEXT:   return text_section;
    case SECCAT_DATA:   return data_section;
    case SECCAT_BSS:    return bss_section;
    case SECCAT_RODATA: return TARGET_ES0 ? frodata_section : readonly_data_section;
    default:
      return default_select_section (decl, reloc, align);
    }
}
4669
4670 void
4671 rl78_output_labelref (FILE *file, const char *str)
4672 {
4673 const char *str2;
4674
4675 str2 = targetm.strip_name_encoding (str);
4676 if (str2[0] != '.')
4677 fputs (user_label_prefix, file);
4678 fputs (str2, file);
4679 }
4680
/* Output an aligned common (zero-initialized) object NAME of SIZE bytes
   and alignment ALIGN (in bits) to STREAM.  Objects carrying an "@X."
   name encoding are emitted explicitly into their special section;
   everything else uses the assembler's .comm directive.  GLOBAL selects
   global versus local linkage.  */
void
rl78_output_aligned_common (FILE *stream,
			    tree decl ATTRIBUTE_UNUSED,
			    const char *name,
			    int size, int align, int global)
{
  /* We intentionally don't use rl78_section_tag() here.  */
  if (name[0] == '@' && name[2] == '.')
    {
      const char *sec = 0;
      switch (name[1])
	{
	case 's':
	  switch_to_section (saddr_section);
	  sec = ".saddr";
	  break;
	}
      if (sec)
	{
	  const char *name2;
	  int p2align = 0;

	  /* Convert the bit alignment into a power-of-two byte
	     alignment for .p2align.  */
	  while (align > BITS_PER_UNIT)
	    {
	      align /= 2;
	      p2align ++;
	    }
	  name2 = targetm.strip_name_encoding (name);
	  if (global)
	    fprintf (stream, "\t.global\t_%s\n", name2);
	  fprintf (stream, "\t.p2align %d\n", p2align);
	  fprintf (stream, "\t.type\t_%s,@object\n", name2);
	  fprintf (stream, "\t.size\t_%s,%d\n", name2, size);
	  fprintf (stream, "_%s:\n\t.zero\t%d\n", name2, size);
	  return;
	}
    }

  /* No special section: emit a normal common symbol.  */
  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}
4729
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES rl78_insert_attributes

/* Implement TARGET_INSERT_ATTRIBUTES.  Under -mes0, read-only
   address-taken variables in the generic address space are retyped
   into the far address space, so their addresses are formed
   accordingly (cf. the frodata placement in rl78_select_section).
   ATTRIBUTES is unused; the change is made via the decl's type.  */
static void
rl78_insert_attributes (tree decl, tree *attributes ATTRIBUTE_UNUSED)
{
  if (TARGET_ES0
      && TREE_CODE (decl) == VAR_DECL
      && TREE_READONLY (decl)
      && TREE_ADDRESSABLE (decl)
      && TYPE_ADDR_SPACE (TREE_TYPE (decl)) == ADDR_SPACE_GENERIC)
    {
      tree type = TREE_TYPE (decl);
      tree attr = TYPE_ATTRIBUTES (type);
      /* Keep the existing qualifiers but swap the address space to FAR.  */
      int q = TYPE_QUALS_NO_ADDR_SPACE (type) | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_FAR);

      TREE_TYPE (decl) = build_type_attribute_qual_variant (type, attr, q);
    }
}
4749
4750 #undef TARGET_ASM_INTEGER
4751 #define TARGET_ASM_INTEGER rl78_asm_out_integer
4752
4753 static bool
4754 rl78_asm_out_integer (rtx x, unsigned int size, int aligned_p)
4755 {
4756 if (default_assemble_integer (x, size, aligned_p))
4757 return true;
4758
4759 if (size == 4)
4760 {
4761 assemble_integer_with_op (".long\t", x);
4762 return true;
4763 }
4764
4765 return false;
4766 }
4767
4768 #undef TARGET_UNWIND_WORD_MODE
4770 #define TARGET_UNWIND_WORD_MODE rl78_unwind_word_mode
4771
4772 static scalar_int_mode
4773 rl78_unwind_word_mode (void)
4774 {
4775 return HImode;
4776 }
4777
4778 #ifndef USE_COLLECT2
4779 #undef TARGET_ASM_CONSTRUCTOR
4780 #define TARGET_ASM_CONSTRUCTOR rl78_asm_constructor
4781 #undef TARGET_ASM_DESTRUCTOR
4782 #define TARGET_ASM_DESTRUCTOR rl78_asm_destructor
4783
4784 static void
4785 rl78_asm_ctor_dtor (rtx symbol, int priority, bool is_ctor)
4786 {
4787 section *sec;
4788
4789 if (priority != DEFAULT_INIT_PRIORITY)
4790 {
4791 /* This section of the function is based upon code copied
4792 from: gcc/varasm.cc:get_cdtor_priority_section(). */
4793 char buf[18];
4794
4795 sprintf (buf, "%s.%.5u", is_ctor ? ".ctors" : ".dtors",
4796 MAX_INIT_PRIORITY - priority);
4797 sec = get_section (buf, 0, NULL);
4798 }
4799 else
4800 sec = is_ctor ? ctors_section : dtors_section;
4801
4802 assemble_addr_to_section (symbol, sec);
4803 }
4804
4805 static void
4806 rl78_asm_constructor (rtx symbol, int priority)
4807 {
4808 rl78_asm_ctor_dtor (symbol, priority, true);
4809 }
4810
4811 static void
4812 rl78_asm_destructor (rtx symbol, int priority)
4813 {
4814 rl78_asm_ctor_dtor (symbol, priority, false);
4815 }
4816 #endif /* ! USE_COLLECT2 */
4817
/* Scan backwards through the insn chain looking to see if the flags
   have been set for a comparison of OP against OPERAND.  Start with
   the insn *before* the current insn.  Returns true only when the Z
   flag is already known to reflect OPERAND, so the comparison can be
   skipped in the output template.  */

bool
rl78_flags_already_set (rtx op, rtx operand)
{
  /* We only track the Z flag.  */
  if (GET_CODE (op) != EQ && GET_CODE (op) != NE)
    return false;

  /* This should not happen, but let's be paranoid.  */
  if (current_output_insn == NULL_RTX)
    return false;

  rtx_insn *insn;
  bool res = false;

  for (insn = prev_nonnote_nondebug_insn (current_output_insn);
       insn != NULL_RTX;
       insn = prev_nonnote_nondebug_insn (insn))
    {
      /* A label means control may arrive from elsewhere; give up.  */
      if (LABEL_P (insn))
	break;

      if (! INSN_P (insn))
	continue;

      /* Make sure that the insn can be recognized.  */
      if (recog_memoized (insn) == -1)
	continue;

      enum attr_update_Z updated = get_attr_update_Z (insn);

      /* If this insn writes OPERAND, nothing earlier can tell us
	 anything about OPERAND's current value — stop after
	 processing it.  */
      rtx set = single_set (insn);
      bool must_break = (set != NULL_RTX && rtx_equal_p (operand, SET_DEST (set)));

      switch (updated)
	{
	case UPDATE_Z_NO:
	  /* Insn leaves Z alone; keep scanning unless it wrote OPERAND.  */
	  break;
	case UPDATE_Z_CLOBBER:
	  /* Z is trashed; the flag cannot be relied upon.  */
	  must_break = true;
	  break;
	case UPDATE_Z_UPDATE_Z:
	  /* Z reflects this insn's destination: usable only if that
	     destination is OPERAND itself.  */
	  res = must_break;
	  must_break = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      if (must_break)
	break;
    }

  /* We have to re-recognize the current insn as the call(s) to
     get_attr_update_Z() above will have overwritten the recog_data cache.  */
  recog_memoized (current_output_insn);
  cleanup_subreg_operands (current_output_insn);
  constrain_operands_cached (current_output_insn, 1);

  return res;
}
4882
4883 const char *
4884 rl78_addsi3_internal (rtx * operands, unsigned int alternative)
4885 {
4886 const char *addH2 = "addw ax, %H2\n\t";
4887
4888 /* If we are adding in a constant symbolic address when -mes0
4889 is active then we know that the address must be <64K and
4890 that it is invalid to access anything above 64K relative to
4891 this address. So we can skip adding in the high bytes. */
4892 if (TARGET_ES0
4893 && GET_CODE (operands[2]) == SYMBOL_REF
4894 && TREE_CODE (SYMBOL_REF_DECL (operands[2])) == VAR_DECL
4895 && TREE_READONLY (SYMBOL_REF_DECL (operands[2]))
4896 && ! TREE_SIDE_EFFECTS (SYMBOL_REF_DECL (operands[2])))
4897 return "movw ax, %h1\n\taddw ax, %h2\n\tmovw %h0, ax";
4898
4899 if(CONST_INT_P(operands[2]))
4900 {
4901 if((INTVAL(operands[2]) & 0xFFFF0000) == 0)
4902 {
4903 addH2 = "";
4904 }
4905 else if((INTVAL(operands[2]) & 0xFFFF0000) == 0x00010000)
4906 {
4907 addH2 = "incw ax\n\t";
4908 }
4909 else if((INTVAL(operands[2]) & 0xFFFF0000) == 0xFFFF0000)
4910 {
4911 addH2 = "decw ax\n\t";
4912 }
4913 }
4914
4915 switch (alternative)
4916 {
4917 case 0:
4918 case 1:
4919 snprintf(fmt_buffer, sizeof(fmt_buffer),
4920 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw %%h0, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0,ax", addH2);
4921 break;
4922 case 2:
4923 snprintf(fmt_buffer, sizeof(fmt_buffer),
4924 "movw ax, %%h1\n\taddw ax, %%h2\n\tmovw bc, ax\n\tmovw ax, %%H1\n\tsknc\n\tincw ax\n\t%smovw %%H0, ax\n\tmovw ax, bc\n\tmovw %%h0, ax", addH2);
4925 break;
4926 default:
4927 gcc_unreachable ();
4928 }
4929
4930 return fmt_buffer;
4931 }
4932
/* Emit a library call to NAME implementing operation CODE, returning
   a value in mode DMODE from NOPERANDS operands (source mode SMODE).
   OPERANDS[0] is the destination; OPERANDS[1] (and OPERANDS[2] when
   NOPERANDS is 3) are the sources.  The call is wrapped in a libcall
   block carrying an EQUIV rtx so the optimizers can treat it as the
   plain operation.  Returns the rtx holding the call's value.  */
rtx
rl78_emit_libcall (const char *name, enum rtx_code code,
		   enum machine_mode dmode, enum machine_mode smode,
		   int noperands, rtx *operands)
{
  rtx ret;
  rtx_insn *insns;
  rtx libcall;
  rtx equiv;

  start_sequence ();
  libcall = gen_rtx_SYMBOL_REF (Pmode, name);

  switch (noperands)
    {
    case 2:
      /* Unary operation: one source operand.  */
      ret = emit_library_call_value (libcall, NULL_RTX, LCT_CONST,
				     dmode, operands[1], smode);
      equiv = gen_rtx_fmt_e (code, dmode, operands[1]);
      break;

    case 3:
      /* Binary operation: two source operands.  */
      ret = emit_library_call_value (libcall, NULL_RTX,
				     LCT_CONST, dmode,
				     operands[1], smode, operands[2],
				     smode);
      equiv = gen_rtx_fmt_ee (code, dmode, operands[1], operands[2]);
      break;

    default:
      gcc_unreachable ();
    }

  insns = get_insns ();
  end_sequence ();
  emit_libcall_block (insns, operands[0], ret, equiv);
  return ret;
}
4971
4972
4973 #undef TARGET_PREFERRED_RELOAD_CLASS
4975 #define TARGET_PREFERRED_RELOAD_CLASS rl78_preferred_reload_class
4976
4977 static reg_class_t
4978 rl78_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, reg_class_t rclass)
4979 {
4980 if (rclass == NO_REGS)
4981 rclass = V_REGS;
4982
4983 return rclass;
4984 }
4985
4986
/* The global target hook vector; TARGET_INITIALIZER picks up all the
   TARGET_* macros defined throughout this file.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-rl78.h"
4991