/* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987-2022 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This is the final pass of the compiler.
21 It looks at the rtl code for a function and outputs assembler code.
22
23 Call `final_start_function' to output the assembler code for function entry,
24 `final' to output assembler code for some RTL code,
25 `final_end_function' to output assembler code for function exit.
26 If a function is compiled in several pieces, each piece is
27 output separately with `final'.
28
29 Some optimizations are also done at this level.
30 Move instructions that were made unnecessary by good register allocation
31 are detected and omitted from the output. (Though most of these
32 are removed by the last jump pass.)
33
34 Instructions to set the condition codes are omitted when it can be
35 seen that the condition codes already had the desired values.
36
37 In some cases it is sufficient if the inherited condition codes
38 have related values, but this may require the following insn
39 (the one that tests the condition codes) to be modified.
40
41 The code for the function prologue and epilogue are generated
42 directly in assembler by the target functions function_prologue and
43 function_epilogue. Those instructions never exist as rtl. */
44
45 #include "config.h"
46 #define INCLUDE_ALGORITHM /* reverse */
47 #include "system.h"
48 #include "coretypes.h"
49 #include "backend.h"
50 #include "target.h"
51 #include "rtl.h"
52 #include "tree.h"
53 #include "cfghooks.h"
54 #include "df.h"
55 #include "memmodel.h"
56 #include "tm_p.h"
57 #include "insn-config.h"
58 #include "regs.h"
59 #include "emit-rtl.h"
60 #include "recog.h"
61 #include "cgraph.h"
62 #include "tree-pretty-print.h" /* for dump_function_header */
63 #include "varasm.h"
64 #include "insn-attr.h"
65 #include "conditions.h"
66 #include "flags.h"
67 #include "output.h"
68 #include "except.h"
69 #include "rtl-error.h"
70 #include "toplev.h" /* exact_log2, floor_log2 */
71 #include "reload.h"
72 #include "intl.h"
73 #include "cfgrtl.h"
74 #include "debug.h"
75 #include "tree-pass.h"
76 #include "tree-ssa.h"
77 #include "cfgloop.h"
78 #include "stringpool.h"
79 #include "attribs.h"
80 #include "asan.h"
81 #include "rtl-iter.h"
82 #include "print-rtl.h"
83 #include "function-abi.h"
84 #include "common/common-target.h"
85
86 #ifdef XCOFF_DEBUGGING_INFO
87 #include "xcoffout.h" /* Needed for external data declarations. */
88 #endif
89
90 #include "dwarf2out.h"
91
92 #ifdef DBX_DEBUGGING_INFO
93 #include "dbxout.h"
94 #endif
95
96 /* Most ports don't need to define CC_STATUS_INIT.
97 So define a null default for it to save conditionalization later. */
98 #ifndef CC_STATUS_INIT
99 #define CC_STATUS_INIT
100 #endif
101
102 /* Is the given character a logical line separator for the assembler? */
103 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
104 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
105 #endif
106
107 #ifndef JUMP_TABLES_IN_TEXT_SECTION
108 #define JUMP_TABLES_IN_TEXT_SECTION 0
109 #endif
110
111 /* Bitflags used by final_scan_insn. */
112 #define SEEN_NOTE 1
113 #define SEEN_EMITTED 2
114 #define SEEN_NEXT_VIEW 4
115
116 /* Last insn processed by final_scan_insn. */
117 static rtx_insn *debug_insn;
118 rtx_insn *current_output_insn;
119
120 /* Line number of last NOTE. */
121 static int last_linenum;
122
123 /* Column number of last NOTE. */
124 static int last_columnnum;
125
126 /* Discriminator written to assembly. */
127 static int last_discriminator;
128
129 /* Discriminator to be written to assembly for current instruction.
130 Note: actual usage depends on loc_discriminator_kind setting. */
131 static int discriminator;
132 static inline int compute_discriminator (location_t loc);
133
134 /* Discriminator identifying current basic block among others sharing
135 the same locus. */
136 static int bb_discriminator;
137
138 /* Basic block discriminator for previous instruction. */
139 static int last_bb_discriminator;
140
141 /* Highest line number in current block. */
142 static int high_block_linenum;
143
144 /* Likewise for function. */
145 static int high_function_linenum;
146
147 /* Filename of last NOTE. */
148 static const char *last_filename;
149
150 /* Override filename, line and column number. */
151 static const char *override_filename;
152 static int override_linenum;
153 static int override_columnnum;
154 static int override_discriminator;
155
156 /* Whether to force emission of a line note before the next insn. */
157 static bool force_source_line = false;
158
159 extern const int length_unit_log; /* This is defined in insn-attrtab.cc. */
160
161 /* Nonzero while outputting an `asm' with operands.
162 This means that inconsistencies are the user's fault, so don't die.
163 The precise value is the insn being output, to pass to error_for_asm. */
164 const rtx_insn *this_is_asm_operands;
165
166 /* Number of operands of this insn, for an `asm' with operands. */
167 static unsigned int insn_noperands;
168
169 /* Compare optimization flag. */
170
171 static rtx last_ignored_compare = 0;
172
173 /* Assign a unique number to each insn that is output.
174 This can be used to generate unique local labels. */
175
176 static int insn_counter = 0;
177
178 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
179
180 static int block_depth;
181
182 /* Nonzero if have enabled APP processing of our assembler output. */
183
184 static int app_on;
185
186 /* If we are outputting an insn sequence, this contains the sequence rtx.
187 Zero otherwise. */
188
189 rtx_sequence *final_sequence;
190
191 #ifdef ASSEMBLER_DIALECT
192
193 /* Number of the assembler dialect to use, starting at 0. */
194 static int dialect_number;
195 #endif
196
197 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
198 rtx current_insn_predicate;
199
200 /* True if printing into -fdump-final-insns= dump. */
201 bool final_insns_dump_p;
202
203 /* True if profile_function should be called, but hasn't been called yet. */
204 static bool need_profile_function;
205
206 static int asm_insn_count (rtx);
207 static void profile_function (FILE *);
208 static void profile_after_prologue (FILE *);
209 static bool notice_source_line (rtx_insn *, bool *);
210 static rtx walk_alter_subreg (rtx *, bool *);
211 static void output_asm_name (void);
212 static void output_alternate_entry_point (FILE *, rtx_insn *);
213 static tree get_mem_expr_from_op (rtx, int *);
214 static void output_asm_operand_names (rtx *, int *, int);
215 #ifdef LEAF_REGISTERS
216 static void leaf_renumber_regs (rtx_insn *);
217 #endif
218 static int align_fuzz (rtx, rtx, int, unsigned);
219 static void collect_fn_hard_reg_usage (void);
220
/* Initialize data in final at the beginning of a compilation.
   FILENAME is unused; it is retained for the historical interface.  */

void
init_final (const char *filename ATTRIBUTE_UNUSED)
{
  app_on = 0;		/* No ASM_APP_ON has been emitted yet.  */
  final_sequence = 0;	/* Not inside a delayed-branch sequence.  */

#ifdef ASSEMBLER_DIALECT
  dialect_number = ASSEMBLER_DIALECT;
#endif
}
234
/* Default target function prologue and epilogue assembler output.

   If not overridden for epilogue code, then the function body itself
   contains return instructions wherever needed.  This default hook
   therefore intentionally emits nothing.  */
void
default_function_pro_epilogue (FILE *)
{
}
243
/* Default hook invoked when output switches to a different text section
   (e.g. between hot and cold parts of a function); emits nothing.  */
void
default_function_switched_text_sections (FILE *file ATTRIBUTE_UNUSED,
					 tree decl ATTRIBUTE_UNUSED,
					 bool new_is_cold ATTRIBUTE_UNUSED)
{
}
250
/* Default target hook that outputs nothing to a stream.  */
void
no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
{
}
256
257 /* Enable APP processing of subsequent output.
258 Used before the output from an `asm' statement. */
259
260 void
261 app_enable (void)
262 {
263 if (! app_on)
264 {
265 fputs (ASM_APP_ON, asm_out_file);
266 app_on = 1;
267 }
268 }
269
270 /* Disable APP processing of subsequent output.
271 Called from varasm.cc before most kinds of output. */
272
273 void
274 app_disable (void)
275 {
276 if (app_on)
277 {
278 fputs (ASM_APP_OFF, asm_out_file);
279 app_on = 0;
280 }
281 }
282
283 /* Return the number of slots filled in the current
285 delayed branch sequence (we don't count the insn needing the
286 delay slot). Zero if not in a delayed branch sequence. */
287
288 int
289 dbr_sequence_length (void)
290 {
291 if (final_sequence != 0)
292 return XVECLEN (final_sequence, 0) - 1;
293 else
294 return 0;
295 }
296
297 /* The next two pages contain routines used to compute the length of an insn
299 and to shorten branches. */
300
301 /* Arrays for insn lengths, and addresses. The latter is referenced by
302 `insn_current_length'. */
303
304 static int *insn_lengths;
305
306 vec<int> insn_addresses_;
307
308 /* Max uid for which the above arrays are valid. */
309 static int insn_lengths_max_uid;
310
311 /* Address of insn being processed. Used by `insn_current_length'. */
312 int insn_current_address;
313
314 /* Address of insn being processed in previous iteration. */
315 int insn_last_address;
316
317 /* known invariant alignment of insn being processed. */
318 int insn_current_align;
319
320 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
321 gives the next following alignment insn that increases the known
322 alignment, or NULL_RTX if there is no such insn.
323 For any alignment obtained this way, we can again index uid_align with
324 its uid to obtain the next following align that in turn increases the
325 alignment, till we reach NULL_RTX; the sequence obtained this way
326 for each insn we'll call the alignment chain of this insn in the following
327 comments. */
328
329 static rtx *uid_align;
330 static int *uid_shuid;
331 static vec<align_flags> label_align;
332
333 /* Indicate that branch shortening hasn't yet been done. */
334
335 void
336 init_insn_lengths (void)
337 {
338 if (uid_shuid)
339 {
340 free (uid_shuid);
341 uid_shuid = 0;
342 }
343 if (insn_lengths)
344 {
345 free (insn_lengths);
346 insn_lengths = 0;
347 insn_lengths_max_uid = 0;
348 }
349 if (HAVE_ATTR_length)
350 INSN_ADDRESSES_FREE ();
351 if (uid_align)
352 {
353 free (uid_align);
354 uid_align = 0;
355 }
356 }
357
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, use FALLBACK_FN to calculate the
   length.  */
static int
get_attr_length_1 (rtx_insn *insn, int (*fallback_fn) (rtx_insn *))
{
  rtx body;
  int i;
  int length = 0;

  /* Targets without a length attribute have no insn-length concept.  */
  if (!HAVE_ATTR_length)
    return 0;

  /* If shorten_branches has recorded a length for this insn, use it.  */
  if (insn_lengths_max_uid > INSN_UID (insn))
    return insn_lengths[INSN_UID (insn)];
  else
    switch (GET_CODE (insn))
      {
      case NOTE:
      case BARRIER:
      case CODE_LABEL:
      case DEBUG_INSN:
	/* These take no room in the output.  */
	return 0;

      case CALL_INSN:
      case JUMP_INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  /* An inline asm is estimated as one fallback length per
	     assembler statement it contains.  */
	  length = asm_insn_count (body) * fallback_fn (insn);
	else
	  length = fallback_fn (insn);
	break;

      case INSN:
	body = PATTERN (insn);
	if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
	  /* USE and CLOBBER generate no code.  */
	  return 0;

	else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
	  length = asm_insn_count (body) * fallback_fn (insn);
	else if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
	  /* A delay-slot SEQUENCE: sum the lengths of its members.  */
	  for (i = 0; i < seq->len (); i++)
	    length += get_attr_length_1 (seq->insn (i), fallback_fn);
	else
	  length = fallback_fn (insn);
	break;

      default:
	break;
      }

#ifdef ADJUST_INSN_LENGTH
  /* Let the target apply any final per-insn correction.  */
  ADJUST_INSN_LENGTH (insn, length);
#endif
  return length;
}
414
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its maximum length
   (insn_default_length is the worst-case estimate).  */
int
get_attr_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_default_length);
}
422
/* Obtain the current length of an insn.  If branch shortening has been done,
   get its actual length.  Otherwise, get its minimum length
   (insn_min_length is the best-case estimate).  */
int
get_attr_min_length (rtx_insn *insn)
{
  return get_attr_length_1 (insn, insn_min_length);
}
430
431 /* Code to handle alignment inside shorten_branches. */
433
434 /* Here is an explanation how the algorithm in align_fuzz can give
435 proper results:
436
437 Call a sequence of instructions beginning with alignment point X
438 and continuing until the next alignment point `block X'. When `X'
439 is used in an expression, it means the alignment value of the
440 alignment point.
441
442 Call the distance between the start of the first insn of block X, and
443 the end of the last insn of block X `IX', for the `inner size of X'.
444 This is clearly the sum of the instruction lengths.
445
446 Likewise with the next alignment-delimited block following X, which we
447 shall call block Y.
448
449 Call the distance between the start of the first insn of block X, and
450 the start of the first insn of block Y `OX', for the `outer size of X'.
451
452 The estimated padding is then OX - IX.
453
454 OX can be safely estimated as
455
456 if (X >= Y)
457 OX = round_up(IX, Y)
458 else
459 OX = round_up(IX, X) + Y - X
460
461 Clearly est(IX) >= real(IX), because that only depends on the
462 instruction lengths, and those being overestimated is a given.
463
464 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
465 we needn't worry about that when thinking about OX.
466
467 When X >= Y, the alignment provided by Y adds no uncertainty factor
468 for branch ranges starting before X, so we can just round what we have.
469 But when X < Y, we don't know anything about the, so to speak,
470 `middle bits', so we have to assume the worst when aligning up from an
471 address mod X to one mod Y, which is Y - X. */
472
473 #ifndef LABEL_ALIGN
474 #define LABEL_ALIGN(LABEL) align_labels
475 #endif
476
477 #ifndef LOOP_ALIGN
478 #define LOOP_ALIGN(LABEL) align_loops
479 #endif
480
481 #ifndef LABEL_ALIGN_AFTER_BARRIER
482 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
483 #endif
484
485 #ifndef JUMP_ALIGN
486 #define JUMP_ALIGN(LABEL) align_jumps
487 #endif
488
489 #ifndef ADDR_VEC_ALIGN
490 static int
491 final_addr_vec_align (rtx_jump_table_data *addr_vec)
492 {
493 int align = GET_MODE_SIZE (addr_vec->get_data_mode ());
494
495 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
496 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
497 return exact_log2 (align);
498
499 }
500
501 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
502 #endif
503
504 #ifndef INSN_LENGTH_ALIGNMENT
505 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
506 #endif
507
508 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
509
510 static int min_labelno, max_labelno;
511
512 #define LABEL_TO_ALIGNMENT(LABEL) \
513 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno])
514
515 /* For the benefit of port specific code do this also as a function. */
516
517 align_flags
518 label_to_alignment (rtx label)
519 {
520 if (CODE_LABEL_NUMBER (label) <= max_labelno)
521 return LABEL_TO_ALIGNMENT (label);
522 return align_flags ();
523 }
524
525 /* The differences in addresses
526 between a branch and its target might grow or shrink depending on
527 the alignment the start insn of the range (the branch for a forward
528 branch or the label for a backward branch) starts out on; if these
529 differences are used naively, they can even oscillate infinitely.
530 We therefore want to compute a 'worst case' address difference that
531 is independent of the alignment the start insn of the range end
532 up on, and that is at least as large as the actual difference.
533 The function align_fuzz calculates the amount we have to add to the
534 naively computed difference, by traversing the part of the alignment
535 chain of the start insn of the range that is in front of the end insn
536 of the range, and considering for each alignment the maximum amount
537 that it might contribute to a size increase.
538
539 For casesi tables, we also want to know worst case minimum amounts of
540 address difference, in case a machine description wants to introduce
541 some common offset that is added to all offsets in a table.
542 For this purpose, align_fuzz with a growth argument of 0 computes the
543 appropriate adjustment. */
544
/* Compute the maximum delta by which the difference of the addresses of
   START and END might grow / shrink due to a different address for start
   which changes the size of alignment insns between START and END.
   KNOWN_ALIGN_LOG is the alignment known for START.
   GROWTH should be ~0 if the objective is to compute potential code size
   increase, and 0 if the objective is to compute potential shrink.
   The return value is undefined for any other value of GROWTH.  */

static int
align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
{
  int uid = INSN_UID (start);
  rtx align_label;
  int known_align = 1 << known_align_log;
  int end_shuid = INSN_SHUID (end);
  int fuzz = 0;

  /* Walk START's alignment chain (see the comment above uid_align) and
     stop once we pass END.  */
  for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
    {
      int align_addr, new_align;

      uid = INSN_UID (align_label);
      /* Address of the alignment point, before its own padding.  */
      align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
      if (uid_shuid[uid] > end_shuid)
	break;
      align_flags alignment = LABEL_TO_ALIGNMENT (align_label);
      new_align = 1 << alignment.levels[0].log;
      /* Alignments no stronger than what is already known contribute no
	 additional uncertainty.  */
      if (new_align < known_align)
	continue;
      /* Worst-case padding this alignment point can add (GROWTH == ~0)
	 or remove (GROWTH == 0) relative to the naive address.  */
      fuzz += (-align_addr ^ growth) & (new_align - known_align);
      known_align = new_align;
    }
  return fuzz;
}
579
/* Compute a worst-case reference address of a branch so that it
   can be safely used in the presence of aligned labels.  Since the
   size of the branch itself is unknown, the size of the branch is
   not included in the range.  I.e. for a forward branch, the reference
   address is the end address of the branch as known from the previous
   branch shortening pass, minus a value to account for possible size
   increase due to alignment.  For a backward branch, it is the start
   address of the branch as known from the current pass, plus a value
   to account for possible size increase due to alignment.
   NB.: Therefore, the maximum offset allowed for backward branches needs
   to exclude the branch size.  */

int
insn_current_reference_address (rtx_insn *branch)
{
  rtx dest;
  int seq_uid;

  /* Without recorded insn addresses no meaningful answer exists.  */
  if (! INSN_ADDRESSES_SET_P ())
    return 0;

  /* SEQ is BRANCH itself, or the delay-slot SEQUENCE containing it.  */
  rtx_insn *seq = NEXT_INSN (PREV_INSN (branch));
  seq_uid = INSN_UID (seq);
  if (!jump_to_label_p (branch))
    /* This can happen for example on the PA; the objective is to know the
       offset to address something in front of the start of the function.
       Thus, we can treat it like a backward branch.
       We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
       any alignment we'd encounter, so we skip the call to align_fuzz.  */
    return insn_current_address;
  dest = JUMP_LABEL (branch);

  /* BRANCH has no proper alignment chain set, so use SEQ.
     BRANCH also has no INSN_SHUID.  */
  if (INSN_SHUID (seq) < INSN_SHUID (dest))
    {
      /* Forward branch.  Use the end address from the previous pass,
	 reduced by the worst-case alignment growth up to DEST.  */
      return (insn_last_address + insn_lengths[seq_uid]
	      - align_fuzz (seq, dest, length_unit_log, ~0));
    }
  else
    {
      /* Backward branch.  Use the current start address, increased by
	 the worst-case alignment growth back from DEST.  */
      return (insn_current_address
	      + align_fuzz (dest, seq, length_unit_log, ~0));
    }
}
627
/* Compute branch alignments based on the CFG profile: record in
   label_align, for each basic-block head label, the alignment worth
   applying given how frequently the block is entered by branches
   versus fallthru.  Always returns 0 (no TODO flags).  */

unsigned int
compute_alignments (void)
{
  basic_block bb;
  align_flags max_alignment;

  /* (Re)build label_align to cover the current range of label numbers.  */
  label_align.truncate (0);

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  label_align.safe_grow_cleared (max_labelno - min_labelno + 1, true);

  /* If not optimizing or optimizing for size, don't assign any alignments.  */
  if (! optimize || optimize_function_for_size_p (cfun))
    return 0;

  if (dump_file)
    {
      dump_reg_info (dump_file);
      dump_flow_info (dump_file, TDF_DETAILS);
      flow_loops_dump (dump_file, NULL, 1);
    }
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  /* Execution-count threshold above which alignment is considered
     worthwhile, derived from the hottest count in the function.  */
  profile_count count_threshold = cfun->cfg->count_max.apply_scale
    (1, param_align_threshold);

  if (dump_file)
    {
      fprintf (dump_file, "count_max: ");
      cfun->cfg->count_max.dump (dump_file);
      fprintf (dump_file, "\n");
    }
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *label = BB_HEAD (bb);
      bool has_fallthru = 0;
      edge e;
      edge_iterator ei;

      /* Only blocks that start with a label can be aligned, and blocks
	 optimized for size should not be.  */
      if (!LABEL_P (label)
	  || optimize_bb_for_size_p (bb))
	{
	  if (dump_file)
	    fprintf (dump_file,
		     "BB %4i loop %2i loop_depth %2i skipped.\n",
		     bb->index,
		     bb->loop_father->num,
		     bb_loop_depth (bb));
	  continue;
	}
      max_alignment = LABEL_ALIGN (label);
      profile_count fallthru_count = profile_count::zero ();
      profile_count branch_count = profile_count::zero ();

      /* Split the incoming execution count into fallthru and branch
	 contributions.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_FALLTHRU)
	    has_fallthru = 1, fallthru_count += e->count ();
	  else
	    branch_count += e->count ();
	}
      if (dump_file)
	{
	  fprintf (dump_file, "BB %4i loop %2i loop_depth"
		   " %2i fall ",
		   bb->index, bb->loop_father->num,
		   bb_loop_depth (bb));
	  fallthru_count.dump (dump_file);
	  fprintf (dump_file, " branch ");
	  branch_count.dump (dump_file);
	  if (!bb->loop_father->inner && bb->loop_father->num)
	    fprintf (dump_file, " inner_loop");
	  if (bb->loop_father->header == bb)
	    fprintf (dump_file, " loop_header");
	  fprintf (dump_file, "\n");
	}
      /* Without reliable profile data we cannot make a decision.  */
      if (!fallthru_count.initialized_p () || !branch_count.initialized_p ())
	continue;

      /* There are two purposes to align block with no fallthru incoming edge:
	 1) to avoid fetch stalls when branch destination is near cache boundary
	 2) to improve cache efficiency in case the previous block is not executed
	    (so it does not need to be in the cache).

	 To catch the first case, we align frequently executed blocks.
	 To catch the second, we align blocks that are executed more frequently
	 than the predecessor and the predecessor is likely to not be executed
	 when function is called.  */

      if (!has_fallthru
	  && (branch_count > count_threshold
	      || (bb->count > bb->prev_bb->count.apply_scale (10, 1)
		  && (bb->prev_bb->count
		      <= ENTRY_BLOCK_PTR_FOR_FN (cfun)
			 ->count.apply_scale (1, 2)))))
	{
	  align_flags alignment = JUMP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " jump alignment added.\n");
	  max_alignment = align_flags::max (max_alignment, alignment);
	}
      /* In case block is frequent and reached mostly by non-fallthru edge,
	 align it.  It is most likely a first block of loop.  */
      if (has_fallthru
	  && !(single_succ_p (bb)
	       && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  && optimize_bb_for_speed_p (bb)
	  && branch_count + fallthru_count > count_threshold
	  && (branch_count
	      > fallthru_count.apply_scale
		  (param_align_loop_iterations, 1)))
	{
	  align_flags alignment = LOOP_ALIGN (label);
	  if (dump_file)
	    fprintf (dump_file, " internal loop alignment added.\n");
	  max_alignment = align_flags::max (max_alignment, alignment);
	}
      LABEL_TO_ALIGNMENT (label) = max_alignment;
    }

  loop_optimizer_finalize ();
  free_dominance_info (CDI_DOMINATORS);
  return 0;
}
755
756 /* Grow the LABEL_ALIGN array after new labels are created. */
757
758 static void
759 grow_label_align (void)
760 {
761 int old = max_labelno;
762 int n_labels;
763 int n_old_labels;
764
765 max_labelno = max_label_num ();
766
767 n_labels = max_labelno - min_labelno + 1;
768 n_old_labels = old - min_labelno + 1;
769
770 label_align.safe_grow_cleared (n_labels, true);
771
772 /* Range of labels grows monotonically in the function. Failing here
773 means that the initialization of array got lost. */
774 gcc_assert (n_old_labels <= n_labels);
775 }
776
777 /* Update the already computed alignment information. LABEL_PAIRS is a vector
778 made up of pairs of labels for which the alignment information of the first
779 element will be copied from that of the second element. */
780
781 void
782 update_alignments (vec<rtx> &label_pairs)
783 {
784 unsigned int i = 0;
785 rtx iter, label = NULL_RTX;
786
787 if (max_labelno != max_label_num ())
788 grow_label_align ();
789
790 FOR_EACH_VEC_ELT (label_pairs, i, iter)
791 if (i & 1)
792 LABEL_TO_ALIGNMENT (label) = LABEL_TO_ALIGNMENT (iter);
793 else
794 label = iter;
795 }
796
797 namespace {
798
/* Metadata for the "alignments" RTL pass; see struct pass_data in
   tree-pass.h for the meaning of each field.  */

const pass_data pass_data_compute_alignments =
{
  RTL_PASS, /* type */
  "alignments", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper that runs compute_alignments on each function.  */

class pass_compute_alignments : public rtl_opt_pass
{
public:
  pass_compute_alignments (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_compute_alignments, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return compute_alignments (); }

}; // class pass_compute_alignments
823
824 } // anon namespace
825
/* Allocate a new instance of the compute-alignments pass for the pass
   manager in context CTXT.  */
rtl_opt_pass *
make_pass_compute_alignments (gcc::context *ctxt)
{
  return new pass_compute_alignments (ctxt);
}
831
832
833 /* Make a pass over all insns and compute their actual lengths by shortening
835 any branches of variable length if possible. */
836
837 /* shorten_branches might be called multiple times: for example, the SH
838 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
839 In order to do this, it needs proper length information, which it obtains
840 by calling shorten_branches. This cannot be collapsed with
841 shorten_branches itself into a single pass unless we also want to integrate
842 reorg.cc, since the branch splitting exposes new instructions with delay
843 slots. */
844
845 void
846 shorten_branches (rtx_insn *first)
847 {
848 rtx_insn *insn;
849 int max_uid;
850 int i;
851 rtx_insn *seq;
852 int something_changed = 1;
853 char *varying_length;
854 rtx body;
855 int uid;
856 rtx align_tab[MAX_CODE_ALIGN + 1];
857
858 /* Compute maximum UID and allocate label_align / uid_shuid. */
859 max_uid = get_max_uid ();
860
861 /* Free uid_shuid before reallocating it. */
862 free (uid_shuid);
863
864 uid_shuid = XNEWVEC (int, max_uid);
865
866 if (max_labelno != max_label_num ())
867 grow_label_align ();
868
869 /* Initialize label_align and set up uid_shuid to be strictly
870 monotonically rising with insn order. */
871 /* We use alignment here to keep track of the maximum alignment we want to
872 impose on the next CODE_LABEL (or the current one if we are processing
873 the CODE_LABEL itself). */
874
875 align_flags max_alignment;
876
877 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
878 {
879 INSN_SHUID (insn) = i++;
880 if (INSN_P (insn))
881 continue;
882
883 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
884 {
885 /* Merge in alignments computed by compute_alignments. */
886 align_flags alignment = LABEL_TO_ALIGNMENT (label);
887 max_alignment = align_flags::max (max_alignment, alignment);
888
889 rtx_jump_table_data *table = jump_table_for_label (label);
890 if (!table)
891 {
892 align_flags alignment = LABEL_ALIGN (label);
893 max_alignment = align_flags::max (max_alignment, alignment);
894 }
895 /* ADDR_VECs only take room if read-only data goes into the text
896 section. */
897 if ((JUMP_TABLES_IN_TEXT_SECTION
898 || readonly_data_section == text_section)
899 && table)
900 {
901 align_flags alignment = align_flags (ADDR_VEC_ALIGN (table));
902 max_alignment = align_flags::max (max_alignment, alignment);
903 }
904 LABEL_TO_ALIGNMENT (label) = max_alignment;
905 max_alignment = align_flags ();
906 }
907 else if (BARRIER_P (insn))
908 {
909 rtx_insn *label;
910
911 for (label = insn; label && ! INSN_P (label);
912 label = NEXT_INSN (label))
913 if (LABEL_P (label))
914 {
915 align_flags alignment
916 = align_flags (LABEL_ALIGN_AFTER_BARRIER (insn));
917 max_alignment = align_flags::max (max_alignment, alignment);
918 break;
919 }
920 }
921 }
922 if (!HAVE_ATTR_length)
923 return;
924
925 /* Allocate the rest of the arrays. */
926 insn_lengths = XNEWVEC (int, max_uid);
927 insn_lengths_max_uid = max_uid;
928 /* Syntax errors can lead to labels being outside of the main insn stream.
929 Initialize insn_addresses, so that we get reproducible results. */
930 INSN_ADDRESSES_ALLOC (max_uid);
931
932 varying_length = XCNEWVEC (char, max_uid);
933
934 /* Initialize uid_align. We scan instructions
935 from end to start, and keep in align_tab[n] the last seen insn
936 that does an alignment of at least n+1, i.e. the successor
937 in the alignment chain for an insn that does / has a known
938 alignment of n. */
939 uid_align = XCNEWVEC (rtx, max_uid);
940
941 for (i = MAX_CODE_ALIGN + 1; --i >= 0;)
942 align_tab[i] = NULL_RTX;
943 seq = get_last_insn ();
944 for (; seq; seq = PREV_INSN (seq))
945 {
946 int uid = INSN_UID (seq);
947 int log;
948 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq).levels[0].log : 0);
949 uid_align[uid] = align_tab[0];
950 if (log)
951 {
952 /* Found an alignment label. */
953 gcc_checking_assert (log < MAX_CODE_ALIGN + 1);
954 uid_align[uid] = align_tab[log];
955 for (i = log - 1; i >= 0; i--)
956 align_tab[i] = seq;
957 }
958 }
959
960 /* When optimizing, we start assuming minimum length, and keep increasing
961 lengths as we find the need for this, till nothing changes.
962 When not optimizing, we start assuming maximum lengths, and
963 do a single pass to update the lengths. */
964 bool increasing = optimize != 0;
965
966 #ifdef CASE_VECTOR_SHORTEN_MODE
967 if (optimize)
968 {
969 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
970 label fields. */
971
972 int min_shuid = INSN_SHUID (get_insns ()) - 1;
973 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
974 int rel;
975
976 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
977 {
978 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
979 int len, i, min, max, insn_shuid;
980 int min_align;
981 addr_diff_vec_flags flags;
982
983 if (! JUMP_TABLE_DATA_P (insn)
984 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
985 continue;
986 pat = PATTERN (insn);
987 len = XVECLEN (pat, 1);
988 gcc_assert (len > 0);
989 min_align = MAX_CODE_ALIGN;
990 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
991 {
992 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
993 int shuid = INSN_SHUID (lab);
994 if (shuid < min)
995 {
996 min = shuid;
997 min_lab = lab;
998 }
999 if (shuid > max)
1000 {
1001 max = shuid;
1002 max_lab = lab;
1003 }
1004
1005 int label_alignment = LABEL_TO_ALIGNMENT (lab).levels[0].log;
1006 if (min_align > label_alignment)
1007 min_align = label_alignment;
1008 }
1009 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1010 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1011 insn_shuid = INSN_SHUID (insn);
1012 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1013 memset (&flags, 0, sizeof (flags));
1014 flags.min_align = min_align;
1015 flags.base_after_vec = rel > insn_shuid;
1016 flags.min_after_vec = min > insn_shuid;
1017 flags.max_after_vec = max > insn_shuid;
1018 flags.min_after_base = min > rel;
1019 flags.max_after_base = max > rel;
1020 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1021
1022 if (increasing)
1023 PUT_MODE (pat, CASE_VECTOR_SHORTEN_MODE (0, 0, pat));
1024 }
1025 }
1026 #endif /* CASE_VECTOR_SHORTEN_MODE */
1027
1028 /* Compute initial lengths, addresses, and varying flags for each insn. */
1029 int (*length_fun) (rtx_insn *) = increasing ? insn_min_length : insn_default_length;
1030
1031 for (insn_current_address = 0, insn = first;
1032 insn != 0;
1033 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1034 {
1035 uid = INSN_UID (insn);
1036
1037 insn_lengths[uid] = 0;
1038
1039 if (LABEL_P (insn))
1040 {
1041 int log = LABEL_TO_ALIGNMENT (insn).levels[0].log;
1042 if (log)
1043 {
1044 int align = 1 << log;
1045 int new_address = (insn_current_address + align - 1) & -align;
1046 insn_lengths[uid] = new_address - insn_current_address;
1047 }
1048 }
1049
1050 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1051
1052 if (NOTE_P (insn) || BARRIER_P (insn)
1053 || LABEL_P (insn) || DEBUG_INSN_P (insn))
1054 continue;
1055 if (insn->deleted ())
1056 continue;
1057
1058 body = PATTERN (insn);
1059 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
1060 {
1061 /* This only takes room if read-only data goes into the text
1062 section. */
1063 if (JUMP_TABLES_IN_TEXT_SECTION
1064 || readonly_data_section == text_section)
1065 insn_lengths[uid] = (XVECLEN (body,
1066 GET_CODE (body) == ADDR_DIFF_VEC)
1067 * GET_MODE_SIZE (table->get_data_mode ()));
1068 /* Alignment is handled by ADDR_VEC_ALIGN. */
1069 }
1070 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1071 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1072 else if (rtx_sequence *body_seq = dyn_cast <rtx_sequence *> (body))
1073 {
1074 int i;
1075 int const_delay_slots;
1076 if (DELAY_SLOTS)
1077 const_delay_slots = const_num_delay_slots (body_seq->insn (0));
1078 else
1079 const_delay_slots = 0;
1080
1081 int (*inner_length_fun) (rtx_insn *)
1082 = const_delay_slots ? length_fun : insn_default_length;
1083 /* Inside a delay slot sequence, we do not do any branch shortening
1084 if the shortening could change the number of delay slots
1085 of the branch. */
1086 for (i = 0; i < body_seq->len (); i++)
1087 {
1088 rtx_insn *inner_insn = body_seq->insn (i);
1089 int inner_uid = INSN_UID (inner_insn);
1090 int inner_length;
1091
1092 if (GET_CODE (PATTERN (inner_insn)) == ASM_INPUT
1093 || asm_noperands (PATTERN (inner_insn)) >= 0)
1094 inner_length = (asm_insn_count (PATTERN (inner_insn))
1095 * insn_default_length (inner_insn));
1096 else
1097 inner_length = inner_length_fun (inner_insn);
1098
1099 insn_lengths[inner_uid] = inner_length;
1100 if (const_delay_slots)
1101 {
1102 if ((varying_length[inner_uid]
1103 = insn_variable_length_p (inner_insn)) != 0)
1104 varying_length[uid] = 1;
1105 INSN_ADDRESSES (inner_uid) = (insn_current_address
1106 + insn_lengths[uid]);
1107 }
1108 else
1109 varying_length[inner_uid] = 0;
1110 insn_lengths[uid] += inner_length;
1111 }
1112 }
1113 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1114 {
1115 insn_lengths[uid] = length_fun (insn);
1116 varying_length[uid] = insn_variable_length_p (insn);
1117 }
1118
1119 /* If needed, do any adjustment. */
1120 #ifdef ADJUST_INSN_LENGTH
1121 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1122 if (insn_lengths[uid] < 0)
1123 fatal_insn ("negative insn length", insn);
1124 #endif
1125 }
1126
1127 /* Now loop over all the insns finding varying length insns. For each,
1128 get the current insn length. If it has changed, reflect the change.
1129 When nothing changes for a full pass, we are done. */
1130
1131 while (something_changed)
1132 {
1133 something_changed = 0;
1134 insn_current_align = MAX_CODE_ALIGN - 1;
1135 for (insn_current_address = 0, insn = first;
1136 insn != 0;
1137 insn = NEXT_INSN (insn))
1138 {
1139 int new_length;
1140 #ifdef ADJUST_INSN_LENGTH
1141 int tmp_length;
1142 #endif
1143 int length_align;
1144
1145 uid = INSN_UID (insn);
1146
1147 if (rtx_code_label *label = dyn_cast <rtx_code_label *> (insn))
1148 {
1149 int log = LABEL_TO_ALIGNMENT (label).levels[0].log;
1150
1151 #ifdef CASE_VECTOR_SHORTEN_MODE
1152 /* If the mode of a following jump table was changed, we
1153 may need to update the alignment of this label. */
1154
1155 if (JUMP_TABLES_IN_TEXT_SECTION
1156 || readonly_data_section == text_section)
1157 {
1158 rtx_jump_table_data *table = jump_table_for_label (label);
1159 if (table)
1160 {
1161 int newlog = ADDR_VEC_ALIGN (table);
1162 if (newlog != log)
1163 {
1164 log = newlog;
1165 LABEL_TO_ALIGNMENT (insn) = log;
1166 something_changed = 1;
1167 }
1168 }
1169 }
1170 #endif
1171
1172 if (log > insn_current_align)
1173 {
1174 int align = 1 << log;
1175 int new_address= (insn_current_address + align - 1) & -align;
1176 insn_lengths[uid] = new_address - insn_current_address;
1177 insn_current_align = log;
1178 insn_current_address = new_address;
1179 }
1180 else
1181 insn_lengths[uid] = 0;
1182 INSN_ADDRESSES (uid) = insn_current_address;
1183 continue;
1184 }
1185
1186 length_align = INSN_LENGTH_ALIGNMENT (insn);
1187 if (length_align < insn_current_align)
1188 insn_current_align = length_align;
1189
1190 insn_last_address = INSN_ADDRESSES (uid);
1191 INSN_ADDRESSES (uid) = insn_current_address;
1192
1193 #ifdef CASE_VECTOR_SHORTEN_MODE
1194 if (optimize
1195 && JUMP_TABLE_DATA_P (insn)
1196 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1197 {
1198 rtx_jump_table_data *table = as_a <rtx_jump_table_data *> (insn);
1199 rtx body = PATTERN (insn);
1200 int old_length = insn_lengths[uid];
1201 rtx_insn *rel_lab =
1202 safe_as_a <rtx_insn *> (XEXP (XEXP (body, 0), 0));
1203 rtx min_lab = XEXP (XEXP (body, 2), 0);
1204 rtx max_lab = XEXP (XEXP (body, 3), 0);
1205 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1206 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1207 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1208 rtx_insn *prev;
1209 int rel_align = 0;
1210 addr_diff_vec_flags flags;
1211 scalar_int_mode vec_mode;
1212
1213 /* Avoid automatic aggregate initialization. */
1214 flags = ADDR_DIFF_VEC_FLAGS (body);
1215
1216 /* Try to find a known alignment for rel_lab. */
1217 for (prev = rel_lab;
1218 prev
1219 && ! insn_lengths[INSN_UID (prev)]
1220 && ! (varying_length[INSN_UID (prev)] & 1);
1221 prev = PREV_INSN (prev))
1222 if (varying_length[INSN_UID (prev)] & 2)
1223 {
1224 rel_align = LABEL_TO_ALIGNMENT (prev).levels[0].log;
1225 break;
1226 }
1227
1228 /* See the comment on addr_diff_vec_flags in rtl.h for the
1229 meaning of the flags values. base: REL_LAB vec: INSN */
1230 /* Anything after INSN has still addresses from the last
1231 pass; adjust these so that they reflect our current
1232 estimate for this pass. */
1233 if (flags.base_after_vec)
1234 rel_addr += insn_current_address - insn_last_address;
1235 if (flags.min_after_vec)
1236 min_addr += insn_current_address - insn_last_address;
1237 if (flags.max_after_vec)
1238 max_addr += insn_current_address - insn_last_address;
1239 /* We want to know the worst case, i.e. lowest possible value
1240 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1241 its offset is positive, and we have to be wary of code shrink;
1242 		 otherwise, it is negative, and we have to be wary of
1243 size increase. */
1244 if (flags.min_after_base)
1245 {
1246 /* If INSN is between REL_LAB and MIN_LAB, the size
1247 changes we are about to make can change the alignment
1248 within the observed offset, therefore we have to break
1249 it up into two parts that are independent. */
1250 if (! flags.base_after_vec && flags.min_after_vec)
1251 {
1252 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1253 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1254 }
1255 else
1256 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1257 }
1258 else
1259 {
1260 if (flags.base_after_vec && ! flags.min_after_vec)
1261 {
1262 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1263 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1264 }
1265 else
1266 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1267 }
1268 	      /* Likewise, determine the highest possible value
1269 for the offset of MAX_LAB. */
1270 if (flags.max_after_base)
1271 {
1272 if (! flags.base_after_vec && flags.max_after_vec)
1273 {
1274 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1275 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1276 }
1277 else
1278 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1279 }
1280 else
1281 {
1282 if (flags.base_after_vec && ! flags.max_after_vec)
1283 {
1284 max_addr += align_fuzz (max_lab, insn, 0, 0);
1285 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1286 }
1287 else
1288 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1289 }
1290 vec_mode = CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1291 max_addr - rel_addr, body);
1292 if (!increasing
1293 || (GET_MODE_SIZE (vec_mode)
1294 >= GET_MODE_SIZE (table->get_data_mode ())))
1295 PUT_MODE (body, vec_mode);
1296 if (JUMP_TABLES_IN_TEXT_SECTION
1297 || readonly_data_section == text_section)
1298 {
1299 insn_lengths[uid]
1300 = (XVECLEN (body, 1)
1301 * GET_MODE_SIZE (table->get_data_mode ()));
1302 insn_current_address += insn_lengths[uid];
1303 if (insn_lengths[uid] != old_length)
1304 something_changed = 1;
1305 }
1306
1307 continue;
1308 }
1309 #endif /* CASE_VECTOR_SHORTEN_MODE */
1310
1311 if (! (varying_length[uid]))
1312 {
1313 if (NONJUMP_INSN_P (insn)
1314 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1315 {
1316 int i;
1317
1318 body = PATTERN (insn);
1319 for (i = 0; i < XVECLEN (body, 0); i++)
1320 {
1321 rtx inner_insn = XVECEXP (body, 0, i);
1322 int inner_uid = INSN_UID (inner_insn);
1323
1324 INSN_ADDRESSES (inner_uid) = insn_current_address;
1325
1326 insn_current_address += insn_lengths[inner_uid];
1327 }
1328 }
1329 else
1330 insn_current_address += insn_lengths[uid];
1331
1332 continue;
1333 }
1334
1335 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1336 {
1337 rtx_sequence *seqn = as_a <rtx_sequence *> (PATTERN (insn));
1338 int i;
1339
1340 body = PATTERN (insn);
1341 new_length = 0;
1342 for (i = 0; i < seqn->len (); i++)
1343 {
1344 rtx_insn *inner_insn = seqn->insn (i);
1345 int inner_uid = INSN_UID (inner_insn);
1346 int inner_length;
1347
1348 INSN_ADDRESSES (inner_uid) = insn_current_address;
1349
1350 /* insn_current_length returns 0 for insns with a
1351 non-varying length. */
1352 if (! varying_length[inner_uid])
1353 inner_length = insn_lengths[inner_uid];
1354 else
1355 inner_length = insn_current_length (inner_insn);
1356
1357 if (inner_length != insn_lengths[inner_uid])
1358 {
1359 if (!increasing || inner_length > insn_lengths[inner_uid])
1360 {
1361 insn_lengths[inner_uid] = inner_length;
1362 something_changed = 1;
1363 }
1364 else
1365 inner_length = insn_lengths[inner_uid];
1366 }
1367 insn_current_address += inner_length;
1368 new_length += inner_length;
1369 }
1370 }
1371 else
1372 {
1373 new_length = insn_current_length (insn);
1374 insn_current_address += new_length;
1375 }
1376
1377 #ifdef ADJUST_INSN_LENGTH
1378 /* If needed, do any adjustment. */
1379 tmp_length = new_length;
1380 ADJUST_INSN_LENGTH (insn, new_length);
1381 insn_current_address += (new_length - tmp_length);
1382 #endif
1383
1384 if (new_length != insn_lengths[uid]
1385 && (!increasing || new_length > insn_lengths[uid]))
1386 {
1387 insn_lengths[uid] = new_length;
1388 something_changed = 1;
1389 }
1390 else
1391 insn_current_address += insn_lengths[uid] - new_length;
1392 }
1393 /* For a non-optimizing compile, do only a single pass. */
1394 if (!increasing)
1395 break;
1396 }
1397 crtl->max_insn_address = insn_current_address;
1398 free (varying_length);
1399 }
1400
1401 /* Given the body of an INSN known to be generated by an ASM statement, return
1402 the number of machine instructions likely to be generated for this insn.
1403 This is used to compute its length. */
1404
1405 static int
1406 asm_insn_count (rtx body)
1407 {
1408 const char *templ;
1409
1410 if (GET_CODE (body) == ASM_INPUT)
1411 templ = XSTR (body, 0);
1412 else
1413 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1414
1415 return asm_str_count (templ);
1416 }
1417
1418 /* Return the number of machine instructions likely to be generated for the
1419 inline-asm template. */
1420 int
1421 asm_str_count (const char *templ)
1422 {
1423 int count = 1;
1424
1425 if (!*templ)
1426 return 0;
1427
1428 for (; *templ; templ++)
1429 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1430 || *templ == '\n')
1431 count++;
1432
1433 return count;
1434 }
1435
1436 /* Return true if DWARF2 debug info can be emitted for DECL. */
1438
1439 static bool
1440 dwarf2_debug_info_emitted_p (tree decl)
1441 {
1442 /* When DWARF2 debug info is not generated internally. */
1443 if (!dwarf_debuginfo_p () && !dwarf_based_debuginfo_p ())
1444 return false;
1445
1446 if (DECL_IGNORED_P (decl))
1447 return false;
1448
1449 return true;
1450 }
1451
1452 /* Return scope resulting from combination of S1 and S2. */
1453 static tree
1454 choose_inner_scope (tree s1, tree s2)
1455 {
1456 if (!s1)
1457 return s2;
1458 if (!s2)
1459 return s1;
1460 if (BLOCK_NUMBER (s1) > BLOCK_NUMBER (s2))
1461 return s1;
1462 return s2;
1463 }
1464
/* Emit lexical block notes needed to change scope from S1 to S2
   before ORIG_INSN.  */

static void
change_scope (rtx_insn *orig_insn, tree s1, tree s2)
{
  rtx_insn *insn = orig_insn;
  tree com = NULL_TREE;
  tree ts1 = s1, ts2 = s2;
  tree s;

  /* Find the closest common ancestor of S1 and S2 by walking both
     chains upward; deeper blocks have larger BLOCK_NUMBERs, so always
     ascend from the deeper side first.  */
  while (ts1 != ts2)
    {
      gcc_assert (ts1 && ts2);
      if (BLOCK_NUMBER (ts1) > BLOCK_NUMBER (ts2))
	ts1 = BLOCK_SUPERCONTEXT (ts1);
      else if (BLOCK_NUMBER (ts1) < BLOCK_NUMBER (ts2))
	ts2 = BLOCK_SUPERCONTEXT (ts2);
      else
	{
	  ts1 = BLOCK_SUPERCONTEXT (ts1);
	  ts2 = BLOCK_SUPERCONTEXT (ts2);
	}
    }
  com = ts1;

  /* Close scopes: emit a BLOCK_END note for every block from S1 up to
     (but not including) the common ancestor.  */
  s = s1;
  while (s != com)
    {
      rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
      NOTE_BLOCK (note) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }

  /* Open scopes: INSN is updated to the newly emitted note each time,
     so each outer BLOCK_BEG ends up before the inner ones, yielding
     outermost-first order in the insn stream.  */
  s = s2;
  while (s != com)
    {
      insn = emit_note_before (NOTE_INSN_BLOCK_BEG, insn);
      NOTE_BLOCK (insn) = s;
      s = BLOCK_SUPERCONTEXT (s);
    }
}
1508
/* Rebuild all the NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes based
   on the scope tree and the newly reordered instructions.  */

static void
reemit_insn_block_notes (void)
{
  /* Start in the function's outermost block.  */
  tree cur_block = DECL_INITIAL (cfun->decl);
  rtx_insn *insn;

  insn = get_insns ();
  for (; insn; insn = NEXT_INSN (insn))
    {
      tree this_block;

      /* Prevent lexical blocks from straddling section boundaries.  */
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	    {
	      /* Close every currently open block before the section
		 switch and immediately reopen it after, so no block
		 spans two sections.  */
	      for (tree s = cur_block; s != DECL_INITIAL (cfun->decl);
		   s = BLOCK_SUPERCONTEXT (s))
		{
		  rtx_note *note = emit_note_before (NOTE_INSN_BLOCK_END, insn);
		  NOTE_BLOCK (note) = s;
		  note = emit_note_after (NOTE_INSN_BLOCK_BEG, insn);
		  NOTE_BLOCK (note) = s;
		}
	    }
	    break;

	  case NOTE_INSN_BEGIN_STMT:
	  case NOTE_INSN_INLINE_ENTRY:
	    /* Debug marker notes carry their scope in their location.  */
	    this_block = LOCATION_BLOCK (NOTE_MARKER_LOCATION (insn));
	    goto set_cur_block_to_this_block;

	  default:
	    continue;
	  }

      if (!active_insn_p (insn))
	continue;

      /* Avoid putting scope notes between jump table and its label.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      this_block = insn_scope (insn);
      /* For sequences compute scope resulting from merging all scopes
	 of instructions nested inside.  */
      if (rtx_sequence *body = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	{
	  int i;

	  this_block = NULL;
	  for (i = 0; i < body->len (); i++)
	    this_block = choose_inner_scope (this_block,
					     insn_scope (body->insn (i)));
	}
    set_cur_block_to_this_block:
      if (! this_block)
	{
	  /* Insns without a location stay in the current block; located
	     insns with no scope fall back to the function's top block.  */
	  if (INSN_LOCATION (insn) == UNKNOWN_LOCATION)
	    continue;
	  else
	    this_block = DECL_INITIAL (cfun->decl);
	}

      if (this_block != cur_block)
	{
	  /* Emit the BLOCK_END/BLOCK_BEG notes needed to move from
	     CUR_BLOCK to THIS_BLOCK before INSN.  */
	  change_scope (insn, cur_block, this_block);
	  cur_block = this_block;
	}
    }

  /* change_scope emits before the insn, not after.  Close any blocks
     still open at the end of the function by emitting a scratch note,
     changing scope before it, then deleting it.  */
  rtx_note *note = emit_note (NOTE_INSN_DELETED);
  change_scope (note, cur_block, DECL_INITIAL (cfun->decl));
  delete_insn (note);

  reorder_blocks ();
}
1591
1592 static const char *some_local_dynamic_name;
1593
/* Locate some local-dynamic symbol still in use by this function
   so that we can print its name in local-dynamic base patterns.
   Return null if there are no local-dynamic references.  */

const char *
get_some_local_dynamic_name ()
{
  subrtx_iterator::array_type array;
  rtx_insn *insn;

  /* Return the cached result of a previous search, if any.  */
  if (some_local_dynamic_name)
    return some_local_dynamic_name;

  /* Scan every sub-rtx of every non-debug insn looking for a SYMBOL_REF
     with local-dynamic TLS model.  */
  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
	{
	  const_rtx x = *iter;
	  if (GET_CODE (x) == SYMBOL_REF)
	    {
	      if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
		return some_local_dynamic_name = XSTR (x, 0);
	      /* Constant-pool entries may themselves contain symbol
		 references; descend into their contents too.  */
	      if (CONSTANT_POOL_ADDRESS_P (x))
		iter.substitute (get_pool_constant (x));
	    }
	}

  return 0;
}
1623
1624 /* Arrange for us to emit a source location note before any further
1625 real insns or section changes, by setting the SEEN_NEXT_VIEW bit in
1626 *SEEN, as long as we are keeping track of location views. The bit
1627 indicates we have referenced the next view at the current PC, so we
1628 have to emit it. This should be called next to the var_location
1629 debug hook. */
1630
1631 static inline void
1632 set_next_view_needed (int *seen)
1633 {
1634 if (debug_variable_location_views)
1635 *seen |= SEEN_NEXT_VIEW;
1636 }
1637
/* Clear the flag in *SEEN indicating we need to emit the next view.
   This should be called next to the source_line debug hook.  */

static inline void
clear_next_view_needed (int *seen)
{
  *seen &= ~SEEN_NEXT_VIEW;
}
1646
1647 /* Test whether we have a pending request to emit the next view in
1648 *SEEN, and emit it if needed, clearing the request bit. */
1649
1650 static inline void
1651 maybe_output_next_view (int *seen)
1652 {
1653 if ((*seen & SEEN_NEXT_VIEW) != 0)
1654 {
1655 clear_next_view_needed (seen);
1656 (*debug_hooks->source_line) (last_linenum, last_columnnum,
1657 last_filename, last_discriminator,
1658 false);
1659 }
1660 }
1661
1662 /* We want to emit param bindings (before the first begin_stmt) in the
1663 initial view, if we are emitting views. To that end, we may
1664 consume initial notes in the function, processing them in
1665 final_start_function, before signaling the beginning of the
1666 prologue, rather than in final.
1667
1668 We don't test whether the DECLs are PARM_DECLs: the assumption is
1669 that there will be a NOTE_INSN_BEGIN_STMT marker before any
1670 non-parameter NOTE_INSN_VAR_LOCATION. It's ok if the marker is not
1671 there, we'll just have more variable locations bound in the initial
1672 view, which is consistent with their being bound without any code
1673 that would give them a value. */
1674
1675 static inline bool
1676 in_initial_view_p (rtx_insn *insn)
1677 {
1678 return (!DECL_IGNORED_P (current_function_decl)
1679 && debug_variable_location_views
1680 && insn && GET_CODE (insn) == NOTE
1681 && (NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
1682 || NOTE_KIND (insn) == NOTE_INSN_DELETED));
1683 }
1684
/* Output assembler code for the start of a function,
   and initialize some of the variables in this file
   for the new function.  The label for the function and associated
   assembler pseudo-ops have already been output in `assemble_start_function'.

   FIRST is the first insn of the rtl for the function being compiled.
   FILE is the file to write assembler code to.
   SEEN should be initially set to zero, and it may be updated to
   indicate we have references to the next location view, that would
   require us to emit it at the current PC.
   OPTIMIZE_P is nonzero if we should eliminate redundant
   test and compare insns.  */

static void
final_start_function_1 (rtx_insn **firstp, FILE *file, int *seen,
			int optimize_p ATTRIBUTE_UNUSED)
{
  block_depth = 0;

  this_is_asm_operands = 0;

  need_profile_function = false;

  /* Seed the per-function source-location tracking state from the
     prologue location.  */
  last_filename = LOCATION_FILE (prologue_location);
  last_linenum = LOCATION_LINE (prologue_location);
  last_columnnum = LOCATION_COLUMN (prologue_location);
  last_discriminator = discriminator = 0;
  last_bb_discriminator = bb_discriminator = 0;
  force_source_line = false;

  high_block_linenum = high_function_linenum = last_linenum;

  if (flag_sanitize & SANITIZE_ADDRESS)
    asan_function_start ();

  /* When tracking location views, consume any leading var-location and
     deleted-insn notes now, so parameter bindings land in the initial
     view (see in_initial_view_p).  *FIRSTP is advanced past them so
     final_1 does not process them again.  */
  rtx_insn *first = *firstp;
  if (in_initial_view_p (first))
    {
      do
	{
	  final_scan_insn (first, file, 0, 0, seen);
	  first = NEXT_INSN (first);
	}
      while (in_initial_view_p (first));
      *firstp = first;
    }

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->begin_prologue (last_linenum, last_columnnum,
				 last_filename);

  /* Frame (CFI) output may be needed even when debug info is not
     emitted through the dwarf2 hooks.  */
  if (!dwarf2_debug_info_emitted_p (current_function_decl))
    dwarf2out_begin_prologue (0, 0, NULL);

  if (DECL_IGNORED_P (current_function_decl) && last_linenum && last_filename)
    debug_hooks->set_ignored_loc (last_linenum, last_columnnum, last_filename);

#ifdef LEAF_REG_REMAP
  if (crtl->uses_only_leaf_regs)
    leaf_renumber_regs (first);
#endif

  /* The Sun386i and perhaps other machines don't work right
     if the profiling code comes after the prologue.  */
  if (targetm.profile_before_prologue () && crtl->profile)
    {
      if (targetm.asm_out.function_prologue == default_function_pro_epilogue
	  && targetm.have_prologue ())
	{
	  /* Scan the leading notes: if only harmless notes precede the
	     first basic block or FUNCTION_BEG, the profiling code can be
	     deferred to NOTE_INSN_PROLOGUE_END.  INSN is left non-NULL
	     to request deferral, NULL to profile immediately.  */
	  rtx_insn *insn;
	  for (insn = first; insn; insn = NEXT_INSN (insn))
	    if (!NOTE_P (insn))
	      {
		insn = NULL;
		break;
	      }
	    else if (NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK
		     || NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	      break;
	    else if (NOTE_KIND (insn) == NOTE_INSN_DELETED
		     || NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION)
	      continue;
	    else
	      {
		insn = NULL;
		break;
	      }

	  if (insn)
	    need_profile_function = true;
	  else
	    profile_function (file);
	}
      else
	profile_function (file);
    }

  /* If debugging, assign block numbers to all of the blocks in this
     function.  */
  if (write_symbols)
    {
      reemit_insn_block_notes ();
      number_blocks (current_function_decl);
      /* We never actually put out begin/end notes for the top-level
	 block in the function.  But, conceptually, that block is
	 always needed.  */
      TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
    }

  /* Warn if the (minimum known) frame size exceeds the user's limit.  */
  unsigned HOST_WIDE_INT min_frame_size
    = constant_lower_bound (get_frame_size ());
  if (min_frame_size > (unsigned HOST_WIDE_INT) warn_frame_larger_than_size)
    {
      /* Issue a warning */
      warning (OPT_Wframe_larger_than_,
	       "the frame size of %wu bytes is larger than %wu bytes",
	       min_frame_size, warn_frame_larger_than_size);
    }

  /* First output the function prologue: code to set up the stack frame.  */
  targetm.asm_out.function_prologue (file);

  /* If the machine represents the prologue as RTL, the profiling code must
     be emitted when NOTE_INSN_PROLOGUE_END is scanned.  */
  if (! targetm.have_prologue ())
    profile_after_prologue (file);
}
1812
/* This is an exported final_start_function_1, callable without SEEN.  */

void
final_start_function (rtx_insn *first, FILE *file,
		      int optimize_p ATTRIBUTE_UNUSED)
{
  int seen = 0;
  final_start_function_1 (&first, file, &seen, optimize_p);
  /* Callers of this entry point must not leave a pending view request;
     views are only used through the internal _1 API.  */
  gcc_assert (seen == 0);
}
1823
1824 static void
1825 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1826 {
1827 if (!targetm.profile_before_prologue () && crtl->profile)
1828 profile_function (file);
1829 }
1830
/* Output the profiling counter label and the call to the profiler
   (FUNCTION_PROFILER) for the current function into FILE, preserving
   the struct-value and static-chain registers around the call on
   targets that provide ASM_OUTPUT_REG_PUSH.  */
static void
profile_function (FILE *file ATTRIBUTE_UNUSED)
{
#ifndef NO_PROFILE_COUNTERS
# define NO_PROFILE_COUNTERS 0
#endif
#ifdef ASM_OUTPUT_REG_PUSH
  rtx sval = NULL, chain = NULL;

  /* Registers that must survive the profiler call: the aggregate
     return pointer and the static chain, if this function uses them.  */
  if (cfun->returns_struct)
    sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
					   true);
  if (cfun->static_chain_decl)
    chain = targetm.calls.static_chain (current_function_decl, true);
#endif /* ASM_OUTPUT_REG_PUSH */

  /* Emit the per-function profile counter ("LPn") in the data section,
     unless the target keeps its own counters.  */
  if (! NO_PROFILE_COUNTERS)
    {
      int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
      switch_to_section (data_section);
      ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
      targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
      assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
    }

  switch_to_section (current_function_section ());

#ifdef ASM_OUTPUT_REG_PUSH
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
#endif

  FUNCTION_PROFILER (file, current_function_funcdef_no);

  /* Pops must mirror the pushes above in reverse order.  */
#ifdef ASM_OUTPUT_REG_PUSH
  if (chain && REG_P (chain))
    ASM_OUTPUT_REG_POP (file, REGNO (chain));
  if (sval && REG_P (sval))
    ASM_OUTPUT_REG_POP (file, REGNO (sval));
#endif
}
1874
/* Output assembler code for the end of a function.
   For clarity, args are same as those of `final_start_function'
   even though not all of them are needed.  */

void
final_end_function (void)
{
  app_disable ();

  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_function (high_function_linenum);

  /* Finally, output the function epilogue:
     code to restore the stack frame and return to the caller.  */
  targetm.asm_out.function_epilogue (asm_out_file);

  /* And debug output.  */
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->end_epilogue (last_linenum, last_filename);

  /* Close the frame (CFI) info if it was not handled through the
     dwarf2 debug hooks (mirrors dwarf2out_begin_prologue in
     final_start_function_1).  */
  if (!dwarf2_debug_info_emitted_p (current_function_decl)
      && dwarf2out_do_frame ())
    dwarf2out_end_epilogue (last_linenum, last_filename);

  /* Reset the per-function cache used by get_some_local_dynamic_name.  */
  some_local_dynamic_name = 0;
}
1901
1902
1904 /* Dumper helper for basic block information. FILE is the assembly
1905 output file, and INSN is the instruction being emitted. */
1906
1907 static void
1908 dump_basic_block_info (FILE *file, rtx_insn *insn, basic_block *start_to_bb,
1909 basic_block *end_to_bb, int bb_map_size, int *bb_seqn)
1910 {
1911 basic_block bb;
1912
1913 if (!flag_debug_asm)
1914 return;
1915
1916 if (INSN_UID (insn) < bb_map_size
1917 && (bb = start_to_bb[INSN_UID (insn)]) != NULL)
1918 {
1919 edge e;
1920 edge_iterator ei;
1921
1922 fprintf (file, "%s BLOCK %d", ASM_COMMENT_START, bb->index);
1923 if (bb->count.initialized_p ())
1924 {
1925 fprintf (file, ", count:");
1926 bb->count.dump (file);
1927 }
1928 fprintf (file, " seq:%d", (*bb_seqn)++);
1929 fprintf (file, "\n%s PRED:", ASM_COMMENT_START);
1930 FOR_EACH_EDGE (e, ei, bb->preds)
1931 {
1932 dump_edge_info (file, e, TDF_DETAILS, 0);
1933 }
1934 fprintf (file, "\n");
1935 }
1936 if (INSN_UID (insn) < bb_map_size
1937 && (bb = end_to_bb[INSN_UID (insn)]) != NULL)
1938 {
1939 edge e;
1940 edge_iterator ei;
1941
1942 fprintf (asm_out_file, "%s SUCC:", ASM_COMMENT_START);
1943 FOR_EACH_EDGE (e, ei, bb->succs)
1944 {
1945 dump_edge_info (asm_out_file, e, TDF_DETAILS, 1);
1946 }
1947 fprintf (file, "\n");
1948 }
1949 }
1950
/* Output assembler code for some insns: all or part of a function.
   For description of args, see `final_start_function', above.  */

static void
final_1 (rtx_insn *first, FILE *file, int seen, int optimize_p)
{
  rtx_insn *insn, *next;

  /* Used for -dA dump.  */
  basic_block *start_to_bb = NULL;
  basic_block *end_to_bb = NULL;
  int bb_map_size = 0;
  int bb_seqn = 0;

  last_ignored_compare = 0;

  init_recog ();

  CC_STATUS_INIT;

  /* When annotating the assembly (-dA), build maps from insn UID to
     the basic block starting/ending at that insn, consumed by
     dump_basic_block_info below.  */
  if (flag_debug_asm)
    {
      basic_block bb;

      bb_map_size = get_max_uid () + 1;
      start_to_bb = XCNEWVEC (basic_block, bb_map_size);
      end_to_bb = XCNEWVEC (basic_block, bb_map_size);

      /* There is no cfg for a thunk.  */
      if (!cfun->is_thunk)
	FOR_EACH_BB_REVERSE_FN (bb, cfun)
	  {
	    start_to_bb[INSN_UID (BB_HEAD (bb))] = bb;
	    end_to_bb[INSN_UID (BB_END (bb))] = bb;
	  }
    }

  /* Output the insns.  */
  for (insn = first; insn;)
    {
      if (HAVE_ATTR_length)
	{
	  if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
	    {
	      /* This can be triggered by bugs elsewhere in the compiler if
		 new insns are created after init_insn_lengths is called.  */
	      gcc_assert (NOTE_P (insn));
	      insn_current_address = -1;
	    }
	  else
	    insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
	  /* final can be seen as an iteration of shorten_branches that
	     does nothing (since a fixed point has already been reached).  */
	  insn_last_address = insn_current_address;
	}

      dump_basic_block_info (file, insn, start_to_bb, end_to_bb,
			     bb_map_size, &bb_seqn);
      /* final_scan_insn returns the next insn to process; it may skip
	 ahead, e.g. past the insns inside a SEQUENCE.  */
      insn = final_scan_insn (insn, file, optimize_p, 0, &seen);
    }

  /* Flush any location view still pending at the end of the stream.  */
  maybe_output_next_view (&seen);

  if (flag_debug_asm)
    {
      free (start_to_bb);
      free (end_to_bb);
    }

  /* Remove CFI notes, to avoid compare-debug failures.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_KIND (insn) == NOTE_INSN_CFI
	      || NOTE_KIND (insn) == NOTE_INSN_CFI_LABEL))
	delete_insn (insn);
    }
}
2030
2031 /* This is an exported final_1, callable without SEEN. */
2032
2033 void
2034 final (rtx_insn *first, FILE *file, int optimize_p)
2035 {
2036 /* Those that use the internal final_start_function_1/final_1 API
2037 skip initial debug bind notes in final_start_function_1, and pass
2038 the modified FIRST to final_1. But those that use the public
2039 final_start_function/final APIs, final_start_function can't move
2040 FIRST because it's not passed by reference, so if they were
2041 skipped there, skip them again here. */
2042 while (in_initial_view_p (first))
2043 first = NEXT_INSN (first);
2044
2045 final_1 (first, file, 0, optimize_p);
2046 }
2047
2048 const char *
2050 get_insn_template (int code, rtx_insn *insn)
2051 {
2052 switch (insn_data[code].output_format)
2053 {
2054 case INSN_OUTPUT_FORMAT_SINGLE:
2055 return insn_data[code].output.single;
2056 case INSN_OUTPUT_FORMAT_MULTI:
2057 return insn_data[code].output.multi[which_alternative];
2058 case INSN_OUTPUT_FORMAT_FUNCTION:
2059 gcc_assert (insn);
2060 return (*insn_data[code].output.function) (recog_data.operand, insn);
2061
2062 default:
2063 gcc_unreachable ();
2064 }
2065 }
2066
/* Emit the appropriate declaration for an alternate-entry-point
   symbol represented by INSN, to FILE.  INSN is a CODE_LABEL with
   LABEL_KIND != LABEL_NORMAL.

   The case fall-through in this function is intentional: weak entries
   are also globalized, and both weak and global entries get the type
   directive and label of a static entry.  */
static void
output_alternate_entry_point (FILE *file, rtx_insn *insn)
{
  const char *name = LABEL_NAME (insn);

  switch (LABEL_KIND (insn))
    {
    case LABEL_WEAK_ENTRY:
#ifdef ASM_WEAKEN_LABEL
      ASM_WEAKEN_LABEL (file, name);
      gcc_fallthrough ();
#endif
    case LABEL_GLOBAL_ENTRY:
      targetm.asm_out.globalize_label (file, name);
      gcc_fallthrough ();
    case LABEL_STATIC_ENTRY:
#ifdef ASM_OUTPUT_TYPE_DIRECTIVE
      ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
#endif
      ASM_OUTPUT_LABEL (file, name);
      break;

    case LABEL_NORMAL:
    default:
      /* Normal labels are handled elsewhere; reaching here is a bug.  */
      gcc_unreachable ();
    }
}
2099
2100 /* Given a CALL_INSN, find and return the nested CALL. */
2101 static rtx
2102 call_from_call_insn (rtx_call_insn *insn)
2103 {
2104 rtx x;
2105 gcc_assert (CALL_P (insn));
2106 x = PATTERN (insn);
2107
2108 while (GET_CODE (x) != CALL)
2109 {
2110 switch (GET_CODE (x))
2111 {
2112 default:
2113 gcc_unreachable ();
2114 case COND_EXEC:
2115 x = COND_EXEC_CODE (x);
2116 break;
2117 case PARALLEL:
2118 x = XVECEXP (x, 0, 0);
2119 break;
2120 case SET:
2121 x = XEXP (x, 1);
2122 break;
2123 }
2124 }
2125 return x;
2126 }
2127
2128 /* Print a comment into the asm showing FILENAME, LINENUM, and the
2129 corresponding source line, if available. */
2130
2131 static void
2132 asm_show_source (const char *filename, int linenum)
2133 {
2134 if (!filename)
2135 return;
2136
2137 char_span line = location_get_source_line (filename, linenum);
2138 if (!line)
2139 return;
2140
2141 fprintf (asm_out_file, "%s %s:%i: ", ASM_COMMENT_START, filename, linenum);
2142 /* "line" is not 0-terminated, so we must use its length. */
2143 fwrite (line.get_buffer (), 1, line.length (), asm_out_file);
2144 fputc ('\n', asm_out_file);
2145 }
2146
2147 /* Judge if an absolute jump table is relocatable. */
2148
2149 bool
2150 jumptable_relocatable (void)
2151 {
2152 bool relocatable = false;
2153
2154 if (!CASE_VECTOR_PC_RELATIVE
2155 && !targetm.asm_out.generate_pic_addr_diff_vec ()
2156 && targetm_common.have_named_sections)
2157 relocatable = targetm.asm_out.reloc_rw_mask ();
2158
2159 return relocatable;
2160 }
2161
2162 /* The final scan for one insn, INSN.
2163 Args are same as in `final', except that INSN
2164 is the insn being scanned.
2165 Value returned is the next insn to be scanned.
2166
2167 NOPEEPHOLES is the flag to disallow peephole processing (currently
2168 used for within delayed branch sequence output).
2169
2170 SEEN is used to track the end of the prologue, for emitting
2171 debug information. We force the emission of a line note after
2172 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG. */
2173
static rtx_insn *
final_scan_insn_1 (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
		   int nopeepholes ATTRIBUTE_UNUSED, int *seen)
{
  rtx_insn *next;
  rtx_jump_table_data *table;

  insn_counter++;

  /* Ignore deleted insns.  These can occur when we split insns (due to a
     template of "#") while not optimizing.  */
  if (insn->deleted ())
    return NEXT_INSN (insn);

  switch (GET_CODE (insn))
    {
    case NOTE:
      switch (NOTE_KIND (insn))
	{
	case NOTE_INSN_DELETED:
	case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
	  break;

	case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	  maybe_output_next_view (seen);

	  output_function_exception_table (0);

	  if (targetm.asm_out.unwind_emit)
	    targetm.asm_out.unwind_emit (asm_out_file, insn);

	  in_cold_section_p = !in_cold_section_p;

	  gcc_checking_assert (in_cold_section_p);
	  if (in_cold_section_p)
	    cold_function_name
	      = clone_function_name (current_function_decl, "cold");

	  if (dwarf2out_do_frame ())
	    {
	      dwarf2out_switch_text_section ();
	      if (!dwarf2_debug_info_emitted_p (current_function_decl)
		  && !DECL_IGNORED_P (current_function_decl))
		debug_hooks->switch_text_section ();
	    }
	  else if (!DECL_IGNORED_P (current_function_decl))
	    debug_hooks->switch_text_section ();
	  if (DECL_IGNORED_P (current_function_decl) && last_linenum
	      && last_filename)
	    debug_hooks->set_ignored_loc (last_linenum, last_columnnum,
					  last_filename);

	  switch_to_section (current_function_section ());
	  targetm.asm_out.function_switched_text_sections (asm_out_file,
							   current_function_decl,
							   in_cold_section_p);
	  /* Emit a label for the split cold section.  Form label name by
	     suffixing "cold" to the original function's name.  */
	  if (in_cold_section_p)
	    {
#ifdef ASM_DECLARE_COLD_FUNCTION_NAME
	      ASM_DECLARE_COLD_FUNCTION_NAME (asm_out_file,
					      IDENTIFIER_POINTER
						(cold_function_name),
					      current_function_decl);
#else
	      ASM_OUTPUT_LABEL (asm_out_file,
				IDENTIFIER_POINTER (cold_function_name));
#endif
	      if (dwarf2out_do_frame ()
		  && cfun->fde->dw_fde_second_begin != NULL)
		ASM_OUTPUT_LABEL (asm_out_file, cfun->fde->dw_fde_second_begin);
	    }
	  break;

	case NOTE_INSN_BASIC_BLOCK:
	  if (need_profile_function)
	    {
	      profile_function (asm_out_file);
	      need_profile_function = false;
	    }

	  if (targetm.asm_out.unwind_emit)
	    targetm.asm_out.unwind_emit (asm_out_file, insn);

	  /* Record the block's discriminator; compute_discriminator
	     reads it back when no decl-to-instance map is in use.  */
	  bb_discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
	  break;

	case NOTE_INSN_EH_REGION_BEG:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_EH_REGION_END:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
				  NOTE_EH_HANDLER (insn));
	  break;

	case NOTE_INSN_PROLOGUE_END:
	  targetm.asm_out.function_end_prologue (file);
	  profile_after_prologue (file);

	  /* Seeing this note after one of the pair (PROLOGUE_END /
	     FUNCTION_BEG) has already been noted forces a fresh
	     source-line note; otherwise just record that one of the
	     pair was seen.  */
	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_EPILOGUE_BEG:
	  if (!DECL_IGNORED_P (current_function_decl))
	    (*debug_hooks->begin_epilogue) (last_linenum, last_filename);
	  targetm.asm_out.function_begin_epilogue (file);
	  break;

	case NOTE_INSN_CFI:
	  dwarf2out_emit_cfi (NOTE_CFI (insn));
	  break;

	case NOTE_INSN_CFI_LABEL:
	  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LCFI",
				  NOTE_LABEL_NUMBER (insn));
	  break;

	case NOTE_INSN_FUNCTION_BEG:
	  if (need_profile_function)
	    {
	      profile_function (asm_out_file);
	      need_profile_function = false;
	    }

	  app_disable ();
	  if (!DECL_IGNORED_P (current_function_decl))
	    debug_hooks->end_prologue (last_linenum, last_filename);

	  /* Same SEEN bookkeeping as for NOTE_INSN_PROLOGUE_END above.  */
	  if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
	    {
	      *seen |= SEEN_EMITTED;
	      force_source_line = true;
	    }
	  else
	    *seen |= SEEN_NOTE;

	  break;

	case NOTE_INSN_BLOCK_BEG:
	  if (debug_info_level >= DINFO_LEVEL_NORMAL
	      || dwarf_debuginfo_p ()
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();
	      ++block_depth;
	      high_block_linenum = last_linenum;

	      /* Output debugging info about the symbol-block beginning.  */
	      if (!DECL_IGNORED_P (current_function_decl))
		debug_hooks->begin_block (last_linenum, n);

	      /* Mark this block as output.  */
	      TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
	      BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn)) = in_cold_section_p;
	    }
	  if (write_symbols == DBX_DEBUG)
	    {
	      location_t *locus_ptr
		= block_nonartificial_location (NOTE_BLOCK (insn));

	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
		  override_discriminator = compute_discriminator (*locus_ptr);
		}
	    }
	  break;

	case NOTE_INSN_BLOCK_END:
	  maybe_output_next_view (seen);

	  if (debug_info_level >= DINFO_LEVEL_NORMAL
	      || dwarf_debuginfo_p ()
	      || write_symbols == VMS_DEBUG)
	    {
	      int n = BLOCK_NUMBER (NOTE_BLOCK (insn));

	      app_disable ();

	      /* End of a symbol-block.  */
	      --block_depth;
	      gcc_assert (block_depth >= 0);

	      if (!DECL_IGNORED_P (current_function_decl))
		debug_hooks->end_block (high_block_linenum, n);
	      gcc_assert (BLOCK_IN_COLD_SECTION_P (NOTE_BLOCK (insn))
			  == in_cold_section_p);
	    }
	  if (write_symbols == DBX_DEBUG)
	    {
	      tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
	      location_t *locus_ptr
		= block_nonartificial_location (outer_block);

	      /* Restore the enclosing block's override location, or
		 clear it when the outer scope has none.  */
	      if (locus_ptr != NULL)
		{
		  override_filename = LOCATION_FILE (*locus_ptr);
		  override_linenum = LOCATION_LINE (*locus_ptr);
		  override_columnnum = LOCATION_COLUMN (*locus_ptr);
		  override_discriminator = compute_discriminator (*locus_ptr);
		}
	      else
		{
		  override_filename = NULL;
		  override_linenum = 0;
		  override_columnnum = 0;
		  override_discriminator = 0;
		}
	    }
	  break;

	case NOTE_INSN_DELETED_LABEL:
	  /* Emit the label.  We may have deleted the CODE_LABEL because
	     the label could be proved to be unreachable, though still
	     referenced (in the form of having its address taken).  */
	  ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
	  break;

	case NOTE_INSN_DELETED_DEBUG_LABEL:
	  /* Similarly, but need to use different namespace for it.  */
	  if (CODE_LABEL_NUMBER (insn) != -1)
	    ASM_OUTPUT_DEBUG_LABEL (file, "LDL", CODE_LABEL_NUMBER (insn));
	  break;

	case NOTE_INSN_VAR_LOCATION:
	  if (!DECL_IGNORED_P (current_function_decl))
	    {
	      debug_hooks->var_location (insn);
	      set_next_view_needed (seen);
	    }
	  break;

	case NOTE_INSN_BEGIN_STMT:
	  gcc_checking_assert (cfun->debug_nonbind_markers);
	  if (!DECL_IGNORED_P (current_function_decl)
	      && notice_source_line (insn, NULL))
	    {
	      /* Also reached via goto from NOTE_INSN_INLINE_ENTRY below.  */
	    output_source_line:
	      (*debug_hooks->source_line) (last_linenum, last_columnnum,
					   last_filename, last_discriminator,
					   true);
	      clear_next_view_needed (seen);
	    }
	  break;

	case NOTE_INSN_INLINE_ENTRY:
	  gcc_checking_assert (cfun->debug_nonbind_markers);
	  if (!DECL_IGNORED_P (current_function_decl)
	      && notice_source_line (insn, NULL))
	    {
	      (*debug_hooks->inline_entry) (LOCATION_BLOCK
					    (NOTE_MARKER_LOCATION (insn)));
	      goto output_source_line;
	    }
	  break;

	default:
	  gcc_unreachable ();
	  break;
	}
      break;

    case BARRIER:
      break;

    case CODE_LABEL:
      /* The target port might emit labels in the output function for
	 some insn, e.g. sh.cc output_branchy_insn.  */
      if (CODE_LABEL_NUMBER (insn) <= max_labelno)
	{
	  align_flags alignment = LABEL_TO_ALIGNMENT (insn);
	  if (alignment.levels[0].log && NEXT_INSN (insn))
	    {
#ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
	      /* Output both primary and secondary alignment.  */
	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[0].log,
					 alignment.levels[0].maxskip);
	      ASM_OUTPUT_MAX_SKIP_ALIGN (file, alignment.levels[1].log,
					 alignment.levels[1].maxskip);
#else
#ifdef ASM_OUTPUT_ALIGN_WITH_NOP
	      ASM_OUTPUT_ALIGN_WITH_NOP (file, alignment.levels[0].log);
#else
	      ASM_OUTPUT_ALIGN (file, alignment.levels[0].log);
#endif
#endif
	    }
	}
      CC_STATUS_INIT;

      if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
	debug_hooks->label (as_a <rtx_code_label *> (insn));

      app_disable ();

      /* If this label is followed by a jump-table, make sure we put
	 the label in the read-only section.  Also possibly write the
	 label and jump table together.  */
      table = jump_table_for_label (as_a <rtx_code_label *> (insn));
      if (table)
	{
#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	  /* In this case, the case vector is being moved by the
	     target, so don't output the label at all.  Leave that
	     to the back end macros.  */
#else
	  if (! JUMP_TABLES_IN_TEXT_SECTION)
	    {
	      int log_align;

	      switch_to_section (targetm.asm_out.function_rodata_section
				 (current_function_decl,
				  jumptable_relocatable ()));

#ifdef ADDR_VEC_ALIGN
	      log_align = ADDR_VEC_ALIGN (table);
#else
	      log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
#endif
	      ASM_OUTPUT_ALIGN (file, log_align);
	    }
	  else
	    switch_to_section (current_function_section ());

#ifdef ASM_OUTPUT_CASE_LABEL
	  ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn), table);
#else
	  targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
#endif
#endif
	  break;
	}
      if (LABEL_ALT_ENTRY_P (insn))
	output_alternate_entry_point (file, insn);
      else
	targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
      break;

    default:
      {
	rtx body = PATTERN (insn);
	int insn_code_number;
	const char *templ;
	bool is_stmt, *is_stmt_p;

	/* With nonbind markers in use, ordinary insns never start a
	   statement; pass a NULL is_stmt pointer so notice_source_line
	   leaves it alone.  */
	if (MAY_HAVE_DEBUG_MARKER_INSNS && cfun->debug_nonbind_markers)
	  {
	    is_stmt = false;
	    is_stmt_p = NULL;
	  }
	else
	  is_stmt_p = &is_stmt;

	/* Reset this early so it is correct for ASM statements.  */
	current_insn_predicate = NULL_RTX;

	/* An INSN, JUMP_INSN or CALL_INSN.
	   First check for special kinds that recog doesn't recognize.  */

	if (GET_CODE (body) == USE /* These are just declarations.  */
	    || GET_CODE (body) == CLOBBER)
	  break;

	/* Detect insns that are really jump-tables
	   and output them as such.  */

	if (JUMP_TABLE_DATA_P (insn))
	  {
#if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
	    int vlen, idx;
#endif

	    if (! JUMP_TABLES_IN_TEXT_SECTION)
	      switch_to_section (targetm.asm_out.function_rodata_section
				 (current_function_decl,
				  jumptable_relocatable ()));
	    else
	      switch_to_section (current_function_section ());

	    app_disable ();

#if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
	    if (GET_CODE (body) == ADDR_VEC)
	      {
#ifdef ASM_OUTPUT_ADDR_VEC
		ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
#else
		gcc_unreachable ();
#endif
	      }
	    else
	      {
#ifdef ASM_OUTPUT_ADDR_DIFF_VEC
		ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
#else
		gcc_unreachable ();
#endif
	      }
#else
	    /* No whole-vector macro: emit the table one element at a
	       time.  */
	    vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
	    for (idx = 0; idx < vlen; idx++)
	      {
		if (GET_CODE (body) == ADDR_VEC)
		  {
#ifdef ASM_OUTPUT_ADDR_VEC_ELT
		    ASM_OUTPUT_ADDR_VEC_ELT
		      (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
#else
		    gcc_unreachable ();
#endif
		  }
		else
		  {
#ifdef ASM_OUTPUT_ADDR_DIFF_ELT
		    ASM_OUTPUT_ADDR_DIFF_ELT
		      (file,
		       body,
		       CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
		       CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
#else
		    gcc_unreachable ();
#endif
		  }
	      }
#ifdef ASM_OUTPUT_CASE_END
	    ASM_OUTPUT_CASE_END (file,
				 CODE_LABEL_NUMBER (PREV_INSN (insn)),
				 insn);
#endif
#endif

	    switch_to_section (current_function_section ());

	    if (debug_variable_location_views
		&& !DECL_IGNORED_P (current_function_decl))
	      debug_hooks->var_location (insn);

	    break;
	  }
	/* Output this line note if it is the first or the last line
	   note in a row.  */
	if (!DECL_IGNORED_P (current_function_decl)
	    && notice_source_line (insn, is_stmt_p))
	  {
	    if (flag_verbose_asm)
	      asm_show_source (last_filename, last_linenum);
	    (*debug_hooks->source_line) (last_linenum, last_columnnum,
					 last_filename, last_discriminator,
					 is_stmt);
	    clear_next_view_needed (seen);
	  }
	else
	  maybe_output_next_view (seen);

	gcc_checking_assert (!DEBUG_INSN_P (insn));

	if (GET_CODE (body) == PARALLEL
	    && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	  body = XVECEXP (body, 0, 0);

	if (GET_CODE (body) == ASM_INPUT)
	  {
	    const char *string = XSTR (body, 0);

	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;

	    if (string[0])
	      {
		expanded_location loc;

		app_enable ();
		loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
		if (*loc.file && loc.line)
		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
			   ASM_COMMENT_START, loc.line, loc.file);
		fprintf (asm_out_file, "\t%s\n", string);
#if HAVE_AS_LINE_ZERO
		if (*loc.file && loc.line)
		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
#endif
	      }
	    break;
	  }

	/* Detect `asm' construct with operands.  */
	if (asm_noperands (body) >= 0)
	  {
	    unsigned int noperands = asm_noperands (body);
	    rtx *ops = XALLOCAVEC (rtx, noperands);
	    const char *string;
	    location_t loc;
	    expanded_location expanded;

	    /* There's no telling what that did to the condition codes.  */
	    CC_STATUS_INIT;

	    /* Get out the operand values.  */
	    string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
	    /* Inhibit dying on what would otherwise be compiler bugs.  */
	    insn_noperands = noperands;
	    this_is_asm_operands = insn;
	    expanded = expand_location (loc);

#ifdef FINAL_PRESCAN_INSN
	    FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
#endif

	    /* Output the insn using them.  */
	    if (string[0])
	      {
		app_enable ();
		if (expanded.file && expanded.line)
		  fprintf (asm_out_file, "%s %i \"%s\" 1\n",
			   ASM_COMMENT_START, expanded.line, expanded.file);
		output_asm_insn (string, ops);
#if HAVE_AS_LINE_ZERO
		if (expanded.file && expanded.line)
		  fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
#endif
	      }

	    if (targetm.asm_out.final_postscan_insn)
	      targetm.asm_out.final_postscan_insn (file, insn, ops,
						   insn_noperands);

	    this_is_asm_operands = 0;
	    break;
	  }

	app_disable ();

	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (body))
	  {
	    /* A delayed-branch sequence.  */
	    int i;

	    final_sequence = seq;

	    /* The first insn in this SEQUENCE might be a JUMP_INSN that will
	       force the restoration of a comparison that was previously
	       thought unnecessary.  If that happens, cancel this sequence
	       and cause that insn to be restored.  */

	    next = final_scan_insn (seq->insn (0), file, 0, 1, seen);
	    if (next != seq->insn (1))
	      {
		final_sequence = 0;
		return next;
	      }

	    for (i = 1; i < seq->len (); i++)
	      {
		rtx_insn *insn = seq->insn (i);
		rtx_insn *next = NEXT_INSN (insn);
		/* We loop in case any instruction in a delay slot gets
		   split.  */
		do
		  insn = final_scan_insn (insn, file, 0, 1, seen);
		while (insn != next);
	      }
#ifdef DBR_OUTPUT_SEQEND
	    DBR_OUTPUT_SEQEND (file);
#endif
	    final_sequence = 0;

	    /* If the insn requiring the delay slot was a CALL_INSN, the
	       insns in the delay slot are actually executed before the
	       called function.  Hence we don't preserve any CC-setting
	       actions in these insns and the CC must be marked as being
	       clobbered by the function.  */
	    if (CALL_P (seq->insn (0)))
	      {
		CC_STATUS_INIT;
	      }
	    break;
	  }

	/* We have a real machine instruction as rtl.  */

	body = PATTERN (insn);

	/* Do machine-specific peephole optimizations if desired.  */

	if (HAVE_peephole && optimize_p && !flag_no_peephole && !nopeepholes)
	  {
	    rtx_insn *next = peephole (insn);
	    /* When peepholing, if there were notes within the peephole,
	       emit them before the peephole.  */
	    if (next != 0 && next != NEXT_INSN (insn))
	      {
		rtx_insn *note, *prev = PREV_INSN (insn);

		for (note = NEXT_INSN (insn); note != next;
		     note = NEXT_INSN (note))
		  final_scan_insn (note, file, optimize_p, nopeepholes, seen);

		/* Put the notes in the proper position for a later
		   rescan.  For example, the SH target can do this
		   when generating a far jump in a delayed branch
		   sequence.  */
		note = NEXT_INSN (insn);
		SET_PREV_INSN (note) = prev;
		SET_NEXT_INSN (prev) = note;
		SET_NEXT_INSN (PREV_INSN (next)) = insn;
		SET_PREV_INSN (insn) = PREV_INSN (next);
		SET_NEXT_INSN (insn) = next;
		SET_PREV_INSN (next) = insn;
	      }

	    /* PEEPHOLE might have changed this.  */
	    body = PATTERN (insn);
	  }

	/* Try to recognize the instruction.
	   If successful, verify that the operands satisfy the
	   constraints for the instruction.  Crash if they don't,
	   since `reload' should have changed them so that they do.  */

	insn_code_number = recog_memoized (insn);
	cleanup_subreg_operands (insn);

	/* Dump the insn in the assembly for debugging (-dAP).
	   If the final dump is requested as slim RTL, dump slim
	   RTL to the assembly file also.  */
	if (flag_dump_rtl_in_asm)
	  {
	    print_rtx_head = ASM_COMMENT_START;
	    if (! (dump_flags & TDF_SLIM))
	      print_rtl_single (asm_out_file, insn);
	    else
	      dump_insn_slim (asm_out_file, insn);
	    print_rtx_head = "";
	  }

	if (! constrain_operands_cached (insn, 1))
	  fatal_insn_not_found (insn);

	/* Some target machines need to prescan each insn before
	   it is output.  */

#ifdef FINAL_PRESCAN_INSN
	FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
#endif

	if (targetm.have_conditional_execution ()
	    && GET_CODE (PATTERN (insn)) == COND_EXEC)
	  current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));

	current_output_insn = debug_insn = insn;

	/* Find the proper template for this insn.  */
	templ = get_insn_template (insn_code_number, insn);

	/* If the C code returns 0, it means that it is a jump insn
	   which follows a deleted test insn, and that test insn
	   needs to be reinserted.  */
	if (templ == 0)
	  {
	    rtx_insn *prev;

	    gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);

	    /* We have already processed the notes between the setter and
	       the user.  Make sure we don't process them again, this is
	       particularly important if one of the notes is a block
	       scope note or an EH note.  */
	    for (prev = insn;
		 prev != last_ignored_compare;
		 prev = PREV_INSN (prev))
	      {
		if (NOTE_P (prev))
		  delete_insn (prev);	/* Use delete_note.  */
	      }

	    return prev;
	  }

	/* If the template is the string "#", it means that this insn must
	   be split.  */
	if (templ[0] == '#' && templ[1] == '\0')
	  {
	    rtx_insn *new_rtx = try_split (body, insn, 0);

	    /* If we didn't split the insn, go away.  */
	    if (new_rtx == insn && PATTERN (new_rtx) == body)
	      fatal_insn ("could not split insn", insn);

	    /* If we have a length attribute, this instruction should have
	       been split in shorten_branches, to ensure that we would have
	       valid length info for the splitees.  */
	    gcc_assert (!HAVE_ATTR_length);

	    return new_rtx;
	  }

	/* ??? This will put the directives in the wrong place if
	   get_insn_template outputs assembly directly.  However calling it
	   before get_insn_template breaks if the insns is split.  */
	if (targetm.asm_out.unwind_emit_before_insn
	    && targetm.asm_out.unwind_emit)
	  targetm.asm_out.unwind_emit (asm_out_file, insn);

	/* For a direct call through a SYMBOL_REF, make sure the callee's
	   declaration is emitted as external.  */
	rtx_call_insn *call_insn = dyn_cast <rtx_call_insn *> (insn);
	if (call_insn != NULL)
	  {
	    rtx x = call_from_call_insn (call_insn);
	    x = XEXP (x, 0);
	    if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	      {
		tree t;
		x = XEXP (x, 0);
		t = SYMBOL_REF_DECL (x);
		if (t)
		  assemble_external (t);
	      }
	  }

	/* Output assembler code from the template.  */
	output_asm_insn (templ, recog_data.operand);

	/* Some target machines need to postscan each insn after
	   it is output.  */
	if (targetm.asm_out.final_postscan_insn)
	  targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
					       recog_data.n_operands);

	if (!targetm.asm_out.unwind_emit_before_insn
	    && targetm.asm_out.unwind_emit)
	  targetm.asm_out.unwind_emit (asm_out_file, insn);

	/* Let the debug info back-end know about this call.  We do this only
	   after the instruction has been emitted because labels that may be
	   created to reference the call instruction must appear after it.  */
	if ((debug_variable_location_views || call_insn != NULL)
	    && !DECL_IGNORED_P (current_function_decl))
	  debug_hooks->var_location (insn);

	current_output_insn = debug_insn = 0;
      }
    }
  return NEXT_INSN (insn);
}
2931
2932 /* This is a wrapper around final_scan_insn_1 that allows ports to
2933 call it recursively without a known value for SEEN. The value is
2934 saved at the outermost call, and recovered for recursive calls.
2935 Recursive calls MUST pass NULL, or the same pointer if they can
2936 otherwise get to it. */
2937
rtx_insn *
final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p,
		 int nopeepholes, int *seen)
{
  /* SEEN pointer supplied by the outermost call, reused by recursive
     calls that pass NULL.  */
  static int *enclosing_seen;
  /* Current recursion depth; zero outside any call.  */
  static int recursion_counter;

  /* The outermost call must supply SEEN; recursive calls must pass
     NULL or the same pointer.  */
  gcc_assert (seen || recursion_counter);
  gcc_assert (!recursion_counter || !seen || seen == enclosing_seen);

  if (!recursion_counter++)
    enclosing_seen = seen;
  else if (!seen)
    seen = enclosing_seen;

  rtx_insn *ret = final_scan_insn_1 (insn, file, optimize_p, nopeepholes, seen);

  /* Clear the saved pointer once the outermost call unwinds.  */
  if (!--recursion_counter)
    enclosing_seen = NULL;

  return ret;
}
2960
2961
2962
/* Map DECLs to instance discriminators.  This is allocated and
   defined in ada/gcc-interface/trans.cc, when compiling with -gnateS.
   Mappings from this table are saved and restored for LTO, so
   link-time compilation will have this map set, at least in
   partitions containing at least one DECL with an associated instance
   discriminator.  */

decl_to_instance_map_t *decl_to_instance_map;
2972
2973 /* Return the instance number assigned to DECL. */
2974
2975 static inline int
2976 map_decl_to_instance (const_tree decl)
2977 {
2978 int *inst;
2979
2980 if (!decl_to_instance_map || !decl || !DECL_P (decl))
2981 return 0;
2982
2983 inst = decl_to_instance_map->get (decl);
2984
2985 if (!inst)
2986 return 0;
2987
2988 return *inst;
2989 }
2990
2991 /* Set DISCRIMINATOR to the appropriate value, possibly derived from LOC. */
2992
2993 static inline int
2994 compute_discriminator (location_t loc)
2995 {
2996 int discriminator;
2997
2998 if (!decl_to_instance_map)
2999 discriminator = bb_discriminator;
3000 else
3001 {
3002 tree block = LOCATION_BLOCK (loc);
3003
3004 while (block && TREE_CODE (block) == BLOCK
3005 && !inlined_function_outer_scope_p (block))
3006 block = BLOCK_SUPERCONTEXT (block);
3007
3008 tree decl;
3009
3010 if (!block)
3011 decl = current_function_decl;
3012 else if (DECL_P (block))
3013 decl = block;
3014 else
3015 decl = block_ultimate_origin (block);
3016
3017 discriminator = map_decl_to_instance (decl);
3018 }
3019
3020 return discriminator;
3021 }
3022
3023 /* Return whether a source line note needs to be emitted before INSN.
3024 Sets IS_STMT to TRUE if the line should be marked as a possible
3025 breakpoint location. */
3026
static bool
notice_source_line (rtx_insn *insn, bool *is_stmt)
{
  const char *filename;
  int linenum, columnnum;

  /* Pick the location in priority order: statement/inline markers,
     then any override installed by block notes, then the insn's own
     location.  */
  if (NOTE_MARKER_P (insn))
    {
      location_t loc = NOTE_MARKER_LOCATION (insn);
      expanded_location xloc = expand_location (loc);
      /* A marker without a line number is only usable when it carries
	 a real location.  */
      if (xloc.line == 0
	  && (LOCATION_LOCUS (loc) == UNKNOWN_LOCATION
	      || LOCATION_LOCUS (loc) == BUILTINS_LOCATION))
	return false;

      filename = xloc.file;
      linenum = xloc.line;
      columnnum = xloc.column;
      discriminator = compute_discriminator (loc);
      /* Markers always force a note, even for a repeated location.  */
      force_source_line = true;
    }
  else if (override_filename)
    {
      filename = override_filename;
      linenum = override_linenum;
      columnnum = override_columnnum;
      discriminator = override_discriminator;
    }
  else if (INSN_HAS_LOCATION (insn))
    {
      expanded_location xloc = insn_location (insn);
      filename = xloc.file;
      linenum = xloc.line;
      columnnum = xloc.column;
      discriminator = compute_discriminator (INSN_LOCATION (insn));
    }
  else
    {
      filename = NULL;
      linenum = 0;
      columnnum = 0;
      discriminator = 0;
    }

  if (filename == NULL)
    return false;

  /* Emit a note whenever the location differs from the last one (or a
     note was forced), recording it as the new "last" location.  */
  if (force_source_line
      || filename != last_filename
      || last_linenum != linenum
      || (debug_column_info && last_columnnum != columnnum))
    {
      force_source_line = false;
      last_filename = filename;
      last_linenum = linenum;
      last_columnnum = columnnum;
      last_discriminator = discriminator;
      if (is_stmt)
	*is_stmt = true;
      high_block_linenum = MAX (last_linenum, high_block_linenum);
      high_function_linenum = MAX (last_linenum, high_function_linenum);
      return true;
    }

  if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
    {
      /* If the discriminator changed, but the line number did not,
	 output the line table entry with is_stmt false so the
	 debugger does not treat this as a breakpoint location.  */
      last_discriminator = discriminator;
      if (is_stmt)
	*is_stmt = false;
      return true;
    }

  return false;
}
3104
3105 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
3107 directly to the desired hard register. */
3108
3109 void
3110 cleanup_subreg_operands (rtx_insn *insn)
3111 {
3112 int i;
3113 bool changed = false;
3114 extract_insn_cached (insn);
3115 for (i = 0; i < recog_data.n_operands; i++)
3116 {
3117 /* The following test cannot use recog_data.operand when testing
3118 for a SUBREG: the underlying object might have been changed
3119 already if we are inside a match_operator expression that
3120 matches the else clause. Instead we test the underlying
3121 expression directly. */
3122 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
3123 {
3124 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i], true);
3125 changed = true;
3126 }
3127 else if (GET_CODE (recog_data.operand[i]) == PLUS
3128 || GET_CODE (recog_data.operand[i]) == MULT
3129 || MEM_P (recog_data.operand[i]))
3130 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
3131 }
3132
3133 for (i = 0; i < recog_data.n_dups; i++)
3134 {
3135 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
3136 {
3137 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i], true);
3138 changed = true;
3139 }
3140 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
3141 || GET_CODE (*recog_data.dup_loc[i]) == MULT
3142 || MEM_P (*recog_data.dup_loc[i]))
3143 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
3144 }
3145 if (changed)
3146 df_insn_rescan (insn);
3147 }
3148
3149 /* If X is a SUBREG, try to replace it with a REG or a MEM, based on
3150 the thing it is a subreg of. Do it anyway if FINAL_P. */
3151
rtx
alter_subreg (rtx *xp, bool final_p)
{
  rtx x = *xp;
  rtx y = SUBREG_REG (x);

  /* simplify_subreg does not remove subreg from volatile references.
     We are required to.  */
  if (MEM_P (y))
    {
      poly_int64 offset = SUBREG_BYTE (x);

      /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
	 contains 0 instead of the proper offset.  See simplify_subreg.  */
      if (paradoxical_subreg_p (x))
	offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));

      /* adjust_address validates the result; the _nv variant does not.  */
      if (final_p)
	*xp = adjust_address (y, GET_MODE (x), offset);
      else
	*xp = adjust_address_nv (y, GET_MODE (x), offset);
    }
  else if (REG_P (y) && HARD_REGISTER_P (y))
    {
      rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
				     SUBREG_BYTE (x));

      if (new_rtx != 0)
	*xp = new_rtx;
      else if (final_p && REG_P (y))
	{
	  /* Simplify_subreg can't handle some REG cases, but we have to.  */
	  unsigned int regno;
	  poly_int64 offset;

	  regno = subreg_regno (x);
	  if (subreg_lowpart_p (x))
	    offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
	  else
	    offset = SUBREG_BYTE (x);
	  *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
	}
    }

  /* Any other SUBREG (e.g. of a pseudo register) is left unchanged.  */
  return *xp;
}
3198
3199 /* Do alter_subreg on all the SUBREGs contained in X. */
3200
static rtx
walk_alter_subreg (rtx *xp, bool *changed)
{
  rtx x = *xp;
  switch (GET_CODE (x))
    {
    /* Binary address arithmetic: recurse into both operands.  */
    case PLUS:
    case MULT:
    case AND:
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
      XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
      break;

    /* Single-operand wrappers: recurse into the operand.  */
    case MEM:
    case ZERO_EXTEND:
      XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
      break;

    case SUBREG:
      /* Replace the SUBREG itself and record that we did so.  */
      *changed = true;
      return alter_subreg (xp, true);

    default:
      break;
    }

  return *xp;
}
3229
3230 /* Report inconsistency between the assembler template and the operands.
3232 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3233
void
output_operand_lossage (const char *cmsgid, ...)
{
  char *fmt_string;
  char *new_message;
  const char *pfx_str;
  va_list ap;

  va_start (ap, cmsgid);

  /* Prefix distinguishes user errors in `asm' statements from
     internal compiler errors.  CMSGID is translated via _() before
     being used as the format string.  */
  pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
  fmt_string = xasprintf ("%s%s", pfx_str, _(cmsgid));
  new_message = xvasprintf (fmt_string, ap);

  if (this_is_asm_operands)
    /* Blame the user's asm statement; this reports and returns.  */
    error_for_asm (this_is_asm_operands, "%s", new_message);
  else
    /* Compiler bug: internal_error does not return, so the frees
       below are only reached in the asm-operands case.  */
    internal_error ("%s", new_message);

  free (fmt_string);
  free (new_message);
  va_end (ap);
}
3257
3258 /* Output of assembler code from a template, and its subroutines. */
3260
3261 /* Annotate the assembly with a comment describing the pattern and
3262 alternative used. */
3263
static void
output_asm_name (void)
{
  if (debug_insn)
    {
      /* Start an assembler comment tagged with the insn UID.  */
      fprintf (asm_out_file, "\t%s %d\t",
               ASM_COMMENT_START, INSN_UID (debug_insn));

      /* Emit cost (and, if the target has length attributes, the
         computed insn length) in brackets.  */
      fprintf (asm_out_file, "[c=%d",
               insn_cost (debug_insn, optimize_insn_for_speed_p ()));
      if (HAVE_ATTR_length)
        fprintf (asm_out_file, " l=%d",
                 get_attr_length (debug_insn));
      fprintf (asm_out_file, "] ");

      /* Name of the matched pattern, plus the chosen alternative when
         the pattern has more than one.  */
      int num = INSN_CODE (debug_insn);
      fprintf (asm_out_file, "%s", insn_data[num].name);
      if (insn_data[num].n_alternatives > 1)
        fprintf (asm_out_file, "/%d", which_alternative);

      /* Clear this so only the first assembler insn
         of any rtl insn will get the special comment for -dp.  */
      debug_insn = 0;
    }
}
3289
3290 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3291 or its address, return that expr . Set *PADDRESSP to 1 if the expr
3292 corresponds to the address of the object and 0 if to the object. */
3293
static tree
get_mem_expr_from_op (rtx op, int *paddressp)
{
  tree expr;
  int inner_addressp;

  *paddressp = 0;

  /* A REG either has an attached expression or nothing useful.  */
  if (REG_P (op))
    return REG_EXPR (op);
  else if (!MEM_P (op))
    return 0;

  /* A MEM with a recorded expression is the easy case.  */
  if (MEM_EXPR (op) != 0)
    return MEM_EXPR (op);

  /* Otherwise we have an address, so indicate it and look at the address.  */
  *paddressp = 1;
  op = XEXP (op, 0);

  /* First check if we have a decl for the address, then look at the right side
     if it is a PLUS.  Otherwise, strip off arithmetic and keep looking.
     But don't allow the address to itself be indirect.  */
  if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
    return expr;
  else if (GET_CODE (op) == PLUS
           && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
    return expr;

  /* Strip unary operations and binary arithmetic wrappers.  */
  while (UNARY_P (op)
         || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
    op = XEXP (op, 0);

  expr = get_mem_expr_from_op (op, &inner_addressp);
  return inner_addressp ? 0 : expr;
}
3330
3331 /* Output operand names for assembler instructions. OPERANDS is the
3332 operand vector, OPORDER is the order to write the operands, and NOPS
3333 is the number of operands to write. */
3334
static void
output_asm_operand_names (rtx *operands, int *oporder, int nops)
{
  /* WROTE tracks whether a comment has been started, so the first
     entry opens the comment and later ones are comma-separated.  */
  int wrote = 0;
  int i;

  for (i = 0; i < nops; i++)
    {
      int addressp;
      rtx op = operands[oporder[i]];
      tree expr = get_mem_expr_from_op (op, &addressp);

      fprintf (asm_out_file, "%c%s",
               wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
      wrote = 1;
      if (expr)
        {
          /* Prefix "*" when the expression names the address rather
             than the object itself.  */
          fprintf (asm_out_file, "%s",
                   addressp ? "*" : "");
          print_mem_expr (asm_out_file, expr);
          wrote = 1;
        }
      else if (REG_P (op) && ORIGINAL_REGNO (op)
               && ORIGINAL_REGNO (op) != REGNO (op))
        /* A renumbered register with no expression: show the pseudo
           it originally was.  */
        fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
    }
}
3362
3363 #ifdef ASSEMBLER_DIALECT
3364 /* Helper function to parse assembler dialects in the asm string.
3365 This is called from output_asm_insn and asm_fprintf. */
static const char *
do_assembler_dialects (const char *p, int *dialect)
{
  /* P points just past the '{', '|' or '}' that triggered this call;
     recover the delimiter itself.  */
  char c = *(p - 1);

  switch (c)
    {
    case '{':
      {
        int i;

        /* Dialect alternatives may not nest.  */
        if (*dialect)
          output_operand_lossage ("nested assembly dialect alternatives");
        else
          *dialect = 1;

        /* If we want the first dialect, do nothing.  Otherwise, skip
           DIALECT_NUMBER of strings ending with '|'.  */
        for (i = 0; i < dialect_number; i++)
          {
            while (*p && *p != '}')
              {
                if (*p == '|')
                  {
                    p++;
                    break;
                  }

                /* Skip over any character after a percent sign.  */
                if (*p == '%')
                  p++;
                if (*p)
                  p++;
              }

            /* Fewer alternatives than DIALECT_NUMBER: stay on the
               last one.  */
            if (*p == '}')
              break;
          }

        if (*p == '\0')
          output_operand_lossage ("unterminated assembly dialect alternative");
      }
      break;

    case '|':
      if (*dialect)
        {
          /* The selected alternative has already been emitted; skip
             the rest of the construct up to the closing brace.  */
          do
            {
              if (*p == '\0')
                {
                  output_operand_lossage ("unterminated assembly dialect alternative");
                  break;
                }

              /* Skip over any character after a percent sign.  */
              if (*p == '%' && p[1])
                {
                  p += 2;
                  continue;
                }

              if (*p++ == '}')
                break;
            }
          while (1);

          *dialect = 0;
        }
      else
        /* A '|' outside a dialect construct is ordinary output.  */
        putc (c, asm_out_file);
      break;

    case '}':
      /* A stray '}' outside a construct is ordinary output.  */
      if (! *dialect)
        putc (c, asm_out_file);
      *dialect = 0;
      break;
    default:
      gcc_unreachable ();
    }

  return p;
}
3451 #endif
3452
3453 /* Output text from TEMPLATE to the assembler output file,
3454 obeying %-directions to substitute operands taken from
3455 the vector OPERANDS.
3456
3457 %N (for N a digit) means print operand N in usual manner.
3458 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3459 and print the label name with no punctuation.
3460 %cN means require operand N to be a constant
3461 and print the constant expression with no punctuation.
3462 %aN means expect operand N to be a memory address
3463 (not a memory reference!) and print a reference
3464 to that address.
3465 %nN means expect operand N to be a constant
3466 and print a constant expression for minus the value
3467 of the operand, with no other punctuation. */
3468
void
output_asm_insn (const char *templ, rtx *operands)
{
  const char *p;
  int c;
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
  /* OPORDER records the order operands were printed in and OPOUTPUT
     whether each operand has been printed yet, for the -fverbose-asm
     operand-name comments.  */
  int oporder[MAX_RECOG_OPERANDS];
  char opoutput[MAX_RECOG_OPERANDS];
  int ops = 0;

  /* An insn may return a null string template
     in a case where no assembler code is needed.  */
  if (*templ == 0)
    return;

  memset (opoutput, 0, sizeof opoutput);
  p = templ;
  putc ('\t', asm_out_file);

#ifdef ASM_OUTPUT_OPCODE
  ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif

  while ((c = *p++))
    switch (c)
      {
      case '\n':
        /* End of one assembler line: flush any pending operand-name
           and pattern-name comments, then reset per-line state.  */
        if (flag_verbose_asm)
          output_asm_operand_names (operands, oporder, ops);
        if (flag_print_asm_name)
          output_asm_name ();

        ops = 0;
        memset (opoutput, 0, sizeof opoutput);

        putc (c, asm_out_file);
#ifdef ASM_OUTPUT_OPCODE
        /* Copy leading tabs, then let the target transform the opcode
           that starts the next line.  */
        while ((c = *p) == '\t')
          {
            putc (c, asm_out_file);
            p++;
          }
        ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
        break;

#ifdef ASSEMBLER_DIALECT
      case '{':
      case '}':
      case '|':
        p = do_assembler_dialects (p, &dialect);
        break;
#endif

      case '%':
        /* %% outputs a single %.  %{, %} and %| print {, } and | respectively
           if ASSEMBLER_DIALECT defined and these characters have a special
           meaning as dialect delimiters.  */
        if (*p == '%'
#ifdef ASSEMBLER_DIALECT
            || *p == '{' || *p == '}' || *p == '|'
#endif
            )
          {
            putc (*p, asm_out_file);
            p++;
          }
        /* %= outputs a number which is unique to each insn in the entire
           compilation.  This is useful for making local labels that are
           referred to more than once in a given insn.  */
        else if (*p == '=')
          {
            p++;
            fprintf (asm_out_file, "%d", insn_counter);
          }
        /* % followed by a letter and some digits
           outputs an operand in a special way depending on the letter.
           Letters `acln' are implemented directly.
           Other letters are passed to `output_operand' so that
           the TARGET_PRINT_OPERAND hook can define them.  */
        else if (ISALPHA (*p))
          {
            int letter = *p++;
            unsigned long opnum;
            char *endptr;

            opnum = strtoul (p, &endptr, 10);

            if (endptr == p)
              output_operand_lossage ("operand number missing "
                                      "after %%-letter");
            else if (this_is_asm_operands && opnum >= insn_noperands)
              output_operand_lossage ("operand number out of range");
            else if (letter == 'l')
              output_asm_label (operands[opnum]);
            else if (letter == 'a')
              output_address (VOIDmode, operands[opnum]);
            else if (letter == 'c')
              {
                if (CONSTANT_ADDRESS_P (operands[opnum]))
                  output_addr_const (asm_out_file, operands[opnum]);
                else
                  output_operand (operands[opnum], 'c');
              }
            else if (letter == 'n')
              {
                /* Print minus the operand's value.  */
                if (CONST_INT_P (operands[opnum]))
                  fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
                           - INTVAL (operands[opnum]));
                else
                  {
                    putc ('-', asm_out_file);
                    output_addr_const (asm_out_file, operands[opnum]);
                  }
              }
            else
              output_operand (operands[opnum], letter);

            /* Remember the first time each operand is printed, for
               the -fverbose-asm comment.  */
            if (!opoutput[opnum])
              oporder[ops++] = opnum;
            opoutput[opnum] = 1;

            p = endptr;
            c = *p;
          }
        /* % followed by a digit outputs an operand the default way.  */
        else if (ISDIGIT (*p))
          {
            unsigned long opnum;
            char *endptr;

            opnum = strtoul (p, &endptr, 10);
            if (this_is_asm_operands && opnum >= insn_noperands)
              output_operand_lossage ("operand number out of range");
            else
              output_operand (operands[opnum], 0);

            if (!opoutput[opnum])
              oporder[ops++] = opnum;
            opoutput[opnum] = 1;

            p = endptr;
            c = *p;
          }
        /* % followed by punctuation: output something for that
           punctuation character alone, with no operand.  The
           TARGET_PRINT_OPERAND hook decides what is actually done.  */
        else if (targetm.asm_out.print_operand_punct_valid_p ((unsigned char) *p))
          output_operand (NULL_RTX, *p++);
        else
          output_operand_lossage ("invalid %%-code");
        break;

      default:
        putc (c, asm_out_file);
      }

  /* Try to keep the asm a bit more readable.  */
  if ((flag_verbose_asm || flag_print_asm_name) && strlen (templ) < 9)
    putc ('\t', asm_out_file);

  /* Write out the variable names for operands, if we know them.  */
  if (flag_verbose_asm)
    output_asm_operand_names (operands, oporder, ops);
  if (flag_print_asm_name)
    output_asm_name ();

  putc ('\n', asm_out_file);
}
3640
3641 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3643
3644 void
3645 output_asm_label (rtx x)
3646 {
3647 char buf[256];
3648
3649 if (GET_CODE (x) == LABEL_REF)
3650 x = label_ref_label (x);
3651 if (LABEL_P (x)
3652 || (NOTE_P (x)
3653 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3654 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3655 else
3656 output_operand_lossage ("'%%l' operand isn't a label");
3657
3658 assemble_name (asm_out_file, buf);
3659 }
3660
3661 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3662
3663 void
3664 mark_symbol_refs_as_used (rtx x)
3665 {
3666 subrtx_iterator::array_type array;
3667 FOR_EACH_SUBRTX (iter, array, x, ALL)
3668 {
3669 const_rtx x = *iter;
3670 if (GET_CODE (x) == SYMBOL_REF)
3671 if (tree t = SYMBOL_REF_DECL (x))
3672 assemble_external (t);
3673 }
3674 }
3675
3676 /* Print operand X using machine-dependent assembler syntax.
3677 CODE is a non-digit that preceded the operand-number in the % spec,
3678 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3679 between the % and the digits.
3680 When CODE is a non-letter, X is 0.
3681
3682 The meanings of the letters are machine-dependent and controlled
3683 by TARGET_PRINT_OPERAND. */
3684
void
output_operand (rtx x, int code ATTRIBUTE_UNUSED)
{
  /* Remove any surviving SUBREG before handing X to the target.  */
  if (x && GET_CODE (x) == SUBREG)
    x = alter_subreg (&x, true);

  /* X must not be a pseudo reg.  */
  if (!targetm.no_register_allocation)
    gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);

  targetm.asm_out.print_operand (asm_out_file, x, code);

  /* Punctuation-only directives pass a null X; nothing to mark.  */
  if (x == NULL_RTX)
    return;

  mark_symbol_refs_as_used (x);
}
3702
3703 /* Print a memory reference operand for address X using
3704 machine-dependent assembler syntax. */
3705
3706 void
3707 output_address (machine_mode mode, rtx x)
3708 {
3709 bool changed = false;
3710 walk_alter_subreg (&x, &changed);
3711 targetm.asm_out.print_operand_address (asm_out_file, mode, x);
3712 }
3713
3714 /* Print an integer constant expression in assembler syntax.
3716 Addition and subtraction are the only arithmetic
3717 that may appear in these expressions. */
3718
void
output_addr_const (FILE *file, rtx x)
{
  char buf[256];

 restart:
  switch (GET_CODE (x))
    {
    case PC:
      /* "."  conventionally denotes the current location.  */
      putc ('.', file);
      break;

    case SYMBOL_REF:
      if (SYMBOL_REF_DECL (x))
        assemble_external (SYMBOL_REF_DECL (x));
#ifdef ASM_OUTPUT_SYMBOL_REF
      ASM_OUTPUT_SYMBOL_REF (file, x);
#else
      assemble_name (file, XSTR (x, 0));
#endif
      break;

    case LABEL_REF:
      x = label_ref_label (x);
      /* Fall through.  */
    case CODE_LABEL:
      ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
#ifdef ASM_OUTPUT_LABEL_REF
      ASM_OUTPUT_LABEL_REF (file, buf);
#else
      assemble_name (file, buf);
#endif
      break;

    case CONST_INT:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
      break;

    case CONST:
      /* This used to output parentheses around the expression,
         but that does not work on the 386 (either ATT or BSD assembler).  */
      output_addr_const (file, XEXP (x, 0));
      break;

    case CONST_WIDE_INT:
      /* We do not know the mode here so we have to use a round about
         way to build a wide-int to get it printed properly.  */
      {
        wide_int w = wide_int::from_array (&CONST_WIDE_INT_ELT (x, 0),
                                           CONST_WIDE_INT_NUNITS (x),
                                           CONST_WIDE_INT_NUNITS (x)
                                           * HOST_BITS_PER_WIDE_INT,
                                           false);
        print_decs (w, file);
      }
      break;

    case CONST_DOUBLE:
      if (CONST_DOUBLE_AS_INT_P (x))
        {
          /* We can use %d if the number is one word and positive.  */
          if (CONST_DOUBLE_HIGH (x))
            fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
                     (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
                     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
          else if (CONST_DOUBLE_LOW (x) < 0)
            fprintf (file, HOST_WIDE_INT_PRINT_HEX,
                     (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
          else
            fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
        }
      else
        /* We can't handle floating point constants;
           PRINT_OPERAND must handle them.  */
        output_operand_lossage ("floating constant misused");
      break;

    case CONST_FIXED:
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_FIXED_VALUE_LOW (x));
      break;

    case PLUS:
      /* Some assemblers need integer constants to appear last (eg masm).  */
      if (CONST_INT_P (XEXP (x, 0)))
        {
          output_addr_const (file, XEXP (x, 1));
          if (INTVAL (XEXP (x, 0)) >= 0)
            fprintf (file, "+");
          output_addr_const (file, XEXP (x, 0));
        }
      else
        {
          output_addr_const (file, XEXP (x, 0));
          /* A negative constant already prints its own '-'.  */
          if (!CONST_INT_P (XEXP (x, 1))
              || INTVAL (XEXP (x, 1)) >= 0)
            fprintf (file, "+");
          output_addr_const (file, XEXP (x, 1));
        }
      break;

    case MINUS:
      /* Avoid outputting things like x-x or x+5-x,
         since some assemblers can't handle that.  */
      x = simplify_subtraction (x);
      if (GET_CODE (x) != MINUS)
        goto restart;

      output_addr_const (file, XEXP (x, 0));
      fprintf (file, "-");
      if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
          || GET_CODE (XEXP (x, 1)) == PC
          || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
        output_addr_const (file, XEXP (x, 1));
      else
        {
          /* Parenthesize complex subtrahends using the target's
             grouping syntax.  */
          fputs (targetm.asm_out.open_paren, file);
          output_addr_const (file, XEXP (x, 1));
          fputs (targetm.asm_out.close_paren, file);
        }
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
    case SUBREG:
    case TRUNCATE:
      /* These conversions do not affect the printed value.  */
      output_addr_const (file, XEXP (x, 0));
      break;

    default:
      /* Give the target a chance to handle machine-specific constants.  */
      if (targetm.asm_out.output_addr_const_extra (file, x))
        break;

      output_operand_lossage ("invalid expression as operand");
    }
}
3854
3855 /* Output a quoted string. */
3857
3858 void
3859 output_quoted_string (FILE *asm_file, const char *string)
3860 {
3861 #ifdef OUTPUT_QUOTED_STRING
3862 OUTPUT_QUOTED_STRING (asm_file, string);
3863 #else
3864 char c;
3865
3866 putc ('\"', asm_file);
3867 while ((c = *string++) != 0)
3868 {
3869 if (ISPRINT (c))
3870 {
3871 if (c == '\"' || c == '\\')
3872 putc ('\\', asm_file);
3873 putc (c, asm_file);
3874 }
3875 else
3876 fprintf (asm_file, "\\%03o", (unsigned char) c);
3877 }
3878 putc ('\"', asm_file);
3879 #endif
3880 }
3881
3882 /* Write a HOST_WIDE_INT number in hex form 0x1234, fast. */
3884
3885 void
3886 fprint_whex (FILE *f, unsigned HOST_WIDE_INT value)
3887 {
3888 char buf[2 + CHAR_BIT * sizeof (value) / 4];
3889 if (value == 0)
3890 putc ('0', f);
3891 else
3892 {
3893 char *p = buf + sizeof (buf);
3894 do
3895 *--p = "0123456789abcdef"[value % 16];
3896 while ((value /= 16) != 0);
3897 *--p = 'x';
3898 *--p = '0';
3899 fwrite (p, 1, buf + sizeof (buf) - p, f);
3900 }
3901 }
3902
3903 /* Internal function that prints an unsigned long in decimal in reverse.
3904 The output string IS NOT null-terminated. */
3905
static int
sprint_ul_rev (char *s, unsigned long value)
{
  /* Peel off decimal digits least-significant first; at least one
     digit is always produced (VALUE == 0 yields "0").  */
  int len = 0;
  do
    {
      s[len++] = (char) ('0' + value % 10);
      value /= 10;
    }
  while (value != 0);
  return len;
}
3924
3925 /* Write an unsigned long as decimal to a file, fast. */
3926
void
fprint_ul (FILE *f, unsigned long value)
{
  /* len(str(2**64)) == 20, so 20 characters always suffice.  */
  char digits[20];
  int n = 0;

  /* Collect the decimal digits least-significant first...  */
  do
    {
      digits[n++] = (char) ('0' + value % 10);
      value /= 10;
    }
  while (value != 0);

  /* ...then emit them most-significant first.  It's probably too
     small to bother with string reversal and fputs.  */
  while (n > 0)
    putc (digits[--n], f);
}
3944
3945 /* Write an unsigned long as decimal to a string, fast.
3946 s must be wide enough to not overflow, at least 21 chars.
3947 Returns the length of the string (without terminating '\0'). */
3948
int
sprint_ul (char *s, unsigned long value)
{
  /* Write the decimal digits least-significant first...  */
  int len = 0;
  do
    {
      s[len++] = (char) ('0' + value % 10);
      value /= 10;
    }
  while (value != 0);
  s[len] = '\0';

  /* ...then swap them into the conventional order in place.  */
  for (int i = 0, j = len - 1; i < j; i++, j--)
    {
      char tmp = s[i];
      s[i] = s[j];
      s[j] = tmp;
    }
  return len;
}
3958
3959 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3960 %R prints the value of REGISTER_PREFIX.
3961 %L prints the value of LOCAL_LABEL_PREFIX.
3962 %U prints the value of USER_LABEL_PREFIX.
3963 %I prints the value of IMMEDIATE_PREFIX.
3964 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3965 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3966
3967 We handle alternate assembler dialects here, just like output_asm_insn. */
3968
void
asm_fprintf (FILE *file, const char *p, ...)
{
  /* BUF accumulates a small printf format string ("%" plus flags,
     width, and conversion) that is then handed to fprintf.  */
  char buf[10];
  char *q, c;
#ifdef ASSEMBLER_DIALECT
  int dialect = 0;
#endif
  va_list argptr;

  va_start (argptr, p);

  buf[0] = '%';

  while ((c = *p++))
    switch (c)
      {
#ifdef ASSEMBLER_DIALECT
      case '{':
      case '}':
      case '|':
        p = do_assembler_dialects (p, &dialect);
        break;
#endif

      case '%':
        c = *p++;
        q = &buf[1];
        /* Copy printf flags...  */
        while (strchr ("-+ #0", c))
          {
            *q++ = c;
            c = *p++;
          }
        /* ...and field width / precision digits into the format.
           NOTE(review): BUF is only 10 bytes; long flag/width runs
           would overflow it -- callers are internal and keep formats
           short.  */
        while (ISDIGIT (c) || c == '.')
          {
            *q++ = c;
            c = *p++;
          }
        switch (c)
          {
          case '%':
            putc ('%', file);
            break;

          case 'd': case 'i': case 'u':
          case 'x': case 'X': case 'o':
          case 'c':
            *q++ = c;
            *q = 0;
            fprintf (file, buf, va_arg (argptr, int));
            break;

          case 'w':
            /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
               'o' cases, but we do not check for those cases.  It
               means that the value is a HOST_WIDE_INT, which may be
               either `long' or `long long'.  */
            memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
            q += strlen (HOST_WIDE_INT_PRINT);
            *q++ = *p++;
            *q = 0;
            fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
            break;

          case 'l':
            *q++ = c;
#ifdef HAVE_LONG_LONG
            /* "%ll" consumes a `long long' argument.  */
            if (*p == 'l')
              {
                *q++ = *p++;
                *q++ = *p++;
                *q = 0;
                fprintf (file, buf, va_arg (argptr, long long));
              }
            else
#endif
              {
                *q++ = *p++;
                *q = 0;
                fprintf (file, buf, va_arg (argptr, long));
              }

            break;

          case 's':
            *q++ = c;
            *q = 0;
            fprintf (file, buf, va_arg (argptr, char *));
            break;

          case 'O':
            /* Let the target transform the opcode that follows.  */
#ifdef ASM_OUTPUT_OPCODE
            ASM_OUTPUT_OPCODE (asm_out_file, p);
#endif
            break;

          case 'R':
#ifdef REGISTER_PREFIX
            fprintf (file, "%s", REGISTER_PREFIX);
#endif
            break;

          case 'I':
#ifdef IMMEDIATE_PREFIX
            fprintf (file, "%s", IMMEDIATE_PREFIX);
#endif
            break;

          case 'L':
#ifdef LOCAL_LABEL_PREFIX
            fprintf (file, "%s", LOCAL_LABEL_PREFIX);
#endif
            break;

          case 'U':
            fputs (user_label_prefix, file);
            break;

#ifdef ASM_FPRINTF_EXTENSIONS
          /* Uppercase letters are reserved for general use by asm_fprintf
             and so are not available to target specific code.  In order to
             prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
             they are defined here.  As they get turned into real extensions
             to asm_fprintf they should be removed from this list.  */
          case 'A': case 'B': case 'C': case 'D': case 'E':
          case 'F': case 'G': case 'H': case 'J': case 'K':
          case 'M': case 'N': case 'P': case 'Q': case 'S':
          case 'T': case 'V': case 'W': case 'Y': case 'Z':
            break;

          ASM_FPRINTF_EXTENSIONS (file, argptr, p)
#endif
          default:
            gcc_unreachable ();
          }
        break;

      default:
        putc (c, file);
      }
  va_end (argptr);
}
4111
4112 /* Return nonzero if this function has no function calls. */
4114
int
leaf_function_p (void)
{
  rtx_insn *insn;

  /* Ensure we walk the entire function body.  */
  gcc_assert (!in_sequence_p ());

  /* Some back-ends (e.g. s390) want leaf functions to stay leaf
     functions even if they call mcount.  */
  if (crtl->profile && !targetm.keep_leaf_when_profiled ())
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* A real call disqualifies the function; sibling and fake
         calls do not.  */
      if (CALL_P (insn)
          && ! SIBLING_CALL_P (insn)
          && ! FAKE_CALL_P (insn))
        return 0;
      /* Also look inside delay-slot SEQUENCEs for a call in the
         delay-slot owner position.  */
      if (NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE
          && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
          && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
        return 0;
    }

  return 1;
}
4143
/* Return 1 if branch is a forward branch.
   Uses insn_shuid array, so it works only in the final pass.  May be used by
   output templates to customarily add branch prediction hints.  */
int
final_forward_branch_p (rtx_insn *insn)
{
  int insn_id, label_id;

  gcc_assert (uid_shuid);
  insn_id = INSN_SHUID (insn);
  label_id = INSN_SHUID (JUMP_LABEL (insn));
  /* We've hit some insns that does not have id information available.  */
  gcc_assert (insn_id && label_id);
  /* The branch is forward when its target label appears later in the
     sequential insn ordering.  */
  return insn_id < label_id;
}
4160
4161 /* On some machines, a function with no call insns
4162 can run faster if it doesn't create its own register window.
4163 When output, the leaf function should use only the "output"
4164 registers. Ordinarily, the function would be compiled to use
4165 the "input" registers to find its arguments; it is a candidate
4166 for leaf treatment if it uses only the "input" registers.
4167 Leaf function treatment means renumbering so the function
4168 uses the "output" registers instead. */
4169
4170 #ifdef LEAF_REGISTERS
4171
4172 /* Return 1 if this function uses only the registers that can be
4173 safely renumbered. */
4174
4175 int
4176 only_leaf_regs_used (void)
4177 {
4178 int i;
4179 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4180
4181 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4182 if ((df_regs_ever_live_p (i) || global_regs[i])
4183 && ! permitted_reg_in_leaf_functions[i])
4184 return 0;
4185
4186 if (crtl->uses_pic_offset_table
4187 && pic_offset_table_rtx != 0
4188 && REG_P (pic_offset_table_rtx)
4189 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4190 return 0;
4191
4192 return 1;
4193 }
4194
4195 /* Scan all instructions and renumber all registers into those
4196 available in leaf functions. */
4197
4198 static void
4199 leaf_renumber_regs (rtx_insn *first)
4200 {
4201 rtx_insn *insn;
4202
4203 /* Renumber only the actual patterns.
4204 The reg-notes can contain frame pointer refs,
4205 and renumbering them could crash, and should not be needed. */
4206 for (insn = first; insn; insn = NEXT_INSN (insn))
4207 if (INSN_P (insn))
4208 leaf_renumber_regs_insn (PATTERN (insn));
4209 }
4210
4211 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4212 available in leaf functions. */
4213
void
leaf_renumber_regs_insn (rtx in_rtx)
{
  int i, j;
  const char *format_ptr;

  if (in_rtx == 0)
    return;

  /* Renumber all input-registers into output-registers according to
     LEAF_REG_REMAP, marking each REG with `used' once processed.  */

  if (REG_P (in_rtx))
    {
      int newreg;

      /* Don't renumber the same reg twice.  */
      if (in_rtx->used)
        return;

      newreg = REGNO (in_rtx);
      /* Don't try to renumber pseudo regs.  It is possible for a pseudo reg
         to reach here as part of a REG_NOTE.  */
      if (newreg >= FIRST_PSEUDO_REGISTER)
        {
          in_rtx->used = 1;
          return;
        }
      newreg = LEAF_REG_REMAP (newreg);
      gcc_assert (newreg >= 0);
      /* Transfer the regs-ever-live information to the remapped
         register.  */
      df_set_regs_ever_live (REGNO (in_rtx), false);
      df_set_regs_ever_live (newreg, true);
      SET_REGNO (in_rtx, newreg);
      in_rtx->used = 1;
      return;
    }

  if (INSN_P (in_rtx))
    {
      /* Inside a SEQUENCE, we find insns.
         Renumber just the patterns of these insns,
         just as we do for the top-level insns.  */
      leaf_renumber_regs_insn (PATTERN (in_rtx));
      return;
    }

  /* Generic walk driven by the rtx format string: recurse into
     subexpressions and vectors, skip scalar fields.  */
  format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        leaf_renumber_regs_insn (XEXP (in_rtx, i));
        break;

      case 'E':
        if (XVEC (in_rtx, i) != NULL)
          for (j = 0; j < XVECLEN (in_rtx, i); j++)
            leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
        break;

      /* Non-rtx fields: nothing to renumber.  */
      case 'S':
      case 's':
      case '0':
      case 'i':
      case 'w':
      case 'p':
      case 'n':
      case 'u':
        break;

      default:
        gcc_unreachable ();
      }
}
4290 #endif
4291
4292 /* Turn the RTL into assembly. */
static unsigned int
rest_of_handle_final (void)
{
  const char *fnname = get_fnname_from_decl (current_function_decl);

  /* Turn debug markers into notes if the var-tracking pass has not
     been invoked.  */
  if (!flag_var_tracking && MAY_HAVE_DEBUG_MARKER_INSNS)
    delete_vta_debug_insns (false);

  /* Emit the function header, then the body via the two-phase final
     machinery.  */
  assemble_start_function (current_function_decl, fnname);
  rtx_insn *first = get_insns ();
  int seen = 0;
  final_start_function_1 (&first, asm_out_file, &seen, optimize);
  final_1 (first, asm_out_file, seen, optimize);
  if (flag_ipa_ra
      && !lookup_attribute ("noipa", DECL_ATTRIBUTES (current_function_decl))
      /* Functions with naked attributes are supported only with basic asm
         statements in the body, thus for supported use cases the information
         on clobbered registers is not available.  */
      && !lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)))
    collect_fn_hard_reg_usage ();
  final_end_function ();

  /* The IA-64 ".handlerdata" directive must be issued before the ".endp"
     directive that closes the procedure descriptor.  Similarly, for x64 SEH.
     Otherwise it's not strictly necessary, but it doesn't hurt either.  */
  output_function_exception_table (crtl->has_bb_partition ? 1 : 0);

  assemble_end_function (current_function_decl, fnname);

  /* Free up reg info memory.  */
  free_reg_info ();

  if (! quiet_flag)
    fflush (asm_out_file);

  /* Write DBX symbols if requested.  */

  /* Note that for those inline functions where we don't initially
     know for certain that we will be generating an out-of-line copy,
     the first invocation of this routine (rest_of_compilation) will
     skip over this code by doing a `goto exit_rest_of_compilation;'.
     Later on, wrapup_global_declarations will (indirectly) call
     rest_of_compilation again for those inline functions that need
     to have out-of-line copies generated.  During that call, we
     *will* be routed past here.  */

  timevar_push (TV_SYMOUT);
  if (!DECL_IGNORED_P (current_function_decl))
    debug_hooks->function_decl (current_function_decl);
  timevar_pop (TV_SYMOUT);

  /* Release the blocks that are linked to DECL_INITIAL() to free the memory.  */
  DECL_INITIAL (current_function_decl) = error_mark_node;

  /* Register static constructors/destructors with the target if this
     function is one.  */
  if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
                                 decl_init_priority_lookup
                                   (current_function_decl));
  if (DECL_STATIC_DESTRUCTOR (current_function_decl)
      && targetm.have_ctors_dtors)
    targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
                                decl_fini_priority_lookup
                                  (current_function_decl));
  return 0;
}
4362
4363 namespace {
4364
4365 const pass_data pass_data_final =
4366 {
4367 RTL_PASS, /* type */
4368 "final", /* name */
4369 OPTGROUP_NONE, /* optinfo_flags */
4370 TV_FINAL, /* tv_id */
4371 0, /* properties_required */
4372 0, /* properties_provided */
4373 0, /* properties_destroyed */
4374 0, /* todo_flags_start */
4375 0, /* todo_flags_finish */
4376 };
4377
4378 class pass_final : public rtl_opt_pass
4379 {
4380 public:
4381 pass_final (gcc::context *ctxt)
4382 : rtl_opt_pass (pass_data_final, ctxt)
4383 {}
4384
4385 /* opt_pass methods: */
4386 virtual unsigned int execute (function *) { return rest_of_handle_final (); }
4387
4388 }; // class pass_final
4389
4390 } // anon namespace
4391
4392 rtl_opt_pass *
4393 make_pass_final (gcc::context *ctxt)
4394 {
4395 return new pass_final (ctxt);
4396 }
4397
4398
4399 static unsigned int
4400 rest_of_handle_shorten_branches (void)
4401 {
4402 /* Shorten branches. */
4403 shorten_branches (get_insns ());
4404 return 0;
4405 }
4406
4407 namespace {
4408
4409 const pass_data pass_data_shorten_branches =
4410 {
4411 RTL_PASS, /* type */
4412 "shorten", /* name */
4413 OPTGROUP_NONE, /* optinfo_flags */
4414 TV_SHORTEN_BRANCH, /* tv_id */
4415 0, /* properties_required */
4416 0, /* properties_provided */
4417 0, /* properties_destroyed */
4418 0, /* todo_flags_start */
4419 0, /* todo_flags_finish */
4420 };
4421
4422 class pass_shorten_branches : public rtl_opt_pass
4423 {
4424 public:
4425 pass_shorten_branches (gcc::context *ctxt)
4426 : rtl_opt_pass (pass_data_shorten_branches, ctxt)
4427 {}
4428
4429 /* opt_pass methods: */
4430 virtual unsigned int execute (function *)
4431 {
4432 return rest_of_handle_shorten_branches ();
4433 }
4434
4435 }; // class pass_shorten_branches
4436
4437 } // anon namespace
4438
4439 rtl_opt_pass *
4440 make_pass_shorten_branches (gcc::context *ctxt)
4441 {
4442 return new pass_shorten_branches (ctxt);
4443 }
4444
4445
/* Tear down per-function RTL state after final output: optionally dump
   the final insn stream (-fdump-final-insns / -fcompare-debug support),
   decompose the insn chain so it can be garbage-collected, reset the
   global "phase completed" flags, and free per-function memory.
   Always returns 0 (no TODO flags).  */

static unsigned int
rest_of_clean_state (void)
{
  rtx_insn *insn, *next;
  FILE *final_output = NULL;
  /* Save dump-format flags; they are forced on below while writing the
     final-insns dump and must be restored afterwards.  */
  int save_unnumbered = flag_dump_unnumbered;
  int save_noaddr = flag_dump_noaddr;

  if (flag_dump_final_insns)
    {
      /* Append, since one dump file accumulates all functions.  */
      final_output = fopen (flag_dump_final_insns, "a");
      if (!final_output)
	{
	  error ("could not open final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  /* Disable further dump attempts after a failed open.  */
	  flag_dump_final_insns = NULL;
	}
      else
	{
	  /* Suppress addresses and UIDs so dumps from the two
	     -fcompare-debug compilations can be compared textually.  */
	  flag_dump_noaddr = flag_dump_unnumbered = 1;
	  if (flag_compare_debug_opt || flag_compare_debug)
	    dump_flags |= TDF_NOUID | TDF_COMPARE_DEBUG;
	  dump_function_header (final_output, current_function_decl,
				dump_flags);
	  final_insns_dump_p = true;

	  /* Canonicalize insn UIDs for the dump: labels get their label
	     number, everything else 0; notes lose their block pointer.  */
	  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	    if (LABEL_P (insn))
	      INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
	    else
	      {
		if (NOTE_P (insn))
		  set_block_for_insn (insn, NULL);
		INSN_UID (insn) = 0;
	      }
	}
    }

  /* It is very important to decompose the RTL instruction chain here:
     debug information keeps pointing into CODE_LABEL insns inside the function
     body.  If these remain pointing to the other insns, we end up preserving
     whole RTL chain and attached detailed debug info in memory.  */
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      SET_NEXT_INSN (insn) = NULL;
      SET_PREV_INSN (insn) = NULL;

      /* For a delay-slot SEQUENCE, the call (if any) is the first insn
	 of the sequence.  */
      rtx_insn *call_insn = insn;
      if (NONJUMP_INSN_P (call_insn)
	  && GET_CODE (PATTERN (call_insn)) == SEQUENCE)
	{
	  rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (call_insn));
	  call_insn = seq->insn (0);
	}
      /* Drop REG_CALL_ARG_LOCATION notes; they are no longer needed
	 once final output is done.  */
      if (CALL_P (call_insn))
	{
	  rtx note
	    = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
	  if (note)
	    remove_note (call_insn, note);
	}

      /* Dump each insn, except debug-related notes, which may differ
	 between the -g and -g0 compilations and so would defeat
	 -fcompare-debug.  */
      if (final_output
	  && (!NOTE_P (insn)
	      || (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
		  && NOTE_KIND (insn) != NOTE_INSN_BEGIN_STMT
		  && NOTE_KIND (insn) != NOTE_INSN_INLINE_ENTRY
		  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
		  && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
		  && NOTE_KIND (insn) != NOTE_INSN_DELETED_DEBUG_LABEL)))
	print_rtl_single (final_output, insn);
    }

  if (final_output)
    {
      /* Restore the dump-format flags saved above.  */
      flag_dump_noaddr = save_noaddr;
      flag_dump_unnumbered = save_unnumbered;
      final_insns_dump_p = false;

      /* fclose flushes; a failure here means the dump is incomplete.  */
      if (fclose (final_output))
	{
	  error ("could not close final insn dump file %qs: %m",
		 flag_dump_final_insns);
	  flag_dump_final_insns = NULL;
	}
    }

  /* Reset the global phase markers for the next function.  */
  flag_rerun_cse_after_global_opts = 0;
  reload_completed = 0;
  epilogue_completed = 0;
#ifdef STACK_REGS
  regstack_completed = 0;
#endif

  /* Clear out the insn_length contents now that they are no
     longer valid.  */
  init_insn_lengths ();

  /* Show no temporary slots allocated.  */
  init_temp_slots ();

  free_bb_for_insn ();

  if (cfun->gimple_df)
    delete_tree_ssa (cfun);

  /* We can reduce stack alignment on call site only when we are sure that
     the function body just produced will be actually used in the final
     executable.  */
  if (flag_ipa_stack_alignment
      && decl_binds_to_current_def_p (current_function_decl))
    {
      unsigned int pref = crtl->preferred_stack_boundary;
      if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
	pref = crtl->stack_alignment_needed;
      cgraph_node::rtl_info (current_function_decl)
	->preferred_incoming_stack_boundary = pref;
    }

  /* Make sure volatile mem refs aren't considered valid operands for
     arithmetic insns.  We must call this here if this is a nested inline
     function, since the above code leaves us in the init_recog state,
     and the function context push/pop code does not save/restore volatile_ok.

     ??? Maybe it isn't necessary for expand_start_function to call this
     anymore if we do it here?  */

  init_recog_no_volatile ();

  /* We're done with this function.  Free up memory if we can.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);
  return 0;
}
4581
4582 namespace {
4583
4584 const pass_data pass_data_clean_state =
4585 {
4586 RTL_PASS, /* type */
4587 "*clean_state", /* name */
4588 OPTGROUP_NONE, /* optinfo_flags */
4589 TV_FINAL, /* tv_id */
4590 0, /* properties_required */
4591 0, /* properties_provided */
4592 PROP_rtl, /* properties_destroyed */
4593 0, /* todo_flags_start */
4594 0, /* todo_flags_finish */
4595 };
4596
4597 class pass_clean_state : public rtl_opt_pass
4598 {
4599 public:
4600 pass_clean_state (gcc::context *ctxt)
4601 : rtl_opt_pass (pass_data_clean_state, ctxt)
4602 {}
4603
4604 /* opt_pass methods: */
4605 virtual unsigned int execute (function *)
4606 {
4607 return rest_of_clean_state ();
4608 }
4609
4610 }; // class pass_clean_state
4611
4612 } // anon namespace
4613
4614 rtl_opt_pass *
4615 make_pass_clean_state (gcc::context *ctxt)
4616 {
4617 return new pass_clean_state (ctxt);
4618 }
4619
4620 /* Return true if INSN is a call to the current function. */
4621
4622 static bool
4623 self_recursive_call_p (rtx_insn *insn)
4624 {
4625 tree fndecl = get_call_fndecl (insn);
4626 return (fndecl == current_function_decl
4627 && decl_binds_to_current_def_p (fndecl));
4628 }
4629
4630 /* Collect hard register usage for the current function. */
4631
4632 static void
4633 collect_fn_hard_reg_usage (void)
4634 {
4635 rtx_insn *insn;
4636 #ifdef STACK_REGS
4637 int i;
4638 #endif
4639 struct cgraph_rtl_info *node;
4640 HARD_REG_SET function_used_regs;
4641
4642 /* ??? To be removed when all the ports have been fixed. */
4643 if (!targetm.call_fusage_contains_non_callee_clobbers)
4644 return;
4645
4646 /* Be conservative - mark fixed and global registers as used. */
4647 function_used_regs = fixed_reg_set;
4648
4649 #ifdef STACK_REGS
4650 /* Handle STACK_REGS conservatively, since the df-framework does not
4651 provide accurate information for them. */
4652
4653 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
4654 SET_HARD_REG_BIT (function_used_regs, i);
4655 #endif
4656
4657 for (insn = get_insns (); insn != NULL_RTX; insn = next_insn (insn))
4658 {
4659 HARD_REG_SET insn_used_regs;
4660
4661 if (!NONDEBUG_INSN_P (insn))
4662 continue;
4663
4664 if (CALL_P (insn)
4665 && !self_recursive_call_p (insn))
4666 function_used_regs
4667 |= insn_callee_abi (insn).full_and_partial_reg_clobbers ();
4668
4669 find_all_hard_reg_sets (insn, &insn_used_regs, false);
4670 function_used_regs |= insn_used_regs;
4671
4672 if (hard_reg_set_subset_p (crtl->abi->full_and_partial_reg_clobbers (),
4673 function_used_regs))
4674 return;
4675 }
4676
4677 /* Mask out fully-saved registers, so that they don't affect equality
4678 comparisons between function_abis. */
4679 function_used_regs &= crtl->abi->full_and_partial_reg_clobbers ();
4680
4681 node = cgraph_node::rtl_info (current_function_decl);
4682 gcc_assert (node != NULL);
4683
4684 node->function_used_regs = function_used_regs;
4685 }
4686