/* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999-2022 Free Software Foundation, Inc.
3 Originally contributed by Michael P. Hayes
4 (m.hayes (at) elec.canterbury.ac.nz, mhayes (at) redhat.com)
5 Major rewrite contributed by Danny Berlin (dberlin (at) dberlin.org)
6 and Kenneth Zadeck (zadeck (at) naturalbridge.com).
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "df.h"
32 #include "memmodel.h"
33 #include "tm_p.h"
34 #include "regs.h"
35 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
36 #include "dumpfile.h"
37 #include "calls.h"
38 #include "function-abi.h"
39
40 /* The set of hard registers in eliminables[i].from. */
41
42 static HARD_REG_SET elim_reg_set;
43
/* Scratch record used while scanning a single insn or basic block.
   The refs collected here are later either installed into the
   insn/block chains or compared against the existing refs and
   discarded.  */

class df_collection_rec
{
public:
  auto_vec<df_ref, 128> def_vec;	 /* Defs found during the scan.  */
  auto_vec<df_ref, 32> use_vec;		 /* Uses from the insn pattern.  */
  auto_vec<df_ref, 32> eq_use_vec;	 /* Uses flagged DF_REF_IN_NOTE.  */
  auto_vec<df_mw_hardreg *, 32> mw_vec;	 /* Multiword hard-reg records.  */
};
55
56 static void df_ref_record (enum df_ref_class, class df_collection_rec *,
57 rtx, rtx *,
58 basic_block, struct df_insn_info *,
59 enum df_ref_type, int ref_flags);
60 static void df_def_record_1 (class df_collection_rec *, rtx *,
61 basic_block, struct df_insn_info *,
62 int ref_flags);
63 static void df_defs_record (class df_collection_rec *, rtx,
64 basic_block, struct df_insn_info *,
65 int ref_flags);
66 static void df_uses_record (class df_collection_rec *,
67 rtx *, enum df_ref_type,
68 basic_block, struct df_insn_info *,
69 int ref_flags);
70
71 static void df_install_ref_incremental (df_ref);
72 static void df_insn_refs_collect (class df_collection_rec*,
73 basic_block, struct df_insn_info *);
74 static void df_canonize_collection_rec (class df_collection_rec *);
75
76 static void df_get_regular_block_artificial_uses (bitmap);
77 static void df_get_eh_block_artificial_uses (bitmap);
78
79 static void df_record_entry_block_defs (bitmap);
80 static void df_record_exit_block_uses (bitmap);
81 static void df_get_exit_block_use_set (bitmap);
82 static void df_get_entry_block_def_set (bitmap);
83 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
84 static void df_ref_chain_delete_du_chain (df_ref);
85 static void df_ref_chain_delete (df_ref);
86
87 static void df_refs_add_to_chains (class df_collection_rec *,
88 basic_block, rtx_insn *, unsigned int);
89
90 static bool df_insn_refs_verify (class df_collection_rec *, basic_block,
91 rtx_insn *, bool);
92 static void df_entry_block_defs_collect (class df_collection_rec *, bitmap);
93 static void df_exit_block_uses_collect (class df_collection_rec *, bitmap);
94 static void df_install_ref (df_ref, struct df_reg_info *,
95 struct df_ref_info *, bool);
96
97 static int df_ref_compare (df_ref, df_ref);
98 static int df_ref_ptr_compare (const void *, const void *);
99 static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
100 static int df_mw_ptr_compare (const void *, const void *);
101
102 static void df_insn_info_delete (unsigned int);
103
104 /* Indexed by hardware reg number, is true if that register is ever
105 used in the current function.
106
107 In df-scan.cc, this is set up to record the hard regs used
108 explicitly. Reload adds in the hard regs used for holding pseudo
109 regs. Final uses it to generate the code in the function prologue
110 and epilogue to save and restore registers as needed. */
111
112 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
113
114 /* Flags used to tell df_refs_add_to_chains() which vectors it should copy. */
115 static const unsigned int copy_defs = 0x1;
116 static const unsigned int copy_uses = 0x2;
117 static const unsigned int copy_eq_uses = 0x4;
118 static const unsigned int copy_mw = 0x8;
119 static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
120 | copy_mw;
121
122 /*----------------------------------------------------------------------------
124 SCANNING DATAFLOW PROBLEM
125
126 There are several ways in which scanning looks just like the other
127 dataflow problems. It shares the all the mechanisms for local info
128 as well as basic block info. Where it differs is when and how often
129 it gets run. It also has no need for the iterative solver.
130 ----------------------------------------------------------------------------*/
131
/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  /* Allocation pools for the three df_ref classes, the insn records,
     the per-register records, and multiword hard-reg records.
     Releasing a pool frees everything allocated from it.  */
  object_allocator<df_base_ref> *ref_base_pool;
  object_allocator<df_artificial_ref> *ref_artificial_pool;
  object_allocator<df_regular_ref> *ref_regular_pool;
  object_allocator<df_insn_info> *insn_pool;
  object_allocator<df_reg_info> *reg_pool;
  object_allocator<df_mw_hardreg> *mw_reg_pool;

  /* Obstacks backing the register and insn bitmaps initialized in
     df_scan_alloc.  */
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};
145
/* Internal function to shut down the scanning problem.  Frees the ref
   tables, the per-register and per-insn arrays, and releases all
   allocation pools and bitmap obstacks owned by the problem data.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Free the def and use ref tables together with their begin/count
     arrays, then zero the containing structures.  */
  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  /* Free the per-register def/use/eq-use arrays.  */
  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  /* Free the per-insn info array.  */
  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  /* Clear the bitmaps; their backing storage lives on the obstacks
     released below.  */
  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Deleting the pools frees every ref, insn record, reg record and
     mw_hardreg allocated from them in one go.  */
  delete problem_data->ref_base_pool;
  delete problem_data->ref_artificial_pool;
  delete problem_data->ref_regular_pool;
  delete problem_data->insn_pool;
  delete problem_data->reg_pool;
  delete problem_data->mw_reg_pool;
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
199
200
/* Free basic block info.  BB is the block; VBB_INFO is its
   df_scan_bb_info as handed back by the block-info table.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  rtx_insn *insn;

  /* Delete the scan records of every insn in the block.  */
  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_insn_info_delete (INSN_UID (insn));

  /* Prefer the live table entry over the callback argument while the
     block index is still within the table.  */
  if (bb_index < df_scan->block_info_size)
    bb_info = df_scan_get_bb_info (bb_index);

  /* Get rid of any artificial uses or defs.  */
  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
  df_ref_chain_delete (bb_info->artificial_defs);
  df_ref_chain_delete (bb_info->artificial_uses);
  bb_info->artificial_defs = NULL;
  bb_info->artificial_uses = NULL;
}
225
226
/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  ALL_BLOCKS is unused here; scanning always covers
   every block.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  /* Create the allocation pools for every kind of scan record.  */
  problem_data->ref_base_pool = new object_allocator<df_base_ref>
    ("df_scan ref base");
  problem_data->ref_artificial_pool = new object_allocator<df_artificial_ref>
    ("df_scan ref artificial");
  problem_data->ref_regular_pool = new object_allocator<df_regular_ref>
    ("df_scan ref regular");
  problem_data->insn_pool = new object_allocator<df_insn_info>
    ("df_scan insn");
  problem_data->reg_pool = new object_allocator<df_reg_info>
    ("df_scan reg");
  problem_data->mw_reg_pool = new object_allocator<df_mw_hardreg>
    ("df_scan mw_reg");

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  /* Reset the artificial ref chains of every block, including the
     entry and exit blocks.  */
  FOR_ALL_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Register bitmaps live on reg_bitmaps, insn bitmaps on
     insn_bitmaps; both obstacks are released in
     df_scan_free_internal.  */
  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}
284
285
/* Free all of the data associated with the scan problem, including
   the df_scan instance itself.  */

static void
df_scan_free (void)
{
  /* Tear down pools, tables and bitmaps first.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}
302
/* Dump the preamble for DF_SCAN dump: the global register sets, the
   per-register ref counts, and a summary of total ref and insn
   counts.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;	/* Running total of defs.  */
  int ucount = 0;	/* Running total of uses.  */
  int ecount = 0;	/* Running total of note (eq) uses.  */
  int icount = 0;	/* Number of non-call insns.  */
  int ccount = 0;	/* Number of call insns.  */
  basic_block bb;
  rtx_insn *insn;

  fprintf (file, ";; fully invalidated by EH \t");
  df_print_regset
    (file, bitmap_view<HARD_REG_SET> (eh_edge_abi.full_reg_clobbers ()));
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d [%s]", i, reg_names[i]);
  fprintf (file, "\n;; ref usage \t");

  /* Print "r<N>={<defs>d,<uses>u,<eq-uses>e}" for every register that
     has any refs, accumulating totals as we go.  */
  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
	const char * sep = "";

	fprintf (file, "r%d={", i);
	if (DF_REG_DEF_COUNT (i))
	  {
	    fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
	    sep = ",";
	    dcount += DF_REG_DEF_COUNT (i);
	  }
	if (DF_REG_USE_COUNT (i))
	  {
	    fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
	    sep = ",";
	    ucount += DF_REG_USE_COUNT (i);
	  }
	if (DF_REG_EQ_USE_COUNT (i))
	  {
	    fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
	    ecount += DF_REG_EQ_USE_COUNT (i);
	  }
	fprintf (file, "} ");
      }

  /* Count regular vs call insns for the summary line.  */
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    ccount++;
	  else
	    icount++;
	}

  fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
	   " in %d{%d regular + %d call} insns.\n",
	   dcount + ucount + ecount, dcount, ucount, ecount,
	   icount + ccount, icount, ccount);
}
376
/* Dump the bb_info for a given basic block: its artificial defs and
   uses chains.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  /* Disabled: per-insn dumps are very verbose; enable by hand when
     debugging.  */
  {
    rtx_insn *insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	df_insn_debug (insn, false, file);
  }
#endif
}
401
/* Problem definition for the scanning pass.  Most of the solver hooks
   are NULL because scanning needs no iterative solution; only the
   allocation, freeing and dump hooks are provided.  */
static const struct df_problem problem_SCAN =
{
  DF_SCAN,			/* Problem id.  */
  DF_NONE,			/* Direction.  */
  df_scan_alloc,		/* Allocate the problem specific data.  */
  NULL,				/* Reset global information.  */
  df_scan_free_bb_info,		/* Free basic block info.  */
  NULL,				/* Local compute function.  */
  NULL,				/* Init the solution specific data.  */
  NULL,				/* Iterative solver.  */
  NULL,				/* Confluence operator 0.  */
  NULL,				/* Confluence operator n.  */
  NULL,				/* Transfer function.  */
  NULL,				/* Finalize function.  */
  df_scan_free,			/* Free all of the problem information.  */
  NULL,				/* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,		/* Debugging.  */
  df_scan_start_block,		/* Debugging start block.  */
  NULL,				/* Debugging end block.  */
  NULL,				/* Debugging start insn.  */
  NULL,				/* Debugging end insn.  */
  NULL,				/* Incremental solution verify start.  */
  NULL,				/* Incremental solution verify end.  */
  NULL,				/* Dependent problem.  */
  sizeof (struct df_scan_bb_info),/* Size of entry of block_info array.  */
  TV_DF_SCAN,			/* Timing variable.  */
  false				/* Reset blocks on dropping out of blocks_to_analyze.  */
};
430
431
/* Create a new DATAFLOW instance for the scanning problem and add it
   to the existing instance of DF.  (The problem's data is reached
   through the df instance, not through a return value.)  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
441
442
443 /*----------------------------------------------------------------------------
445 Storage Allocation Utilities
446 ----------------------------------------------------------------------------*/
447
448
/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      /* Over-allocate by 25% to amortize future growth.  */
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
				    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  /* Allocate zeroed reg_info structures for every register slot that
     has not yet been initialized.  */
  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
501
502
503 /* Grow the ref information. */
504
505 static void
506 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
507 {
508 if (ref_info->refs_size < new_size)
509 {
510 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
511 memset (ref_info->refs + ref_info->refs_size, 0,
512 (new_size - ref_info->refs_size) *sizeof (df_ref));
513 ref_info->refs_size = new_size;
514 }
515 }
516
517
518 /* Check and grow the ref information if necessary. This routine
519 guarantees total_size + BITMAP_ADDEND amount of entries in refs
520 array. It updates ref_info->refs_size only and does not change
521 ref_info->total_size. */
522
523 static void
524 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
525 unsigned bitmap_addend)
526 {
527 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
528 {
529 int new_size = ref_info->total_size + bitmap_addend;
530 new_size += ref_info->total_size / 4;
531 df_grow_ref_info (ref_info, new_size);
532 }
533 }
534
535
/* Grow the insn info array.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      /* Zero the newly added tail so unscanned UIDs read as NULL.  */
      memset (df->insns + df->insns_size, 0,
	      (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}
553
554
555
556
557 /*----------------------------------------------------------------------------
559 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
560 ----------------------------------------------------------------------------*/
561
/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  /* Compute the hard registers implicitly used by regular blocks and
     by EH blocks; EH blocks include the regular set.  */
  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  bitmap_ior_into (&df->eh_block_artificial_uses,
		   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
594
/* Create new refs under address LOC within INSN.  This function is
   only used externally.  REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
   depending on whether LOC is inside PATTERN (INSN) or a note.  */

void
df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
{
  /* Only DF_REF_IN_NOTE is meaningful for external callers.  */
  gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
  df_uses_record (NULL, loc, DF_REF_REG_USE,
		  BLOCK_FOR_INSN (insn),
		  DF_INSN_INFO_GET (insn),
		  ref_flags);
}
608
/* Install the newly-created reference REF into the appropriate
   per-register info, the insn's sorted ref chain, and (when one is
   being kept) the global ref table.  Marks REF's block dirty.  */

static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_ptr;
  bool add_to_table;

  rtx_insn *insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  /* Select the reg_info array, ref table and insn chain matching
     REF's kind: def, note (eq) use, or regular use.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_EQ_USES (insn);
      /* Note uses only belong in the table when the current ordering
	 includes notes.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  add_to_table = true;
	  break;
	default:
	  add_to_table = false;
	  break;
	}
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  /* Appending one ref out of order downgrades the table ordering to
     "unordered", preserving only the with/without-notes property.  */
  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	break;
      default:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	break;
      }

  /* Keep the insn's ref chain sorted: advance to REF's slot and
     splice it in.  */
  while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);

  DF_REF_NEXT_LOC (ref) = *ref_ptr;
  *ref_ptr = ref;

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}
690
691
692
693 /*----------------------------------------------------------------------------
695 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
696 ----------------------------------------------------------------------------*/
697
/* Return REF to the allocation pool from which it came, based on its
   ref class.  */

static void
df_free_ref (df_ref ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_BASE:
      problem_data->ref_base_pool->remove ((df_base_ref *) (ref));
      break;

    case DF_REF_ARTIFICIAL:
      problem_data->ref_artificial_pool->remove
	((df_artificial_ref *) (ref));
      break;

    case DF_REF_REGULAR:
      problem_data->ref_regular_pool->remove
	((df_regular_ref *) (ref));
      break;
    }
}
721
722
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  Finally
   returns REF to its allocation pool.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  /* Select the reg_info record and (when one is kept) the ref table
     that REF lives in: def, note (eq) use, or regular use.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	{
	  reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
	  /* Note uses are only present in the table for the
	     WITH_NOTES orderings.  */
	  switch (df->use_info.ref_order)
	    {
	    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	    case DF_REF_ORDER_BY_REG_WITH_NOTES:
	    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	      refs = df->use_info.refs;
	      break;
	    default:
	      break;
	    }
	}
      else
	{
	  reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
	  refs = df->use_info.refs;
	}
    }

  /* Clear REF's slot in the ref table, but only when its block is
     part of the analyzed subset (or no subset is in force).  */
  if (refs)
    {
      if (df->analyze_subset)
	{
	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
	    refs[id] = NULL;
	}
      else
	refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}
804
/* Initialize INSN_INFO to describe INSN.  */

static void
df_insn_info_init_fields (df_insn_info *insn_info, rtx_insn *insn)
{
  /* Zero every field, then record the insn itself.  */
  memset (insn_info, 0, sizeof (struct df_insn_info));
  insn_info->insn = insn;
}
813
/* Create the insn record for INSN.  If there was one there, zero it
   out.  Returns the (re)initialized record.  */

struct df_insn_info *
df_insn_create_insn_record (rtx_insn *insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  /* Make sure the insn table can hold INSN's uid.  */
  df_grow_insn_info ();
  insn_rec = DF_INSN_INFO_GET (insn);
  if (!insn_rec)
    {
      insn_rec = problem_data->insn_pool->allocate ();
      DF_INSN_INFO_SET (insn, insn_rec);
    }
  df_insn_info_init_fields (insn_rec, insn);
  return insn_rec;
}
834
835
836 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
837
838 static void
839 df_ref_chain_delete_du_chain (df_ref ref)
840 {
841 for (; ref; ref = DF_REF_NEXT_LOC (ref))
842 /* CHAIN is allocated by DF_CHAIN. So make sure to
843 pass df_scan instance for the problem. */
844 if (DF_REF_CHAIN (ref))
845 df_chain_unlink (ref);
846 }
847
848
849 /* Delete all refs in the ref chain. */
850
851 static void
852 df_ref_chain_delete (df_ref ref)
853 {
854 df_ref next;
855 for (; ref; ref = next)
856 {
857 next = DF_REF_NEXT_LOC (ref);
858 df_reg_chain_unlink (ref);
859 }
860 }
861
862
863 /* Delete the hardreg chain. */
864
865 static void
866 df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
867 {
868 struct df_scan_problem_data *problem_data
869 = (struct df_scan_problem_data *) df_scan->problem_data;
870 df_mw_hardreg *next;
871
872 for (; hardregs; hardregs = next)
873 {
874 next = DF_MWS_NEXT (hardregs);
875 problem_data->mw_reg_pool->remove (hardregs);
876 }
877 }
878
/* Remove the contents of INSN_INFO (but don't free INSN_INFO itself).  */

static void
df_insn_info_free_fields (df_insn_info *insn_info)
{
  /* In general, notes do not have the insn_info fields
     initialized.  However, combine deletes insns by changing them
     to notes.  How clever.  So we cannot just check if it is a
     valid insn before short circuiting this code, we need to see
     if we actually initialized it.  */
  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  /* If the chain problem is active, tear down the def-use/use-def
     chains before freeing the refs themselves.  */
  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);
}
902
/* Delete all of the refs information from the insn with UID.
   Internal helper for df_insn_delete, df_insn_rescan, and other
   df-scan routines that don't have to work in deferred mode
   and do not have to mark basic blocks for re-processing.  */

static void
df_insn_info_delete (unsigned int uid)
{
  struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* UID no longer has any deferred work pending.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;

      /* Free the refs and mw_hardregs, return the record to its
	 pool, and clear the table slot.  */
      df_insn_info_free_fields (insn_info);
      problem_data->insn_pool->remove (insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
926
/* Delete all of the refs information from INSN, either right now
   or marked for later in deferred mode.  */

void
df_insn_delete (rtx_insn *insn)
{
  unsigned int uid;
  basic_block bb;

  gcc_checking_assert (INSN_P (insn));

  /* Nothing to do when dataflow info does not exist.  */
  if (!df)
    return;

  uid = INSN_UID (insn);
  bb = BLOCK_FOR_INSN (insn);

  /* ??? bb can be NULL after pass_free_cfg.  At that point, DF should
     not exist anymore (as mentioned in df-core.cc: "The only requirement
     [for DF] is that there be a correct control flow graph."  Clearly
     that isn't the case after pass_free_cfg.  But DF is freed much later
     because some back-ends want to use DF info even though the CFG is
     already gone.  It's not clear to me whether that is safe, actually.
     In any case, we expect BB to be non-NULL at least up to register
     allocation, so disallow a non-NULL BB up to there.  Not perfect
     but better than nothing...  */
  gcc_checking_assert (bb != NULL || reload_completed);

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.
     DEBUG_INSNs do not make a block's data flow solution dirty (at
     worst the LUIDs are no longer contiguous).  */
  if (bb != NULL && NONDEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	{
	  /* Deletion supersedes any pending rescan request.  */
	  bitmap_clear_bit (&df->insns_to_rescan, uid);
	  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
	  bitmap_set_bit (&df->insns_to_delete, uid);
	}
      if (dump_file)
	fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  df_insn_info_delete (uid);
}
986
987
/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (class df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  /* Return every collected ref and mw_hardreg to its pool.  */
  FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
    problem_data->mw_reg_pool->remove (mw);

  /* Release the auto_vec storage itself.  */
  collection_rec->def_vec.release ();
  collection_rec->use_vec.release ();
  collection_rec->eq_use_vec.release ();
  collection_rec->mw_vec.release ();
}
1013
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  class df_collection_rec collection_rec;

  /* Nothing to do if dataflow info isn't live or the insn carries no
     dataflow-relevant content.  */
  if ((!df) || (!INSN_P (insn)))
    return false;

  /* An insn not emitted into a block cannot be scanned.  */
  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  Queue UID for
     df_process_deferred_rescans instead of scanning now.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          /* Create an empty record so the deferred pass can find the
             insn; the ref chains get filled in at that point.  */
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = 0;
          insn_info->uses = 0;
          insn_info->eq_uses = 0;
          insn_info->mw_hardregs = 0;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      /* A pending full rescan supersedes any pending deletion or
         notes-only rescan of the same insn.  */
      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  /* Scanning immediately, so drop any deferred work for this insn.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false. */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
         Since the insn isn't moved, we can salvage its LUID. */
      luid = DF_INSN_LUID (insn);
      df_insn_info_free_fields (insn_info);
      df_insn_info_init_fields (insn_info, insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      /* First time this insn is seen: build a fresh record and
         collect its refs from scratch.  */
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
  /* DEBUG_INSNs never dirty the dataflow solution.  */
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  return true;
}
1102
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  Used when a debug insn's location is reset to unknown;
   returns TRUE if any refs were actually removed.  */

bool
df_insn_rescan_debug_internal (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  /* Only valid for debug insns whose location is already unknown.  */
  gcc_assert (DEBUG_INSN_P (insn)
              && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  /* Cancel any deferred work queued for this insn.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  /* No refs recorded: nothing changes.  */
  if (insn_info->defs == 0
      && insn_info->uses == 0
      && insn_info->eq_uses == 0
      && insn_info->mw_hardregs == 0)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  /* Remove du/ud chain entries before freeing the refs they point
     at.  */
  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  /* The record itself stays, but empty.  */
  insn_info->defs = 0;
  insn_info->uses = 0;
  insn_info->eq_uses = 0;
  insn_info->mw_hardregs = 0;

  return true;
}
1155
1156
1157 /* Rescan all of the insns in the function. Note that the artificial
1158 uses and defs are not touched. This function will destroy def-use
1159 or use-def chains. */
1160
1161 void
1162 df_insn_rescan_all (void)
1163 {
1164 bool no_insn_rescan = false;
1165 bool defer_insn_rescan = false;
1166 basic_block bb;
1167 bitmap_iterator bi;
1168 unsigned int uid;
1169
1170 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1171 {
1172 df_clear_flags (DF_NO_INSN_RESCAN);
1173 no_insn_rescan = true;
1174 }
1175
1176 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1177 {
1178 df_clear_flags (DF_DEFER_INSN_RESCAN);
1179 defer_insn_rescan = true;
1180 }
1181
1182 auto_bitmap tmp (&df_bitmap_obstack);
1183 bitmap_copy (tmp, &df->insns_to_delete);
1184 EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1185 {
1186 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1187 if (insn_info)
1188 df_insn_info_delete (uid);
1189 }
1190
1191 bitmap_clear (&df->insns_to_delete);
1192 bitmap_clear (&df->insns_to_rescan);
1193 bitmap_clear (&df->insns_to_notes_rescan);
1194
1195 FOR_EACH_BB_FN (bb, cfun)
1196 {
1197 rtx_insn *insn;
1198 FOR_BB_INSNS (bb, insn)
1199 {
1200 df_insn_rescan (insn);
1201 }
1202 }
1203
1204 if (no_insn_rescan)
1205 df_set_flags (DF_NO_INSN_RESCAN);
1206 if (defer_insn_rescan)
1207 df_set_flags (DF_DEFER_INSN_RESCAN);
1208 }
1209
1210
/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;

  /* Temporarily switch to immediate-rescan mode, remembering which
     flags were set so they can be restored at the end.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Iterate over copies of the worklists since processing an insn can
     modify the underlying bitmaps.  Deletions run first so stale
     records are never rescanned.  */
  auto_bitmap tmp (&df_bitmap_obstack);
  bitmap_copy (tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_info_delete (uid);
    }

  bitmap_copy (tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  /* Notes-only rescans go last; full rescans already cover them.  */
  bitmap_copy (tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Restore the caller's rescan mode.  */
  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
1281
1282
1283 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1284 the uses if INCLUDE_USES. Include the eq_uses if
1285 INCLUDE_EQ_USES. */
1286
1287 static unsigned int
1288 df_count_refs (bool include_defs, bool include_uses,
1289 bool include_eq_uses)
1290 {
1291 unsigned int regno;
1292 int size = 0;
1293 unsigned int m = df->regs_inited;
1294
1295 for (regno = 0; regno < m; regno++)
1296 {
1297 if (include_defs)
1298 size += DF_REG_DEF_COUNT (regno);
1299 if (include_uses)
1300 size += DF_REG_USE_COUNT (regno);
1301 if (include_eq_uses)
1302 size += DF_REG_EQ_USE_COUNT (regno);
1303 }
1304 return size;
1305 }
1306
1307
/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      /* Hard regs are excluded from the table; zero their begin and
         count slots so stale data is never read.  */
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  /* Pack each register's refs into a contiguous [begin, begin+count)
     slice of the table, renumbering each ref's id to its new table
     position.  Defs come first, then uses, then eq_uses.  */
  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          df_ref ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          df_ref ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
1385
1386
1387 /* Take build ref table for either the uses or defs from the reg-use
1388 or reg-def chains. This version processes the refs in insn order
1389 which is likely to be best if processing some segment of the
1390 function. */
1391
1392 static void
1393 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1394 bool include_defs,
1395 bool include_uses,
1396 bool include_eq_uses)
1397 {
1398 bitmap_iterator bi;
1399 unsigned int bb_index;
1400 unsigned int m = df->regs_inited;
1401 unsigned int offset = 0;
1402 unsigned int r;
1403 unsigned int start
1404 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1405
1406 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1407 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1408
1409 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1410 df_check_and_grow_ref_info (ref_info, 1);
1411
1412 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1413 {
1414 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1415 rtx_insn *insn;
1416 df_ref def, use;
1417
1418 if (include_defs)
1419 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1420 {
1421 unsigned int regno = DF_REF_REGNO (def);
1422 ref_info->count[regno]++;
1423 }
1424 if (include_uses)
1425 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1426 {
1427 unsigned int regno = DF_REF_REGNO (use);
1428 ref_info->count[regno]++;
1429 }
1430
1431 FOR_BB_INSNS (bb, insn)
1432 {
1433 if (INSN_P (insn))
1434 {
1435 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1436
1437 if (include_defs)
1438 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1439 {
1440 unsigned int regno = DF_REF_REGNO (def);
1441 ref_info->count[regno]++;
1442 }
1443 if (include_uses)
1444 FOR_EACH_INSN_INFO_USE (use, insn_info)
1445 {
1446 unsigned int regno = DF_REF_REGNO (use);
1447 ref_info->count[regno]++;
1448 }
1449 if (include_eq_uses)
1450 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1451 {
1452 unsigned int regno = DF_REF_REGNO (use);
1453 ref_info->count[regno]++;
1454 }
1455 }
1456 }
1457 }
1458
1459 for (r = start; r < m; r++)
1460 {
1461 ref_info->begin[r] = offset;
1462 offset += ref_info->count[r];
1463 ref_info->count[r] = 0;
1464 }
1465
1466 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1467 {
1468 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1469 rtx_insn *insn;
1470 df_ref def, use;
1471
1472 if (include_defs)
1473 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1474 {
1475 unsigned int regno = DF_REF_REGNO (def);
1476 if (regno >= start)
1477 {
1478 unsigned int id
1479 = ref_info->begin[regno] + ref_info->count[regno]++;
1480 DF_REF_ID (def) = id;
1481 ref_info->refs[id] = def;
1482 }
1483 }
1484 if (include_uses)
1485 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1486 {
1487 unsigned int regno = DF_REF_REGNO (def);
1488 if (regno >= start)
1489 {
1490 unsigned int id
1491 = ref_info->begin[regno] + ref_info->count[regno]++;
1492 DF_REF_ID (use) = id;
1493 ref_info->refs[id] = use;
1494 }
1495 }
1496
1497 FOR_BB_INSNS (bb, insn)
1498 {
1499 if (INSN_P (insn))
1500 {
1501 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1502
1503 if (include_defs)
1504 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1505 {
1506 unsigned int regno = DF_REF_REGNO (def);
1507 if (regno >= start)
1508 {
1509 unsigned int id
1510 = ref_info->begin[regno] + ref_info->count[regno]++;
1511 DF_REF_ID (def) = id;
1512 ref_info->refs[id] = def;
1513 }
1514 }
1515 if (include_uses)
1516 FOR_EACH_INSN_INFO_USE (use, insn_info)
1517 {
1518 unsigned int regno = DF_REF_REGNO (use);
1519 if (regno >= start)
1520 {
1521 unsigned int id
1522 = ref_info->begin[regno] + ref_info->count[regno]++;
1523 DF_REF_ID (use) = id;
1524 ref_info->refs[id] = use;
1525 }
1526 }
1527 if (include_eq_uses)
1528 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1529 {
1530 unsigned int regno = DF_REF_REGNO (use);
1531 if (regno >= start)
1532 {
1533 unsigned int id
1534 = ref_info->begin[regno] + ref_info->count[regno]++;
1535 DF_REF_ID (use) = id;
1536 ref_info->refs[id] = use;
1537 }
1538 }
1539 }
1540 }
1541 }
1542
1543 /* The bitmap size is not decremented when refs are deleted. So
1544 reset it now that we have squished out all of the empty
1545 slots. */
1546
1547 ref_info->table_size = offset;
1548 }
1549
1550 /* Take build ref table for either the uses or defs from the reg-use
1551 or reg-def chains. */
1552
1553 static void
1554 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1555 bool include_defs,
1556 bool include_uses,
1557 bool include_eq_uses)
1558 {
1559 if (df->analyze_subset)
1560 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1561 include_uses, include_eq_uses);
1562 else
1563 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1564 include_uses, include_eq_uses);
1565 }
1566
1567
1568 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1569 static unsigned int
1570 df_add_refs_to_table (unsigned int offset,
1571 struct df_ref_info *ref_info,
1572 df_ref ref)
1573 {
1574 for (; ref; ref = DF_REF_NEXT_LOC (ref))
1575 if (!(df->changeable_flags & DF_NO_HARD_REGS)
1576 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1577 {
1578 ref_info->refs[offset] = ref;
1579 DF_REF_ID (ref) = offset++;
1580 }
1581 return offset;
1582 }
1583
1584
1585 /* Count the number of refs in all of the insns of BB. Include the
1586 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1587 eq_uses if INCLUDE_EQ_USES. */
1588
1589 static unsigned int
1590 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1591 struct df_ref_info *ref_info,
1592 bool include_defs, bool include_uses,
1593 bool include_eq_uses)
1594 {
1595 rtx_insn *insn;
1596
1597 if (include_defs)
1598 offset = df_add_refs_to_table (offset, ref_info,
1599 df_get_artificial_defs (bb->index));
1600 if (include_uses)
1601 offset = df_add_refs_to_table (offset, ref_info,
1602 df_get_artificial_uses (bb->index));
1603
1604 FOR_BB_INSNS (bb, insn)
1605 if (INSN_P (insn))
1606 {
1607 unsigned int uid = INSN_UID (insn);
1608 if (include_defs)
1609 offset = df_add_refs_to_table (offset, ref_info,
1610 DF_INSN_UID_DEFS (uid));
1611 if (include_uses)
1612 offset = df_add_refs_to_table (offset, ref_info,
1613 DF_INSN_UID_USES (uid));
1614 if (include_eq_uses)
1615 offset = df_add_refs_to_table (offset, ref_info,
1616 DF_INSN_UID_EQ_USES (uid));
1617 }
1618 return offset;
1619 }
1620
1621
1622 /* Organize the refs by insn into the table in REF_INFO. If
1623 blocks_to_analyze is defined, use that set, otherwise the entire
1624 program. Include the defs if INCLUDE_DEFS. Include the uses if
1625 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1626
1627 static void
1628 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1629 bool include_defs, bool include_uses,
1630 bool include_eq_uses)
1631 {
1632 basic_block bb;
1633 unsigned int offset = 0;
1634
1635 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1636 df_check_and_grow_ref_info (ref_info, 1);
1637 if (df->blocks_to_analyze)
1638 {
1639 bitmap_iterator bi;
1640 unsigned int index;
1641
1642 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1643 {
1644 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
1645 index),
1646 offset, ref_info,
1647 include_defs, include_uses,
1648 include_eq_uses);
1649 }
1650
1651 ref_info->table_size = offset;
1652 }
1653 else
1654 {
1655 FOR_ALL_BB_FN (bb, cfun)
1656 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1657 include_defs, include_uses,
1658 include_eq_uses);
1659 ref_info->table_size = offset;
1660 }
1661 }
1662
1663
1664 /* If the use refs in DF are not organized, reorganize them. */
1665
1666 void
1667 df_maybe_reorganize_use_refs (enum df_ref_order order)
1668 {
1669 if (order == df->use_info.ref_order)
1670 return;
1671
1672 switch (order)
1673 {
1674 case DF_REF_ORDER_BY_REG:
1675 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1676 break;
1677
1678 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1679 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1680 break;
1681
1682 case DF_REF_ORDER_BY_INSN:
1683 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1684 break;
1685
1686 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1687 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1688 break;
1689
1690 case DF_REF_ORDER_NO_TABLE:
1691 free (df->use_info.refs);
1692 df->use_info.refs = NULL;
1693 df->use_info.refs_size = 0;
1694 break;
1695
1696 case DF_REF_ORDER_UNORDERED:
1697 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1698 gcc_unreachable ();
1699 break;
1700 }
1701
1702 df->use_info.ref_order = order;
1703 }
1704
1705
1706 /* If the def refs in DF are not organized, reorganize them. */
1707
1708 void
1709 df_maybe_reorganize_def_refs (enum df_ref_order order)
1710 {
1711 if (order == df->def_info.ref_order)
1712 return;
1713
1714 switch (order)
1715 {
1716 case DF_REF_ORDER_BY_REG:
1717 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1718 break;
1719
1720 case DF_REF_ORDER_BY_INSN:
1721 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1722 break;
1723
1724 case DF_REF_ORDER_NO_TABLE:
1725 free (df->def_info.refs);
1726 df->def_info.refs = NULL;
1727 df->def_info.refs_size = 0;
1728 break;
1729
1730 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1731 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1732 case DF_REF_ORDER_UNORDERED:
1733 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1734 gcc_unreachable ();
1735 break;
1736 }
1737
1738 df->def_info.ref_order = order;
1739 }
1740
1741
1742 /* Change all of the basic block references in INSN to use the insn's
1743 current basic block. This function is called from routines that move
1744 instructions from one block to another. */
1745
1746 void
1747 df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
1748 {
1749 basic_block old_bb = BLOCK_FOR_INSN (insn);
1750 struct df_insn_info *insn_info;
1751 unsigned int uid = INSN_UID (insn);
1752
1753 if (old_bb == new_bb)
1754 return;
1755
1756 set_block_for_insn (insn, new_bb);
1757
1758 if (!df)
1759 return;
1760
1761 if (dump_file)
1762 fprintf (dump_file, "changing bb of uid %d\n", uid);
1763
1764 insn_info = DF_INSN_UID_SAFE_GET (uid);
1765 if (insn_info == NULL)
1766 {
1767 if (dump_file)
1768 fprintf (dump_file, " unscanned insn\n");
1769 df_insn_rescan (insn);
1770 return;
1771 }
1772
1773 if (!INSN_P (insn))
1774 return;
1775
1776 if (!DEBUG_INSN_P (insn))
1777 df_set_bb_dirty (new_bb);
1778 if (old_bb)
1779 {
1780 if (dump_file)
1781 fprintf (dump_file, " from %d to %d\n",
1782 old_bb->index, new_bb->index);
1783 if (!DEBUG_INSN_P (insn))
1784 df_set_bb_dirty (old_bb);
1785 }
1786 else
1787 if (dump_file)
1788 fprintf (dump_file, " to %d\n", new_bb->index);
1789 }
1790
1791
/* Helper function for df_ref_change_reg_with_loc.  Walk OLD_DF's
   reg_chain; every non-artificial ref whose location is LOC is
   renumbered to NEW_REGNO, moved onto NEW_DF's chain, and re-sorted
   within its insn's ref list (regno is a sort key there).  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
                              struct df_reg_info *new_df,
                              unsigned int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      /* Artificial refs have no LOC; only refs whose location is
         exactly LOC are rewritten.  */
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
          && DF_REF_LOC (the_ref)
          && (*DF_REF_LOC (the_ref) == loc))
        {
          df_ref next_ref = DF_REF_NEXT_REG (the_ref);
          df_ref prev_ref = DF_REF_PREV_REG (the_ref);
          df_ref *ref_ptr;
          struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);

          DF_REF_REGNO (the_ref) = new_regno;
          DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

          /* Pull the_ref out of the old regno chain. */
          if (prev_ref)
            DF_REF_NEXT_REG (prev_ref) = next_ref;
          else
            old_df->reg_chain = next_ref;
          if (next_ref)
            DF_REF_PREV_REG (next_ref) = prev_ref;
          old_df->n_refs--;

          /* Put the ref into the new regno chain. */
          DF_REF_PREV_REG (the_ref) = NULL;
          DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
          if (new_df->reg_chain)
            DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
          new_df->reg_chain = the_ref;
          new_df->n_refs++;
          if (DF_REF_BB (the_ref))
            df_set_bb_dirty (DF_REF_BB (the_ref));

          /* Need to sort the record again that the ref was in because
             the regno is a sorting key.  First, find the right
             record. */
          if (DF_REF_REG_DEF_P (the_ref))
            ref_ptr = &insn_info->defs;
          else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
            ref_ptr = &insn_info->eq_uses;
          else
            ref_ptr = &insn_info->uses;
          if (dump_file)
            fprintf (dump_file, "changing reg in insn %d\n",
                     DF_REF_INSN_UID (the_ref));

          /* Stop if we find the current reference or where the reference
             needs to be. */
          while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
            ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
          if (*ref_ptr != the_ref)
            {
              /* The reference needs to be promoted up the list.  Splice
                 it in ahead of *REF_PTR, then unlink its old slot.  */
              df_ref next = DF_REF_NEXT_LOC (the_ref);
              DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
              *ref_ptr = the_ref;
              do
                ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
              while (*ref_ptr != the_ref);
              *ref_ptr = next;
            }
          else if (DF_REF_NEXT_LOC (the_ref)
                   && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
            {
              /* The reference needs to be demoted down the list.  Unlink
                 it, scan forward for its sorted slot, re-insert.  */
              *ref_ptr = DF_REF_NEXT_LOC (the_ref);
              do
                ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
              while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
              DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
              *ref_ptr = the_ref;
            }

          /* NEXT_REG was captured before unlinking, so the walk
             continues down the old chain.  */
          the_ref = next_ref;
        }
      else
        the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
1880
1881
1882 /* Change the regno of register LOC to NEW_REGNO and update the df
1883 information accordingly. Refs that do not match LOC are not changed
1884 which means that artificial refs are not changed since they have no loc.
1885 This call is to support the SET_REGNO macro. */
1886
1887 void
1888 df_ref_change_reg_with_loc (rtx loc, unsigned int new_regno)
1889 {
1890 unsigned int old_regno = REGNO (loc);
1891 if (old_regno == new_regno)
1892 return;
1893
1894 if (df)
1895 {
1896 df_grow_reg_info ();
1897
1898 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1899 DF_REG_DEF_GET (new_regno),
1900 new_regno, loc);
1901 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1902 DF_REG_USE_GET (new_regno),
1903 new_regno, loc);
1904 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1905 DF_REG_EQ_USE_GET (new_regno),
1906 new_regno, loc);
1907 }
1908 set_mode_and_regno (loc, GET_MODE (loc), new_regno);
1909 }
1910
1911
1912 /* Delete the mw_hardregs that point into the eq_notes. */
1913
1914 static void
1915 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1916 {
1917 struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
1918 struct df_scan_problem_data *problem_data
1919 = (struct df_scan_problem_data *) df_scan->problem_data;
1920
1921 while (*mw_ptr)
1922 {
1923 df_mw_hardreg *mw = *mw_ptr;
1924 if (mw->flags & DF_REF_IN_NOTE)
1925 {
1926 *mw_ptr = DF_MWS_NEXT (mw);
1927 problem_data->mw_reg_pool->remove (mw);
1928 }
1929 else
1930 mw_ptr = &DF_MWS_NEXT (mw);
1931 }
1932 }
1933
1934
/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  */

void
df_notes_rescan (rtx_insn *insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));

  /* The client has deferred rescanning: queue UID for
     df_process_deferred_rescans instead of scanning now.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          /* Create an empty record so the deferred pass can find the
             insn later.  */
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = 0;
          insn_info->uses = 0;
          insn_info->eq_uses = 0;
          insn_info->mw_hardregs = 0;
        }

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
         be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
        bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      class df_collection_rec collection_rec;
      unsigned int i;

      /* Throw away the existing note-derived refs before
         recollecting them.  */
      df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
           note = XEXP (note, 1))
        {
          switch (REG_NOTE_KIND (note))
            {
            case REG_EQUIV:
            case REG_EQUAL:
              df_uses_record (&collection_rec,
                              &XEXP (note, 0), DF_REF_REG_USE,
                              bb, insn_info, DF_REF_IN_NOTE);
              /* FALLTHRU */
            default:
              break;
            }
        }

      /* Find some place to put any new mw_hardregs.  The existing
         mw_hardregs chain is kept sorted by df_mw_compare, so merge
         each new entry into place.  */
      df_canonize_collection_rec (&collection_rec);
      struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
      FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
        {
          while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
            mw_ptr = &DF_MWS_NEXT (*mw_ptr);
          DF_MWS_NEXT (mw) = *mw_ptr;
          *mw_ptr = mw;
          mw_ptr = &DF_MWS_NEXT (mw);
        }
      /* Only the eq_uses are replaced; defs and plain uses stay.  */
      df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
    }
  else
    /* Never scanned at all: fall back to a full rescan.  */
    df_insn_rescan (insn);

}
2026
2027
2028 /*----------------------------------------------------------------------------
2030 Hard core instruction scanning code. No external interfaces here,
2031 just a lot of routines that look inside insns.
2032 ----------------------------------------------------------------------------*/
2033
2034
2035 /* Return true if the contents of two df_ref's are identical.
2036 It ignores DF_REF_MARKER. */
2037
2038 static bool
2039 df_ref_equal_p (df_ref ref1, df_ref ref2)
2040 {
2041 if (!ref2)
2042 return false;
2043
2044 if (ref1 == ref2)
2045 return true;
2046
2047 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2048 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2049 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2050 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2051 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2052 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2053 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2054 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2055 return false;
2056
2057 switch (DF_REF_CLASS (ref1))
2058 {
2059 case DF_REF_ARTIFICIAL:
2060 case DF_REF_BASE:
2061 return true;
2062
2063 case DF_REF_REGULAR:
2064 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2065
2066 default:
2067 gcc_unreachable ();
2068 }
2069 }
2070
2071
/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  */

static int
df_ref_compare (df_ref ref1, df_ref ref2)
{
  /* The comparison keys, in order: class, regno, type, then the
     original scan order as a stable tie-breaker.  */
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  /* Different REG rtxes with the same regno: fall back to scan
     order.  */
  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs. */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them has is from
         a mw and one is not, we need to have the one with the mw
         first. */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
          DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
        return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
        return -1;
      else
        return 1;
    }

  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2112
2113 /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2. */
2114
2115 static int
2116 df_ref_ptr_compare (const void *r1, const void *r2)
2117 {
2118 return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
2119 }
2120
/* Sort and compress a set of refs: order REF_VEC by df_ref_compare
   and remove (freeing) duplicates as judged by df_ref_equal_p.  */

static void
df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
{
  unsigned int count;
  unsigned int i;
  /* Number of duplicates squeezed out so far; element i+dist+1 is the
     next candidate survivor for slot i+1.  */
  unsigned int dist = 0;

  count = ref_vec->length ();

  /* If there are 1 or 0 elements, there is nothing to do. */
  if (count < 2)
    return;
  else if (count == 2)
    {
      df_ref r0 = (*ref_vec)[0];
      df_ref r1 = (*ref_vec)[1];
      if (df_ref_compare (r0, r1) > 0)
        std::swap ((*ref_vec)[0], (*ref_vec)[1]);
    }
  else
    {
      /* Probe for the first out-of-order adjacent pair.  */
      for (i = 0; i < count - 1; i++)
        {
          df_ref r0 = (*ref_vec)[i];
          df_ref r1 = (*ref_vec)[i + 1];
          if (df_ref_compare (r0, r1) >= 0)
            break;
        }
      /* If the array is already strictly ordered,
         which is the most common case for large COUNT case
         (which happens for CALL INSNs),
         no need to sort and filter out duplicate.
         Simply return the count.
         Make sure DF_GET_ADD_REFS adds refs in the increasing order
         of DF_REF_COMPARE. */
      if (i == count - 1)
        return;
      ref_vec->qsort (df_ref_ptr_compare);
    }

  /* Deduplicate in place: equal neighbours are freed and later
     elements shift down by DIST slots.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref. */
      while (i + dist + 1 < count
             && df_ref_equal_p ((*ref_vec)[i],
                                (*ref_vec)[i + dist + 1]))
        {
          df_free_ref ((*ref_vec)[i + dist + 1]);
          dist++;
        }
      /* Copy it down to the next position. */
      if (dist && i + dist + 1 < count)
        (*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
    }

  count -= dist;
  ref_vec->truncate (count);
}
2181
2182
2183 /* Return true if the contents of two df_ref's are identical.
2184 It ignores DF_REF_MARKER. */
2185
2186 static bool
2187 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2188 {
2189 if (!mw2)
2190 return false;
2191 return (mw1 == mw2) ||
2192 (mw1->mw_reg == mw2->mw_reg
2193 && mw1->type == mw2->type
2194 && mw1->flags == mw2->flags
2195 && mw1->start_regno == mw2->start_regno
2196 && mw1->end_regno == mw2->end_regno);
2197 }
2198
2199
2200 /* Compare MW1 and MW2 for sorting. */
2201
2202 static int
2203 df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2204 {
2205 if (mw1->type != mw2->type)
2206 return mw1->type - mw2->type;
2207
2208 if (mw1->flags != mw2->flags)
2209 return mw1->flags - mw2->flags;
2210
2211 if (mw1->start_regno != mw2->start_regno)
2212 return mw1->start_regno - mw2->start_regno;
2213
2214 if (mw1->end_regno != mw2->end_regno)
2215 return mw1->end_regno - mw2->end_regno;
2216
2217 return mw1->mw_order - mw2->mw_order;
2218 }
2219
2220 /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2. */
2221
2222 static int
2223 df_mw_ptr_compare (const void *m1, const void *m2)
2224 {
2225 return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2226 *(const df_mw_hardreg *const *) m2);
2227 }
2228
2229 /* Sort and compress a set of refs. */
2230
2231 static void
2232 df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
2233 {
2234 unsigned int count;
2235 struct df_scan_problem_data *problem_data
2236 = (struct df_scan_problem_data *) df_scan->problem_data;
2237 unsigned int i;
2238 unsigned int dist = 0;
2239
2240 count = mw_vec->length ();
2241 if (count < 2)
2242 return;
2243 else if (count == 2)
2244 {
2245 struct df_mw_hardreg *m0 = (*mw_vec)[0];
2246 struct df_mw_hardreg *m1 = (*mw_vec)[1];
2247 if (df_mw_compare (m0, m1) > 0)
2248 {
2249 struct df_mw_hardreg *tmp = (*mw_vec)[0];
2250 (*mw_vec)[0] = (*mw_vec)[1];
2251 (*mw_vec)[1] = tmp;
2252 }
2253 }
2254 else
2255 mw_vec->qsort (df_mw_ptr_compare);
2256
2257 for (i=0; i<count-dist; i++)
2258 {
2259 /* Find the next ref that is not equal to the current ref. */
2260 while (i + dist + 1 < count
2261 && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2262 {
2263 problem_data->mw_reg_pool->remove ((*mw_vec)[i + dist + 1]);
2264 dist++;
2265 }
2266 /* Copy it down to the next position. */
2267 if (dist && i + dist + 1 < count)
2268 (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2269 }
2270
2271 count -= dist;
2272 mw_vec->truncate (count);
2273 }
2274
2275
/* Sort and remove duplicates from the COLLECTION_REC.  Each of the four
   vectors (defs, uses, eq_uses, multiword hardregs) is canonicalized
   independently.  */

static void
df_canonize_collection_rec (class df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}
2286
2287
/* Add the new df_ref THIS_REF to appropriate reg_info/ref_info chains.
   THIS_REF is pushed on the front of REG_INFO's doubly-linked reg_chain;
   if ADD_TO_TABLE, it is also appended to REF_INFO's flat ref table and
   given the corresponding id, otherwise its id is -1.  */

static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* The ref must not already be linked into any reg chain.  */
  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
		       && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
2334
2335
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) in OLD_VEC and installs the entire group into the insn.  It
   also adds each of these refs into the appropriate chains.  Returns
   the first ref of the group (the head of the NEXT_LOC chain), or null
   if OLD_VEC is empty.  IS_NOTES is true for eq_uses coming from
   REG_EQUAL/REG_EQUIV notes.  */

static df_ref
df_install_refs (basic_block bb,
		 const vec<df_ref, va_heap> *old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count = old_vec->length ();
  if (count)
    {
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Adding refs invalidates any by-reg/by-insn ordering of the flat
	 table, so downgrade the recorded order to UNORDERED while
	 deciding whether the new refs belong in the table at all.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      /* Link the refs together through NEXT_LOC and install each one
	 into its register's chain (and optionally the flat table).  */
      FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
	{
	  DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
					? (*old_vec)[ix + 1]
					: NULL);
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}
      return (*old_vec)[0];
    }
  else
    return 0;
}
2390
2391
2392 /* This function takes the mws installs the entire group into the
2393 insn. */
2394
2395 static struct df_mw_hardreg *
2396 df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
2397 {
2398 unsigned int count = old_vec->length ();
2399 if (count)
2400 {
2401 for (unsigned int i = 0; i < count - 1; i++)
2402 DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2403 DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2404 return (*old_vec)[0];
2405 }
2406 else
2407 return 0;
2408 }
2409
2410
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  When INSN is non-null
   the groups selected by FLAGS (copy_defs/copy_uses/copy_eq_uses/copy_mw)
   are installed on INSN's record; otherwise the defs and uses are
   installed as BB's artificial refs.  */

static void
df_refs_add_to_chains (class df_collection_rec *collection_rec,
		       basic_block bb, rtx_insn *insn, unsigned int flags)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
         insn.  A null rec is a signal that the caller will handle the
         chain specially.  */
      if (flags & copy_defs)
	{
	  gcc_checking_assert (!insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, &collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (flags & copy_uses)
	{
	  gcc_checking_assert (!insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, &collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (flags & copy_eq_uses)
	{
	  gcc_checking_assert (!insn_rec->eq_uses);
	  /* eq_uses come from notes, hence is_notes == true.  */
	  insn_rec->eq_uses
	    = df_install_refs (bb, &collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (flags & copy_mw)
	{
	  gcc_checking_assert (!insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (&collection_rec->mw_vec);
	}
    }
  else
    {
      /* No insn: these are the block-level artificial refs.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      gcc_checking_assert (!bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, &collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      gcc_checking_assert (!bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, &collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
2471
2472
/* Allocate a ref of class CL from the matching pool and initialize its
   fields.  REG is the (possibly SUBREG) register rtx, LOC its address
   within the insn (null for base/artificial refs), BB/INFO the context,
   and REF_TYPE/REF_FLAGS the kind of access.  The new ref is pushed on
   COLLECTION_REC when one is given, otherwise installed incrementally.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 class df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags)
{
  df_ref this_ref = NULL;
  unsigned int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each ref class has its own pool; only regular refs carry a LOC and
     only artificial refs carry a bb pointer.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) (problem_data->ref_base_pool->allocate ());
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) (problem_data->ref_artificial_pool->allocate ());
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) (problem_data->ref_regular_pool->allocate ());
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using ond
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  /* Hand the ref to the caller's collection if one was supplied (defs,
     note uses and plain uses go in separate vectors); otherwise install
     it into the live chains immediately.  */
  if (collection_rec)
    {
      if (DF_REF_REG_DEF_P (this_ref))
	collection_rec->def_vec.safe_push (this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	collection_rec->eq_use_vec.safe_push (this_ref);
      else
	collection_rec->use_vec.safe_push (this_ref);
    }
  else
    df_install_ref_incremental (this_ref);

  return this_ref;
}
2555
2556
/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  For a hard register this means one
   ref per covered hard reg (plus a df_mw_hardreg record for multiword
   accesses); for a pseudo it is a single ref.  */


static void
df_ref_record (enum df_ref_class cl,
	       class df_collection_rec *collection_rec,
               rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      if (GET_CODE (reg) == SUBREG)
	{
	  /* Translate the SUBREG byte offset into a hard-reg offset and
	     narrow [regno, endregno) to the regs actually covered.  */
	  int off = subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					 SUBREG_BYTE (reg), GET_MODE (reg));
	  unsigned int nregno = regno + off;
	  endregno = nregno + subreg_nregs (reg);
	  if (off < 0 && regno < (unsigned) -off)
	    /* Deal with paradoxical SUBREGs on big endian where
	       in debug insns the hard reg number might be smaller
	       than -off, such as (subreg:DI (reg:SI 0 [+4 ]) 0));
	       RA decisions shouldn't be affected by debug insns
	       and so RA can decide to put pseudo into a hard reg
	       with small REGNO, even when it is referenced in
	       a paradoxical SUBREG in a debug insn.  */
	    regno = 0;
	  else
	    regno = nregno;
	}
      else
	endregno = END_REGNO (reg);

      /*  If this is a multiword hardreg, we create some extra
	  datastructures that will enable us to easily build REG_DEAD
	  and REG_UNUSED notes.  */
      if (collection_rec
	  && (endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  gcc_assert (regno < endregno);

	  hardreg = problem_data->mw_reg_pool->allocate ();
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  collection_rec->mw_vec.safe_push (hardreg);
	}

      /* One ref per covered hard register.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags);

          gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      /* Pseudo register: a single ref suffices.  */
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags);
    }
}
2642
2643
/* Process all the registers defined in the rtx pointed by LOC.
   Autoincrement/decrement definitions will be picked up by df_uses_record.
   Any change here has to be matched in df_find_hard_reg_defs_1.  */

static void
df_def_record_1 (class df_collection_rec *collection_rec,
                 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
		 int flags)
{
  rtx dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  df_def_record_1 (collection_rec, &XEXP (temp, 0),
			   bb, insn_info, flags);
	}
      return;
    }

  /* Strip STRICT_LOW_PART / ZERO_EXTRACT wrappers, accumulating the
     flags that mark such defs as partial read-modify-writes.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);

      /* We want to keep sp alive everywhere - by making all
	 writes to sp also use of sp. */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
	df_ref_record (DF_REF_BASE, collection_rec,
		       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      if (read_modify_subreg_p (dst))
	flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
    }
}
2708
2709
/* Process all the registers defined in the pattern rtx, X.  Defs come
   from SETs and CLOBBERs, possibly nested in COND_EXEC or PARALLEL.
   Any change here has to be matched in df_find_hard_reg_defs.  */

static void
df_defs_record (class df_collection_rec *collection_rec,
                rtx x, basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
      break;

    case CLOBBER:
      flags |= DF_REF_MUST_CLOBBER;
      df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
      break;

    case COND_EXEC:
      /* Defs inside a COND_EXEC are conditional.  */
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
		      bb, insn_info, DF_REF_CONDITIONAL);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
	df_defs_record (collection_rec, XVECEXP (x, 0, i),
			bb, insn_info, flags);
      break;
    default:
      /* No DEFs to record in other cases */
      break;
    }
}
2747
/* Set bits in *DEFS for hard registers found in the rtx DST, which is the
   destination of a set or clobber.  This has to match the logic in
   df_def_record_1.  */

static void
df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
{
  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
	}
      return;
    }

  /* Strip the same wrappers df_def_record_1 strips, but without any
     flag bookkeeping -- only the regno matters here.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    dst = XEXP (dst, 0);

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst) && HARD_REGISTER_P (dst))
    SET_HARD_REG_BIT (*defs, REGNO (dst));
  else if (GET_CODE (dst) == SUBREG
	   && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
    /* NOTE(review): HARD_REGISTER_P is applied to the SUBREG itself
       rather than SUBREG_REG (dst); presumably the inner REG is what
       should be tested -- verify against REGNO's checking rules.  */
    SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
}
2781
/* Set bits in *DEFS for hard registers defined in the pattern X.  This
   has to match the logic in df_defs_record (same SET/CLOBBER/COND_EXEC/
   PARALLEL recursion, minus the flag bookkeeping).  */

static void
df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_find_hard_reg_defs_1 (SET_DEST (x), defs);
      break;

    case CLOBBER:
      df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
      break;

    case COND_EXEC:
      df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
	df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
      break;
    default:
      /* No DEFs to record in other cases */
      break;
    }
}
2814
2815
/* Process all the registers used in the rtx at address LOC, recording
   them as refs of type REF_TYPE with FLAGS in COLLECTION_REC.  Walks the
   expression recursively; the first operand is handled via the RETRY
   label to save a recursion level.  */

static void
df_uses_record (class df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
		basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Codes that can contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (collection_rec,
			&XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE,
		        bb, insn_info,
			flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Address registers are uses; DF_REF_IN_NOTE is the only flag
	 propagated into the address.  */
      df_uses_record (collection_rec,
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
		      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
	  return;
	}
      /* Fall through */

    case REG:
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     x, loc, bb, insn_info,
		     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
        df_uses_record (collection_rec,
                        &XEXP (x, 1), ref_type, bb, insn_info, flags);
        df_uses_record (collection_rec,
                        &XEXP (x, 2), ref_type, bb, insn_info, flags);

        /* If the parameters to the zero or sign extract are
           constants, strip them off and recurse, otherwise there is
           no information that we can gain from this operation.  */
        if (code == ZERO_EXTRACT)
          flags |= DF_REF_ZERO_EXTRACT;
        else
          flags |= DF_REF_SIGN_EXTRACT;

        df_uses_record (collection_rec,
                        &XEXP (x, 0), ref_type, bb, insn_info, flags);
        return;
      }
      break;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	/* The source is entirely uses ...  */
	df_uses_record (collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

	/* ... while the destination contributes uses only for address
	   registers, extract operands and read-modify-write cases; the
	   defs themselves are handled by df_def_record_1.  */
	switch (GET_CODE (dst))
	  {
	    case SUBREG:
	      if (read_modify_subreg_p (dst))
		{
		  df_uses_record (collection_rec, &SUBREG_REG (dst),
				  DF_REF_REG_USE, bb, insn_info,
				  flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
		  break;
		}
	      /* Fall through.  */
	    case REG:
	    case PARALLEL:
	    case SCRATCH:
	    case PC:
	      break;
	    case MEM:
	      df_uses_record (collection_rec, &XEXP (dst, 0),
			      DF_REF_REG_MEM_STORE, bb, insn_info, flags);
	      break;
	    case STRICT_LOW_PART:
	      {
		rtx *temp = &XEXP (dst, 0);
		/* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
		dst = XEXP (dst, 0);
		df_uses_record (collection_rec,
				(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
				DF_REF_REG_USE, bb, insn_info,
				DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
	      }
	      break;
	    case ZERO_EXTRACT:
	      {
		df_uses_record (collection_rec, &XEXP (dst, 1),
				DF_REF_REG_USE, bb, insn_info, flags);
		df_uses_record (collection_rec, &XEXP (dst, 2),
				DF_REF_REG_USE, bb, insn_info, flags);
		if (GET_CODE (XEXP (dst,0)) == MEM)
		  df_uses_record (collection_rec, &XEXP (dst, 0),
				  DF_REF_REG_USE, bb, insn_info,
				  flags);
		else
		  df_uses_record (collection_rec, &XEXP (dst, 0),
				  DF_REF_REG_USE, bb, insn_info,
				  DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
	      }
	      break;

	    default:
	      gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We cannot just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags);
	    return;
	  }
	break;
      }

    case VAR_LOCATION:
      df_uses_record (collection_rec,
		      &PAT_VAR_LOCATION_LOC (x),
		      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
		     bb, insn_info,
		     DF_REF_REG_DEF,
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (collection_rec,
			      &XVECEXP (x, i, j), ref_type,
			      bb, insn_info, flags);
	  }
      }
  }

  return;
}
3067
3068
3069 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
3070
3071 static void
3072 df_get_conditional_uses (class df_collection_rec *collection_rec)
3073 {
3074 unsigned int ix;
3075 df_ref ref;
3076
3077 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
3078 {
3079 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3080 {
3081 df_ref use;
3082
3083 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3084 DF_REF_LOC (ref), DF_REF_BB (ref),
3085 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3086 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3087 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3088 }
3089 }
3090 }
3091
3092
/* Get call's extra defs and uses (track caller-saved registers):
   honorary stack-pointer use, global-register uses/defs depending on
   const/pure-ness, may-clobbers for every hard reg the callee ABI
   clobbers, and the uses/clobbers listed in CALL_INSN_FUNCTION_USAGE.  */

static void
df_get_call_refs (class df_collection_rec *collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  HARD_REG_SET defs_generated;

  /* Regs already defined by the call pattern itself must not get an
     extra ABI clobber below.  */
  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
  function_abi callee_abi = insn_callee_abi (insn_info->insn);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM
	  && !FAKE_CALL_P (insn_info->insn))
	/* The stack ptr is used (honorarily) by a CALL insn.  */
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_USE,
		       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
	{
	  /* Calls to const functions cannot access any global registers and
	     calls to pure functions cannot set them.  All other calls may
	     reference any of the global registers, so they are recorded as
	     used. */
	  if (!RTL_CONST_CALL_P (insn_info->insn))
	    {
	      df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			     NULL, bb, insn_info, DF_REF_REG_USE, flags);
	      if (!RTL_PURE_CALL_P (insn_info->insn))
		df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			       NULL, bb, insn_info, DF_REF_REG_DEF, flags);
	    }
	}
      else if (callee_abi.clobbers_full_reg_p (i)
	       /* no clobbers for regs that are the result of the call */
	       && !TEST_HARD_REG_BIT (defs_generated, i)
	       && (!is_sibling_call
		   || !bitmap_bit_p (df->exit_block_uses, i)
		   || refers_to_regno_p (i, crtl->return_rtx)))
	  df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			 NULL, bb, insn_info, DF_REF_REG_DEF,
			 DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered. */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
	      /* Skip regs already defined by the pattern itself.  */
	      if (!TEST_HARD_REG_BIT (defs_generated, regno))
		df_defs_record (collection_rec, XEXP (note, 0), bb,
				insn_info, flags);
	    }
	  else
	    df_uses_record (collection_rec, &XEXP (note, 0),
		            DF_REF_REG_USE, bb, insn_info, flags);
	}
    }

  return;
}
3170
/* Collect all refs in the INSN.  This function is free of any
   side-effect - it will create and return a lists of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains.  */

static void
df_insn_refs_collect (class df_collection_rec *collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* Process REG_EQUIV/REG_EQUAL notes.  Their uses go in eq_use_vec via
     DF_REF_IN_NOTE.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
          df_uses_record (collection_rec,
                          &XEXP (note, 0), DF_REF_REG_USE,
                          bb, insn_info, DF_REF_IN_NOTE);
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0);
	  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	    df_ref_record (DF_REF_BASE, collection_rec,
			   regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
			   NULL, bb, insn_info,
			   DF_REF_REG_USE, 0);
          break;
        default:
          break;
        }
    }

  int flags = (is_cond_exec) ? DF_REF_CONDITIONAL : 0;
  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE. */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info, flags);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
		  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES. */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  /* Sort each vector and drop duplicates before returning.  */
  df_canonize_collection_rec (collection_rec);
}
3239
3240 /* Recompute the luids for the insns in BB. */
3241
3242 void
3243 df_recompute_luids (basic_block bb)
3244 {
3245 rtx_insn *insn;
3246 int luid = 0;
3247
3248 df_grow_insn_info ();
3249
3250 /* Scan the block an insn at a time from beginning to end. */
3251 FOR_BB_INSNS (bb, insn)
3252 {
3253 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3254 /* Inserting labels does not always trigger the incremental
3255 rescanning. */
3256 if (!insn_info)
3257 {
3258 gcc_assert (!INSN_P (insn));
3259 insn_info = df_insn_create_insn_record (insn);
3260 }
3261
3262 DF_INSN_INFO_LUID (insn_info) = luid;
3263 if (INSN_P (insn))
3264 luid++;
3265 }
3266 }
3267
3268
3269 /* Collect all artificial refs at the block level for BB and add them
3270 to COLLECTION_REC. */
3271
static void
df_bb_refs_collect (class df_collection_rec *collection_rec, basic_block bb)
{
  /* Start from an empty collection.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* The entry and exit blocks carry only the precomputed artificial
     def/use sets; nothing else applies to them.  */
  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
	}
    }

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
		   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* Blocks with EH predecessors use the eh-specific artificial
	 use set; all other regular blocks use the common one.  */
      bitmap au = bb_has_eh_pred (bb)
	? &df->eh_block_artificial_uses
	: &df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
	{
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_USE, 0);
	}
    }

  df_canonize_collection_rec (collection_rec);
}
3329
3330
3331 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3332
void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  rtx_insn *insn;
  int luid = 0;

  if (!df)
    return;

  df_collection_rec collection_rec;
  df_grow_bb_info (df_scan);
  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
	struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
	/* The caller guarantees the block has not been scanned yet.  */
	gcc_assert (!insn_info);

	insn_info = df_insn_create_insn_record (insn);
	if (INSN_P (insn))
	  {
	    /* Record refs within INSN.  */
	    DF_INSN_INFO_LUID (insn_info) = luid++;
	    df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
	    df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
	  }
	/* NOTE(review): for real insns this overwrites the luid
	   assigned just above with the post-increment value, which is
	   one higher than what df_recompute_luids would assign --
	   presumably harmless because luids are only compared for
	   relative order, but worth confirming.  */
	DF_INSN_INFO_LUID (insn_info) = luid;
      }

  /* Other block level artificial refs */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}
3371
3372
3373 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3374 block. */
3375
static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      /* After reload, only the (still needed) hard frame pointer must
	 be forced live in every block.  */
      if (frame_pointer_needed)
	bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	bitmap_set_bit (regular_block_artificial_uses,
			HARD_FRAME_POINTER_REGNUM);

      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (regular_block_artificial_uses, picreg);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
	trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
	initialization and where the saved registers are restored
	from them, including the cases where we don't reach the epilogue
	(noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
3434
3435
3436 /* Get the artificial use set for an eh block. */
3437
3438 static void
3439 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3440 {
3441 bitmap_clear (eh_block_artificial_uses);
3442
3443 /* The following code (down through the arg_pointer setting APPEARS
3444 to be necessary because there is nothing that actually
3445 describes what the exception handling code may actually need
3446 to keep alive. */
3447 if (reload_completed)
3448 {
3449 if (frame_pointer_needed)
3450 {
3451 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3452 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3453 bitmap_set_bit (eh_block_artificial_uses,
3454 HARD_FRAME_POINTER_REGNUM);
3455 }
3456 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3457 && fixed_regs[ARG_POINTER_REGNUM])
3458 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3459 }
3460 }
3461
3462
3463
3464 /*----------------------------------------------------------------------------
3466 Specialized hard register scanning functions.
3467 ----------------------------------------------------------------------------*/
3468
3469
3470 /* Mark a register in SET. Hard registers in large modes get all
3471 of their component registers set as well. */
3472
3473 static void
3474 df_mark_reg (rtx reg, void *vset)
3475 {
3476 bitmap_set_range ((bitmap) vset, REGNO (reg), REG_NREGS (reg));
3477 }
3478
3479
3480 /* Set the bit for regs that are considered being defined at the entry. */
3481
static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  /* For separate shrink-wrapping we use LIVE to analyze which basic blocks
     need a prologue for some component to be executed before that block,
     and we do not care about any other registers.  Hence, we do not want
     any register for any component defined in the entry block, and we can
     just leave all registers undefined.  */
  if (df_scan->local_flags & DF_SCAN_EMPTY_ENTRY_EXIT)
    return;

  /* Global registers and incoming argument registers are defined on
     entry.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (global_regs[i])
	bitmap_set_bit (entry_block_defs, i);
      if (FUNCTION_ARG_REGNO_P (i))
	bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (targetm.have_prologue () && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (!crtl->abi->clobbers_full_reg_p (i)
	    && !fixed_regs[i]
	    && df_regs_ever_live_p (i))
	  bitmap_set_bit (entry_block_defs, i);
    }

  /* The register holding the address of a returned aggregate, if any.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = rtx_for_static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (entry_block_defs, picreg);
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  /* Let the target add any other registers it considers defined on
     entry.  */
  targetm.extra_live_on_entry (entry_block_defs);
}
3568
3569
3570 /* Return the (conservative) set of hard registers that are defined on
3571 entry to the function.
3572 It uses df->entry_block_defs to determine which register
3573 reference to include. */
3574
3575 static void
3576 df_entry_block_defs_collect (class df_collection_rec *collection_rec,
3577 bitmap entry_block_defs)
3578 {
3579 unsigned int i;
3580 bitmap_iterator bi;
3581
3582 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3583 {
3584 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3585 ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3586 }
3587
3588 df_canonize_collection_rec (collection_rec);
3589 }
3590
3591
3592 /* Record the (conservative) set of hard registers that are defined on
3593 entry to the function. */
3594
3595 static void
3596 df_record_entry_block_defs (bitmap entry_block_defs)
3597 {
3598 class df_collection_rec collection_rec;
3599 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3600
3601 /* Process bb_refs chain */
3602 df_refs_add_to_chains (&collection_rec,
3603 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3604 NULL,
3605 copy_defs);
3606 }
3607
3608
3609 /* Update the defs in the entry block. */
3610
3611 void
3612 df_update_entry_block_defs (void)
3613 {
3614 bool changed = false;
3615
3616 auto_bitmap refs (&df_bitmap_obstack);
3617 df_get_entry_block_def_set (refs);
3618 gcc_assert (df->entry_block_defs);
3619 if (!bitmap_equal_p (df->entry_block_defs, refs))
3620 {
3621 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3622 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3623 df_ref_chain_delete (bb_info->artificial_defs);
3624 bb_info->artificial_defs = NULL;
3625 changed = true;
3626 }
3627
3628 if (changed)
3629 {
3630 df_record_entry_block_defs (refs);
3631 bitmap_copy (df->entry_block_defs, refs);
3632 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
3633 }
3634 }
3635
3636
3637 /* Return true if REGNO is used by the epilogue. */
3638 bool
3639 df_epilogue_uses_p (unsigned int regno)
3640 {
3641 return (EPILOGUE_USES (regno)
3642 || TEST_HARD_REG_BIT (crtl->must_be_zero_on_return, regno));
3643 }
3644
3645 /* Set the bit for regs that are considered being used at the exit. */
3646
static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;
  unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

  bitmap_clear (exit_block_uses);

  /* For separate shrink-wrapping we use LIVE to analyze which basic blocks
     need an epilogue for some component to be executed after that block,
     and we do not care about any other registers.  Hence, we do not want
     any register for any component seen as used in the exit block, and we
     can just say no registers at all are used.  */
  if (df_scan->local_flags & DF_SCAN_EMPTY_ENTRY_EXIT)
    return;

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && picreg != INVALID_REGNUM
      && fixed_regs[picreg])
    bitmap_set_bit (exit_block_uses, picreg);

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || df_epilogue_uses_p (i))
      bitmap_set_bit (exit_block_uses, i);

  if (targetm.have_epilogue () && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (df_regs_ever_live_p (i)
	    && !LOCAL_REGNO (i)
	    && !crtl->abi->clobbers_full_reg_p (i))
	  bitmap_set_bit (exit_block_uses, i);
    }

  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	bitmap_set_bit (exit_block_uses, regno);
      }

#ifdef EH_RETURN_STACKADJ_RTX
  /* The stack-adjust register stays live until the epilogue consumes
     it for an eh return.  */
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Likewise the register carrying the eh return handler address.  */
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
3736
3737
3738 /* Return the refs of hard registers that are used in the exit block.
3739 It uses df->exit_block_uses to determine register to include. */
3740
static void
df_exit_block_uses_collect (class df_collection_rec *collection_rec, bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  /* One artificial use in the exit block for every register in
     EXIT_BLOCK_USES.  */
  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  df_canonize_collection_rec (collection_rec);
}
3763
3764
3765 /* Record the set of hard registers that are used in the exit block.
3766 It uses df->exit_block_uses to determine which bit to include. */
3767
3768 static void
3769 df_record_exit_block_uses (bitmap exit_block_uses)
3770 {
3771 class df_collection_rec collection_rec;
3772 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3773
3774 /* Process bb_refs chain */
3775 df_refs_add_to_chains (&collection_rec,
3776 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3777 NULL,
3778 copy_uses);
3779 }
3780
3781
3782 /* Update the uses in the exit block. */
3783
3784 void
3785 df_update_exit_block_uses (void)
3786 {
3787 bool changed = false;
3788
3789 auto_bitmap refs (&df_bitmap_obstack);
3790 df_get_exit_block_use_set (refs);
3791 gcc_assert (df->exit_block_uses);
3792 if (!bitmap_equal_p (df->exit_block_uses, refs))
3793 {
3794 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3795 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3796 df_ref_chain_delete (bb_info->artificial_uses);
3797 bb_info->artificial_uses = NULL;
3798 changed = true;
3799 }
3800
3801 if (changed)
3802 {
3803 df_record_exit_block_uses (refs);
3804 bitmap_copy (df->exit_block_uses, refs);
3805 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
3806 }
3807 }
3808
3809 static bool initialized = false;
3810
3811
3812 /* Initialize some platform specific structures. */
3813
void
df_hard_reg_init (void)
{
  int i;
  /* ELIMINABLE_REGS is a target-provided initializer listing register
     elimination pairs (from, to).  */
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;

  /* This data never changes, so computing it once per process is
     enough.  */
  if (initialized)
    return;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);

  initialized = true;
}
3832
3833 /* Recompute the parts of scanning that are based on regs_ever_live
3834 because something changed in that array. */
3835
3836 void
3837 df_update_entry_exit_and_calls (void)
3838 {
3839 basic_block bb;
3840
3841 df_update_entry_block_defs ();
3842 df_update_exit_block_uses ();
3843
3844 /* The call insns need to be rescanned because there may be changes
3845 in the set of registers clobbered across the call. */
3846 FOR_EACH_BB_FN (bb, cfun)
3847 {
3848 rtx_insn *insn;
3849 FOR_BB_INSNS (bb, insn)
3850 {
3851 if (INSN_P (insn) && CALL_P (insn))
3852 df_insn_rescan (insn);
3853 }
3854 }
3855 }
3856
3857
3858 /* Return true if hard REG is actually used in the some instruction.
3859 There are a fair number of conditions that affect the setting of
3860 this array. See the comment in df.h for df->hard_regs_live_count
3861 for the conditions that this array is set. */
3862
3863 bool
3864 df_hard_reg_used_p (unsigned int reg)
3865 {
3866 return df->hard_regs_live_count[reg] != 0;
3867 }
3868
3869
3870 /* A count of the number of times REG is actually used in the some
3871 instruction. There are a fair number of conditions that affect the
3872 setting of this array. See the comment in df.h for
3873 df->hard_regs_live_count for the conditions that this array is
3874 set. */
3875
3876
unsigned int
df_hard_reg_used_count (unsigned int reg)
{
  /* Simple accessor; the count is maintained by the scan problem.  */
  return df->hard_regs_live_count[reg];
}
3882
3883
3884 /* Get the value of regs_ever_live[REGNO]. */
3885
bool
df_regs_ever_live_p (unsigned int regno)
{
  /* Simple accessor for the regs_ever_live array.  */
  return regs_ever_live[regno];
}
3891
3892 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
3893 to change, schedule that change for the next update. */
3894
3895 void
3896 df_set_regs_ever_live (unsigned int regno, bool value)
3897 {
3898 if (regs_ever_live[regno] == value)
3899 return;
3900
3901 regs_ever_live[regno] = value;
3902 if (df)
3903 df->redo_entry_and_exit = true;
3904 }
3905
3906
3907 /* Compute "regs_ever_live" information from the underlying df
3908 information. Set the vector to all false if RESET. */
3909
3910 void
3911 df_compute_regs_ever_live (bool reset)
3912 {
3913 unsigned int i;
3914 bool changed = df->redo_entry_and_exit;
3915
3916 if (reset)
3917 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3918
3919 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3920 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3921 {
3922 regs_ever_live[i] = true;
3923 changed = true;
3924 }
3925 if (changed)
3926 df_update_entry_exit_and_calls ();
3927 df->redo_entry_and_exit = false;
3928 }
3929
3930
3931 /*----------------------------------------------------------------------------
3933 Dataflow ref information verification functions.
3934
3935 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3936 df_reg_chain_verify_unmarked (refs)
   df_refs_verify (vec<df_ref, va_heap> *, ref*, bool)
3938 df_mws_verify (mw*, mw*, bool)
3939 df_insn_refs_verify (collection_rec, bb, insn, bool)
3940 df_bb_refs_verify (bb, refs, bool)
3941 df_bb_verify (bb)
3942 df_exit_block_bitmap_verify (bool)
3943 df_entry_block_bitmap_verify (bool)
3944 df_scan_verify ()
3945 ----------------------------------------------------------------------------*/
3946
3947
3948 /* Mark all refs in the reg chain. Verify that all of the registers
3949 are in the correct chain. */
3950
3951 static unsigned int
3952 df_reg_chain_mark (df_ref refs, unsigned int regno,
3953 bool is_def, bool is_eq_use)
3954 {
3955 unsigned int count = 0;
3956 df_ref ref;
3957 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
3958 {
3959 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
3960
3961 /* If there are no def-use or use-def chains, make sure that all
3962 of the chains are clear. */
3963 if (!df_chain)
3964 gcc_assert (!DF_REF_CHAIN (ref));
3965
3966 /* Check to make sure the ref is in the correct chain. */
3967 gcc_assert (DF_REF_REGNO (ref) == regno);
3968 if (is_def)
3969 gcc_assert (DF_REF_REG_DEF_P (ref));
3970 else
3971 gcc_assert (!DF_REF_REG_DEF_P (ref));
3972
3973 if (is_eq_use)
3974 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
3975 else
3976 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
3977
3978 if (DF_REF_NEXT_REG (ref))
3979 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
3980 count++;
3981 DF_REF_REG_MARK (ref);
3982 }
3983 return count;
3984 }
3985
3986
3987 /* Verify that all of the registers in the chain are unmarked. */
3988
3989 static void
3990 df_reg_chain_verify_unmarked (df_ref refs)
3991 {
3992 df_ref ref;
3993 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
3994 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
3995 }
3996
3997
3998 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
3999
static bool
df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  /* Walk both sequences in lockstep; any mismatch (including either
     one being shorter) is a failure.  */
  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
    {
      if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* Abort if fail is called from the function level verifier.  If
	 that is the context, mark this reg as being seen.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
	  DF_REF_REG_UNMARK (old_rec);
	}

      old_rec = DF_REF_NEXT_LOC (old_rec);
    }

  /* The old chain must be exhausted too, or it had extra refs.  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
4034
4035
4036 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4037
static bool
df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
	       struct df_mw_hardreg *old_rec,
	       bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  /* Walk both sequences in lockstep; any mismatch (including either
     one being shorter) is a failure.  */
  FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
    {
      if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      old_rec = DF_MWS_NEXT (old_rec);
    }

  /* The old chain must be exhausted too, or it had extra entries.  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
4064
4065
4066 /* Return true if the existing insn refs information is complete and
4067 correct. Otherwise (i.e. if there's any missing or extra refs),
4068 return the correct df_ref chain in REFS_RETURN.
4069
4070 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4071 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4072 verification mode instead of the whole function, so unmark
4073 everything.
4074
4075 If ABORT_IF_FAIL is set, this function never returns false. */
4076
static bool
df_insn_refs_verify (class df_collection_rec *collection_rec,
		     basic_block bb,
		     rtx_insn *insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  /* Recollect the refs from scratch and compare them against the
     recorded def/use/eq-use/mw chains for this insn.  */
  df_insn_refs_collect (collection_rec, bb, insn_info);

  /* Unfortunately we cannot opt out early if one of these is not
     right and abort_if_fail is set because the marks will not get cleared.  */
  ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  if (!ret1 && !abort_if_fail)
    return false;
  ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  if (!ret2 && !abort_if_fail)
    return false;
  ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  if (!ret3 && !abort_if_fail)
    return false;
  if (! df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
		       abort_if_fail))
    return false;
  return (ret1 && ret2 && ret3);
}
4108
4109
4110 /* Return true if all refs in the basic block are correct and complete.
4111 Due to df_ref_chain_verify, it will cause all refs
4112 that are verified to have DF_REF_MARK bit set. */
4113
static bool
df_bb_verify (basic_block bb)
{
  rtx_insn *insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  class df_collection_rec collection_rec;

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
	continue;
      /* Aborts on mismatch; also unmarks the verified refs.  */
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}
4140
4141
4142 /* Returns true if the entry block has correct and complete df_ref set.
4143 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4144
4145 static bool
4146 df_entry_block_bitmap_verify (bool abort_if_fail)
4147 {
4148 bool is_eq;
4149
4150 auto_bitmap entry_block_defs (&df_bitmap_obstack);
4151 df_get_entry_block_def_set (entry_block_defs);
4152
4153 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4154
4155 if (!is_eq && abort_if_fail)
4156 {
4157 fprintf (stderr, "entry_block_defs = ");
4158 df_print_regset (stderr, entry_block_defs);
4159 fprintf (stderr, "df->entry_block_defs = ");
4160 df_print_regset (stderr, df->entry_block_defs);
4161 gcc_assert (0);
4162 }
4163
4164 return is_eq;
4165 }
4166
4167
4168 /* Returns true if the exit block has correct and complete df_ref set.
4169 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4170
4171 static bool
4172 df_exit_block_bitmap_verify (bool abort_if_fail)
4173 {
4174 bool is_eq;
4175
4176 auto_bitmap exit_block_uses (&df_bitmap_obstack);
4177 df_get_exit_block_use_set (exit_block_uses);
4178
4179 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4180
4181 if (!is_eq && abort_if_fail)
4182 {
4183 fprintf (stderr, "exit_block_uses = ");
4184 df_print_regset (stderr, exit_block_uses);
4185 fprintf (stderr, "df->exit_block_uses = ");
4186 df_print_regset (stderr, df->exit_block_uses);
4187 gcc_assert (0);
4188 }
4189
4190 return is_eq;
4191 }
4192
4193
/* Verify that the df_ref information for all insns in all blocks is
   correct and complete, aborting on any inconsistency.  */
4196
void
df_scan_verify (void)
{
  unsigned int i;
  basic_block bb;

  if (!df)
    return;

  /* Verification is a 4 step process.  */

  /* (1) All of the refs are marked by going through the reg chains.  */
  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
		  == DF_REG_DEF_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
		  == DF_REG_USE_COUNT (i));
      gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
		  == DF_REG_EQ_USE_COUNT (i));
    }

  /* (2) There are various bitmaps whose value may change over the
     course of the compilation.  This step recomputes them to make
     sure that they have not slipped out of date.  */
  auto_bitmap regular_block_artificial_uses (&df_bitmap_obstack);
  auto_bitmap eh_block_artificial_uses (&df_bitmap_obstack);

  df_get_regular_block_artificial_uses (regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (eh_block_artificial_uses);

  /* The eh set is defined to include the regular set.  */
  bitmap_ior_into (eh_block_artificial_uses,
		   regular_block_artificial_uses);

  /* Check artificial_uses bitmaps didn't change.  */
  gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
			      &df->regular_block_artificial_uses));
  gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
			      &df->eh_block_artificial_uses));

  /* Verify entry block and exit block.  These only verify the bitmaps,
     the refs are verified in df_bb_verify.  */
  df_entry_block_bitmap_verify (true);
  df_exit_block_bitmap_verify (true);

  /* (3) All of the insns in all of the blocks are traversed and the
     marks are cleared both in the artificial refs attached to the
     blocks and the real refs inside the insns.  It is a failure to
     clear a mark that has not been set as this means that the ref in
     the block or insn was not in the reg chain.  */

  FOR_ALL_BB_FN (bb, cfun)
    df_bb_verify (bb);

  /* (4) See if all reg chains are traversed a second time.  This time
     a check is made that the marks are clear.  A set mark would be a
     from a reg that is not in any insn or basic block.  */

  for (i = 0; i < DF_REG_SIZE (df); i++)
    {
      df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
      df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
    }
}
4262