df-scan.cc revision 1.1.1.1 1 1.1 mrg /* Scanning of rtl for dataflow analysis.
2 1.1 mrg Copyright (C) 1999-2022 Free Software Foundation, Inc.
3 1.1 mrg Originally contributed by Michael P. Hayes
4 1.1 mrg (m.hayes (at) elec.canterbury.ac.nz, mhayes (at) redhat.com)
5 1.1 mrg Major rewrite contributed by Danny Berlin (dberlin (at) dberlin.org)
6 1.1 mrg and Kenneth Zadeck (zadeck (at) naturalbridge.com).
7 1.1 mrg
8 1.1 mrg This file is part of GCC.
9 1.1 mrg
10 1.1 mrg GCC is free software; you can redistribute it and/or modify it under
11 1.1 mrg the terms of the GNU General Public License as published by the Free
12 1.1 mrg Software Foundation; either version 3, or (at your option) any later
13 1.1 mrg version.
14 1.1 mrg
15 1.1 mrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 1.1 mrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 1.1 mrg FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 1.1 mrg for more details.
19 1.1 mrg
20 1.1 mrg You should have received a copy of the GNU General Public License
21 1.1 mrg along with GCC; see the file COPYING3. If not see
22 1.1 mrg <http://www.gnu.org/licenses/>. */
23 1.1 mrg
24 1.1 mrg #include "config.h"
25 1.1 mrg #include "system.h"
26 1.1 mrg #include "coretypes.h"
27 1.1 mrg #include "backend.h"
28 1.1 mrg #include "target.h"
29 1.1 mrg #include "rtl.h"
30 1.1 mrg #include "tree.h"
31 1.1 mrg #include "df.h"
32 1.1 mrg #include "memmodel.h"
33 1.1 mrg #include "tm_p.h"
34 1.1 mrg #include "regs.h"
35 1.1 mrg #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
36 1.1 mrg #include "dumpfile.h"
37 1.1 mrg #include "calls.h"
38 1.1 mrg #include "function-abi.h"
39 1.1 mrg
40 1.1 mrg /* The set of hard registers in eliminables[i].from. */
41 1.1 mrg
42 1.1 mrg static HARD_REG_SET elim_reg_set;
43 1.1 mrg
/* Initialize ur_in and ur_out as if all hard registers were partially
   available.  (NOTE(review): this comment appears to be a stale leftover
   from an earlier revision -- nothing in this file defines or touches
   ur_in/ur_out; verify against df-problems and relocate or remove.)  */
46 1.1 mrg
/* Temporary holding pens for the refs and multiword hardregs collected
   while scanning; the contents are later installed into the permanent
   df chains (see df_refs_add_to_chains).  */
class df_collection_rec
{
public:
  auto_vec<df_ref, 128> def_vec;	/* Defs collected so far.  */
  auto_vec<df_ref, 32> use_vec;		/* Uses collected so far.  */
  auto_vec<df_ref, 32> eq_use_vec;	/* Uses found in notes
					   (DF_REF_IN_NOTE refs).  */
  auto_vec<df_mw_hardreg *, 32> mw_vec;	/* Multiword hardreg records.  */
};
55 1.1 mrg
/* Forward declarations of the local scanning helpers defined later in
   this file.  */

static void df_ref_record (enum df_ref_class, class df_collection_rec *,
			   rtx, rtx *,
			   basic_block, struct df_insn_info *,
			   enum df_ref_type, int ref_flags);
static void df_def_record_1 (class df_collection_rec *, rtx *,
			     basic_block, struct df_insn_info *,
			     int ref_flags);
static void df_defs_record (class df_collection_rec *, rtx,
			    basic_block, struct df_insn_info *,
			    int ref_flags);
static void df_uses_record (class df_collection_rec *,
			    rtx *, enum df_ref_type,
			    basic_block, struct df_insn_info *,
			    int ref_flags);

static void df_install_ref_incremental (df_ref);
static void df_insn_refs_collect (class df_collection_rec*,
				  basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (class df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (df_ref);
static void df_ref_chain_delete (df_ref);

static void df_refs_add_to_chains (class df_collection_rec *,
				   basic_block, rtx_insn *, unsigned int);

static bool df_insn_refs_verify (class df_collection_rec *, basic_block,
				 rtx_insn *, bool);
static void df_entry_block_defs_collect (class df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (class df_collection_rec *, bitmap);
static void df_install_ref (df_ref, struct df_reg_info *,
			    struct df_ref_info *, bool);

/* Comparators used when canonizing/sorting collected refs.  */
static int df_ref_compare (df_ref, df_ref);
static int df_ref_ptr_compare (const void *, const void *);
static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
static int df_mw_ptr_compare (const void *, const void *);

static void df_insn_info_delete (unsigned int);
103 1.1 mrg
/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.cc, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];

/* Flags used to tell df_refs_add_to_chains() which vectors it should
   copy out of a df_collection_rec.  */
static const unsigned int copy_defs = 0x1;	/* Copy def_vec.  */
static const unsigned int copy_uses = 0x2;	/* Copy use_vec.  */
static const unsigned int copy_eq_uses = 0x4;	/* Copy eq_use_vec.  */
static const unsigned int copy_mw = 0x8;	/* Copy mw_vec.  */
static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
| copy_mw;
121 1.1 mrg
122 1.1 mrg /*----------------------------------------------------------------------------
124 1.1 mrg SCANNING DATAFLOW PROBLEM
125 1.1 mrg
126 1.1 mrg There are several ways in which scanning looks just like the other
127 1.1 mrg dataflow problems. It shares the all the mechanisms for local info
128 1.1 mrg as well as basic block info. Where it differs is when and how often
129 1.1 mrg it gets run. It also has no need for the iterative solver.
130 1.1 mrg ----------------------------------------------------------------------------*/
131 1.1 mrg
/* Problem data for the scanning dataflow function.  Holds the
   allocation pools for refs, insn and reg records, plus the obstacks
   backing the scan problem's bitmaps.  */
struct df_scan_problem_data
{
  object_allocator<df_base_ref> *ref_base_pool;
  object_allocator<df_artificial_ref> *ref_artificial_pool;
  object_allocator<df_regular_ref> *ref_regular_pool;
  object_allocator<df_insn_info> *insn_pool;
  object_allocator<df_reg_info> *reg_pool;
  object_allocator<df_mw_hardreg> *mw_reg_pool;

  /* Obstacks released wholesale by df_scan_free_internal.  */
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};
145 1.1 mrg
/* Internal function to shut down the scanning problem.  Releases every
   table, pool and bitmap owned by the scan problem; callers check that
   df_scan->problem_data is non-null before calling.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* The ref tables point into the pools deleted below, so tear them
     down first.  */
  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE (df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  /* These bitmaps were allocated on the problem's obstacks (see
     df_scan_alloc), so clearing them here suffices -- the backing
     storage is released with the obstacks below.  */
  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  /* Finally drop the pools and obstacks themselves.  */
  delete problem_data->ref_base_pool;
  delete problem_data->ref_artificial_pool;
  delete problem_data->ref_regular_pool;
  delete problem_data->insn_pool;
  delete problem_data->reg_pool;
  delete problem_data->mw_reg_pool;
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}
199 1.1 mrg
200 1.1 mrg
/* Free basic block info.  VBB_INFO is the scan problem's per-block
   record for BB.  Also deletes the insn records of every insn in BB.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;
  rtx_insn *insn;

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      df_insn_info_delete (INSN_UID (insn));

  /* Prefer the table entry when the block index is still in range;
     otherwise fall back on the record the caller passed in.  */
  if (bb_index < df_scan->block_info_size)
    bb_info = df_scan_get_bb_info (bb_index);

  /* Get rid of any artificial uses or defs.  Unlink the DU/UD chains
     first, then delete the refs themselves.  */
  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
  df_ref_chain_delete_du_chain (bb_info->artificial_uses);
  df_ref_chain_delete (bb_info->artificial_defs);
  df_ref_chain_delete (bb_info->artificial_uses);
  bb_info->artificial_defs = NULL;
  bb_info->artificial_uses = NULL;
}
225 1.1 mrg
226 1.1 mrg
/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  ALL_BLOCKS is unused; scanning always covers the
   whole function.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  /* Allocation pools for every kind of record the scanner creates.  */
  problem_data->ref_base_pool = new object_allocator<df_base_ref>
    ("df_scan ref base");
  problem_data->ref_artificial_pool = new object_allocator<df_artificial_ref>
    ("df_scan ref artificial");
  problem_data->ref_regular_pool = new object_allocator<df_regular_ref>
    ("df_scan ref regular");
  problem_data->insn_pool = new object_allocator<df_insn_info>
    ("df_scan insn");
  problem_data->reg_pool = new object_allocator<df_reg_info>
    ("df_scan reg");
  problem_data->mw_reg_pool = new object_allocator<df_mw_hardreg>
    ("df_scan mw_reg");

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  /* Grow the per-register and per-insn tables before touching any
     per-block info.  */
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* All of these bitmaps live on the obstacks created above so that
     df_scan_free_internal can release them wholesale.  */
  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}
284 1.1 mrg
285 1.1 mrg
286 1.1 mrg /* Free all of the data associated with the scan problem. */
287 1.1 mrg
288 1.1 mrg static void
289 1.1 mrg df_scan_free (void)
290 1.1 mrg {
291 1.1 mrg if (df_scan->problem_data)
292 1.1 mrg df_scan_free_internal ();
293 1.1 mrg
294 1.1 mrg if (df->blocks_to_analyze)
295 1.1 mrg {
296 1.1 mrg BITMAP_FREE (df->blocks_to_analyze);
297 1.1 mrg df->blocks_to_analyze = NULL;
298 1.1 mrg }
299 1.1 mrg
300 1.1 mrg free (df_scan);
301 1.1 mrg }
302 1.1 mrg
/* Dump the preamble for DF_SCAN dump: the artificial register sets,
   the regs-ever-live set, per-register ref counts, and a summary of
   total refs and insns.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;	/* Total defs seen.  */
  int ucount = 0;	/* Total uses seen.  */
  int ecount = 0;	/* Total eq-uses (note refs) seen.  */
  int icount = 0;	/* Non-call insns.  */
  int ccount = 0;	/* Call insns.  */
  basic_block bb;
  rtx_insn *insn;

  fprintf (file, ";; fully invalidated by EH \t");
  df_print_regset
    (file, bitmap_view<HARD_REG_SET> (eh_edge_abi.full_reg_clobbers ()));
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d [%s]", i, reg_names[i]);
  fprintf (file, "\n;; ref usage \t");

  /* Per-register summary of the form r<N>={<defs>d,<uses>u,<eq>e}.  */
  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
	const char * sep = "";

	fprintf (file, "r%d={", i);
	if (DF_REG_DEF_COUNT (i))
	  {
	    fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
	    sep = ",";
	    dcount += DF_REG_DEF_COUNT (i);
	  }
	if (DF_REG_USE_COUNT (i))
	  {
	    fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
	    sep = ",";
	    ucount += DF_REG_USE_COUNT (i);
	  }
	if (DF_REG_EQ_USE_COUNT (i))
	  {
	    fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
	    ecount += DF_REG_EQ_USE_COUNT (i);
	  }
	fprintf (file, "} ");
      }

  /* Count regular vs call insns for the summary line.  */
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	{
	  if (CALL_P (insn))
	    ccount++;
	  else
	    icount++;
	}

  fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
	   " in %d{%d regular + %d call} insns.\n",
	   dcount + ucount + ecount, dcount, ucount, ecount,
	   icount + ccount, icount, ccount);
}
376 1.1 mrg
/* Dump the bb_info for a given basic block: its artificial defs and
   uses, if the block has a scan record.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  /* Disabled: per-insn debug dump, enable for more verbose output.  */
  {
    rtx_insn *insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
	df_insn_debug (insn, false, file);
  }
#endif
}
401 1.1 mrg
/* Problem definition for the scanning pass.  Only the allocation,
   free, per-block free and dump hooks are provided -- scanning has no
   need for the iterative solver, so every solver hook is NULL.  */
static const struct df_problem problem_SCAN =
{
  DF_SCAN,			/* Problem id.  */
  DF_NONE,			/* Direction.  */
  df_scan_alloc,		/* Allocate the problem specific data.  */
  NULL,				/* Reset global information.  */
  df_scan_free_bb_info,		/* Free basic block info.  */
  NULL,				/* Local compute function.  */
  NULL,				/* Init the solution specific data.  */
  NULL,				/* Iterative solver.  */
  NULL,				/* Confluence operator 0.  */
  NULL,				/* Confluence operator n.  */
  NULL,				/* Transfer function.  */
  NULL,				/* Finalize function.  */
  df_scan_free,			/* Free all of the problem information.  */
  NULL,				/* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,		/* Debugging.  */
  df_scan_start_block,		/* Debugging start block.  */
  NULL,				/* Debugging end block.  */
  NULL,				/* Debugging start insn.  */
  NULL,				/* Debugging end insn.  */
  NULL,				/* Incremental solution verify start.  */
  NULL,				/* Incremental solution verify end.  */
  NULL,				/* Dependent problem.  */
  sizeof (struct df_scan_bb_info),/* Size of entry of block_info array.  */
  TV_DF_SCAN,			/* Timing variable.  */
  false				/* Reset blocks on dropping out of blocks_to_analyze.  */
};
430 1.1 mrg
431 1.1 mrg
/* Create a new DATAFLOW instance for the scanning problem and add it
   to the existing instance of DF.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
441 1.1 mrg
442 1.1 mrg
443 1.1 mrg /*----------------------------------------------------------------------------
445 1.1 mrg Storage Allocation Utilities
446 1.1 mrg ----------------------------------------------------------------------------*/
447 1.1 mrg
448 1.1 mrg
/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      /* Over-allocate by 25% to amortize repeated growth.  */
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
				    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  /* Fill every not-yet-initialized slot with a zeroed df_reg_info
     allocated from the problem's pool.  */
  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = problem_data->reg_pool->allocate ();
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}
501 1.1 mrg
502 1.1 mrg
503 1.1 mrg /* Grow the ref information. */
504 1.1 mrg
505 1.1 mrg static void
506 1.1 mrg df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
507 1.1 mrg {
508 1.1 mrg if (ref_info->refs_size < new_size)
509 1.1 mrg {
510 1.1 mrg ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
511 1.1 mrg memset (ref_info->refs + ref_info->refs_size, 0,
512 1.1 mrg (new_size - ref_info->refs_size) *sizeof (df_ref));
513 1.1 mrg ref_info->refs_size = new_size;
514 1.1 mrg }
515 1.1 mrg }
516 1.1 mrg
517 1.1 mrg
518 1.1 mrg /* Check and grow the ref information if necessary. This routine
519 1.1 mrg guarantees total_size + BITMAP_ADDEND amount of entries in refs
520 1.1 mrg array. It updates ref_info->refs_size only and does not change
521 1.1 mrg ref_info->total_size. */
522 1.1 mrg
523 1.1 mrg static void
524 1.1 mrg df_check_and_grow_ref_info (struct df_ref_info *ref_info,
525 1.1 mrg unsigned bitmap_addend)
526 1.1 mrg {
527 1.1 mrg if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
528 1.1 mrg {
529 1.1 mrg int new_size = ref_info->total_size + bitmap_addend;
530 1.1 mrg new_size += ref_info->total_size / 4;
531 1.1 mrg df_grow_ref_info (ref_info, new_size);
532 1.1 mrg }
533 1.1 mrg }
534 1.1 mrg
535 1.1 mrg
536 1.1 mrg /* Grow the ref information. If the current size is less than the
537 1.1 mrg number of instructions, grow to 25% more than the number of
538 1.1 mrg instructions. */
539 1.1 mrg
540 1.1 mrg void
541 1.1 mrg df_grow_insn_info (void)
542 1.1 mrg {
543 1.1 mrg unsigned int new_size = get_max_uid () + 1;
544 1.1 mrg if (DF_INSN_SIZE () < new_size)
545 1.1 mrg {
546 1.1 mrg new_size += new_size / 4;
547 1.1 mrg df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
548 1.1 mrg memset (df->insns + df->insns_size, 0,
549 1.1 mrg (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
550 1.1 mrg DF_INSN_SIZE () = new_size;
551 1.1 mrg }
552 1.1 mrg }
553 1.1 mrg
554 1.1 mrg
555 1.1 mrg
556 1.1 mrg
557 1.1 mrg /*----------------------------------------------------------------------------
559 1.1 mrg PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
560 1.1 mrg ----------------------------------------------------------------------------*/
561 1.1 mrg
/* Rescan all of the blocks_to_analyze, or all of the blocks in the
   function if blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  /* Incremental scanning rebuilds refs without a table; the table is
     regenerated on demand.  */
  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  /* EH blocks also get all of the regular artificial uses.  */
  bitmap_ior_into (&df->eh_block_artificial_uses,
		   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));

  /* Regular blocks */
  FOR_EACH_BB_FN (bb, cfun)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}
594 1.1 mrg
/* Create new refs under address LOC within INSN.  This function is
   only used externally.  REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
   depending on whether LOC is inside PATTERN (INSN) or a note.  */

void
df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
{
  /* Only the DF_REF_IN_NOTE flag is meaningful for external callers.  */
  gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
  df_uses_record (NULL, loc, DF_REF_REG_USE,
		  BLOCK_FOR_INSN (insn),
		  DF_INSN_INFO_GET (insn),
		  ref_flags);
}
608 1.1 mrg
/* Install REF (a newly created ref for an existing insn) into the df
   machinery: link it into the matching per-register chain and the
   insn's ref list, optionally enter it into the ref table, and mark
   the containing block dirty.  */

static void
df_install_ref_incremental (df_ref ref)
{
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_ptr;
  bool add_to_table;

  rtx_insn *insn = DF_REF_INSN (ref);
  basic_block bb = BLOCK_FOR_INSN (insn);

  /* Select the def, eq-use or plain-use tables depending on the kind
     of ref.  Eq-uses only go into the table when the current ordering
     includes notes.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  add_to_table = true;
	  break;
	default:
	  add_to_table = false;
	  break;
	}
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  /* An incremental insertion leaves the table unsorted, so downgrade
     the recorded ordering (keeping the WITH_NOTES property).  */
  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	break;
      default:
	ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	break;
      }

  /* Keep the insn's ref list sorted by df_ref_compare.  */
  while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);

  DF_REF_NEXT_LOC (ref) = *ref_ptr;
  *ref_ptr = ref;

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);
}
690 1.1 mrg
691 1.1 mrg
692 1.1 mrg
693 1.1 mrg /*----------------------------------------------------------------------------
695 1.1 mrg UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
696 1.1 mrg ----------------------------------------------------------------------------*/
697 1.1 mrg
/* Return REF to the allocation pool that matches its class.  */

static void
df_free_ref (df_ref ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_BASE:
      problem_data->ref_base_pool->remove ((df_base_ref *) (ref));
      break;

    case DF_REF_ARTIFICIAL:
      problem_data->ref_artificial_pool->remove
	((df_artificial_ref *) (ref));
      break;

    case DF_REF_REGULAR:
      problem_data->ref_regular_pool->remove
	((df_regular_ref *) (ref));
      break;
    }
}
721 1.1 mrg
722 1.1 mrg
/* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  /* Find the per-register record and, when this kind of ref appears in
     a ref table, the table whose slot must be cleared below.  */
  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
	{
	  reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
	  /* Eq-uses are only entered in the table under the WITH_NOTES
	     orderings.  */
	  switch (df->use_info.ref_order)
	    {
	    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	    case DF_REF_ORDER_BY_REG_WITH_NOTES:
	    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	      refs = df->use_info.refs;
	      break;
	    default:
	      break;
	    }
	}
      else
	{
	  reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
	  refs = df->use_info.refs;
	}
    }

  /* Clear the table slot; when analyzing a subset, only refs in the
     analyzed blocks were ever entered in the table.  */
  if (refs)
    {
      if (df->analyze_subset)
	{
	  if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
	    refs[id] = NULL;
	}
      else
	refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}
804 1.1 mrg
805 1.1 mrg /* Initialize INSN_INFO to describe INSN. */
806 1.1 mrg
807 1.1 mrg static void
808 1.1 mrg df_insn_info_init_fields (df_insn_info *insn_info, rtx_insn *insn)
809 1.1 mrg {
810 1.1 mrg memset (insn_info, 0, sizeof (struct df_insn_info));
811 1.1 mrg insn_info->insn = insn;
812 1.1 mrg }
813 1.1 mrg
814 1.1 mrg /* Create the insn record for INSN. If there was one there, zero it
815 1.1 mrg out. */
816 1.1 mrg
817 1.1 mrg struct df_insn_info *
818 1.1 mrg df_insn_create_insn_record (rtx_insn *insn)
819 1.1 mrg {
820 1.1 mrg struct df_scan_problem_data *problem_data
821 1.1 mrg = (struct df_scan_problem_data *) df_scan->problem_data;
822 1.1 mrg struct df_insn_info *insn_rec;
823 1.1 mrg
824 1.1 mrg df_grow_insn_info ();
825 1.1 mrg insn_rec = DF_INSN_INFO_GET (insn);
826 1.1 mrg if (!insn_rec)
827 1.1 mrg {
828 1.1 mrg insn_rec = problem_data->insn_pool->allocate ();
829 1.1 mrg DF_INSN_INFO_SET (insn, insn_rec);
830 1.1 mrg }
831 1.1 mrg df_insn_info_init_fields (insn_rec, insn);
832 1.1 mrg return insn_rec;
833 1.1 mrg }
834 1.1 mrg
835 1.1 mrg
836 1.1 mrg /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
837 1.1 mrg
838 1.1 mrg static void
839 1.1 mrg df_ref_chain_delete_du_chain (df_ref ref)
840 1.1 mrg {
841 1.1 mrg for (; ref; ref = DF_REF_NEXT_LOC (ref))
842 1.1 mrg /* CHAIN is allocated by DF_CHAIN. So make sure to
843 1.1 mrg pass df_scan instance for the problem. */
844 1.1 mrg if (DF_REF_CHAIN (ref))
845 1.1 mrg df_chain_unlink (ref);
846 1.1 mrg }
847 1.1 mrg
848 1.1 mrg
849 1.1 mrg /* Delete all refs in the ref chain. */
850 1.1 mrg
851 1.1 mrg static void
852 1.1 mrg df_ref_chain_delete (df_ref ref)
853 1.1 mrg {
854 1.1 mrg df_ref next;
855 1.1 mrg for (; ref; ref = next)
856 1.1 mrg {
857 1.1 mrg next = DF_REF_NEXT_LOC (ref);
858 1.1 mrg df_reg_chain_unlink (ref);
859 1.1 mrg }
860 1.1 mrg }
861 1.1 mrg
862 1.1 mrg
863 1.1 mrg /* Delete the hardreg chain. */
864 1.1 mrg
865 1.1 mrg static void
866 1.1 mrg df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
867 1.1 mrg {
868 1.1 mrg struct df_scan_problem_data *problem_data
869 1.1 mrg = (struct df_scan_problem_data *) df_scan->problem_data;
870 1.1 mrg df_mw_hardreg *next;
871 1.1 mrg
872 1.1 mrg for (; hardregs; hardregs = next)
873 1.1 mrg {
874 1.1 mrg next = DF_MWS_NEXT (hardregs);
875 1.1 mrg problem_data->mw_reg_pool->remove (hardregs);
876 1.1 mrg }
877 1.1 mrg }
878 1.1 mrg
879 1.1 mrg /* Remove the contents of INSN_INFO (but don't free INSN_INFO itself). */
880 1.1 mrg
881 1.1 mrg static void
882 1.1 mrg df_insn_info_free_fields (df_insn_info *insn_info)
883 1.1 mrg {
884 1.1 mrg /* In general, notes do not have the insn_info fields
885 1.1 mrg initialized. However, combine deletes insns by changing them
886 1.1 mrg to notes. How clever. So we cannot just check if it is a
887 1.1 mrg valid insn before short circuiting this code, we need to see
888 1.1 mrg if we actually initialized it. */
889 1.1 mrg df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
890 1.1 mrg
891 1.1 mrg if (df_chain)
892 1.1 mrg {
893 1.1 mrg df_ref_chain_delete_du_chain (insn_info->defs);
894 1.1 mrg df_ref_chain_delete_du_chain (insn_info->uses);
895 1.1 mrg df_ref_chain_delete_du_chain (insn_info->eq_uses);
896 1.1 mrg }
897 1.1 mrg
898 1.1 mrg df_ref_chain_delete (insn_info->defs);
899 1.1 mrg df_ref_chain_delete (insn_info->uses);
900 1.1 mrg df_ref_chain_delete (insn_info->eq_uses);
901 1.1 mrg }
902 1.1 mrg
903 1.1 mrg /* Delete all of the refs information from the insn with UID.
904 1.1 mrg Internal helper for df_insn_delete, df_insn_rescan, and other
905 1.1 mrg df-scan routines that don't have to work in deferred mode
906 1.1 mrg and do not have to mark basic blocks for re-processing. */
907 1.1 mrg
908 1.1 mrg static void
909 1.1 mrg df_insn_info_delete (unsigned int uid)
910 1.1 mrg {
911 1.1 mrg struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
912 1.1 mrg
913 1.1 mrg bitmap_clear_bit (&df->insns_to_delete, uid);
914 1.1 mrg bitmap_clear_bit (&df->insns_to_rescan, uid);
915 1.1 mrg bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
916 1.1 mrg if (insn_info)
917 1.1 mrg {
918 1.1 mrg struct df_scan_problem_data *problem_data
919 1.1 mrg = (struct df_scan_problem_data *) df_scan->problem_data;
920 1.1 mrg
921 1.1 mrg df_insn_info_free_fields (insn_info);
922 1.1 mrg problem_data->insn_pool->remove (insn_info);
923 1.1 mrg DF_INSN_UID_SET (uid, NULL);
924 1.1 mrg }
925 1.1 mrg }
926 1.1 mrg
/* Delete all of the refs information from INSN, either right now
   or marked for later in deferred mode.  */

void
df_insn_delete (rtx_insn *insn)
{
  unsigned int uid;
  basic_block bb;

  gcc_checking_assert (INSN_P (insn));

  /* Nothing to do if dataflow is not initialized at all.  */
  if (!df)
    return;

  uid = INSN_UID (insn);
  bb = BLOCK_FOR_INSN (insn);

  /* ??? bb can be NULL after pass_free_cfg.  At that point, DF should
     not exist anymore (as mentioned in df-core.cc: "The only requirement
     [for DF] is that there be a correct control flow graph."  Clearly
     that isn't the case after pass_free_cfg.  But DF is freed much later
     because some back-ends want to use DF info even though the CFG is
     already gone.  It's not clear to me whether that is safe, actually.
     In any case, we expect BB to be non-NULL at least up to register
     allocation, so disallow a non-NULL BB up to there.  Not perfect
     but better than nothing...  */
  gcc_checking_assert (bb != NULL || reload_completed);

  /* Grow the side tables before touching them below, in case UID or
     its registers are beyond their current size.  */
  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.
     DEBUG_INSNs do not make a block's data flow solution dirty (at
     worst the LUIDs are no longer contiguous).  */
  if (bb != NULL && NONDEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      /* Queue the deletion instead of doing it; cancel any pending
	 rescan requests since they would act on a dead insn.  */
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	{
	  bitmap_clear_bit (&df->insns_to_rescan, uid);
	  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
	  bitmap_set_bit (&df->insns_to_delete, uid);
	}
      if (dump_file)
	fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  df_insn_info_delete (uid);
}
986 1.1 mrg
987 1.1 mrg
988 1.1 mrg /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
989 1.1 mrg
990 1.1 mrg static void
991 1.1 mrg df_free_collection_rec (class df_collection_rec *collection_rec)
992 1.1 mrg {
993 1.1 mrg unsigned int ix;
994 1.1 mrg struct df_scan_problem_data *problem_data
995 1.1 mrg = (struct df_scan_problem_data *) df_scan->problem_data;
996 1.1 mrg df_ref ref;
997 1.1 mrg struct df_mw_hardreg *mw;
998 1.1 mrg
999 1.1 mrg FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
1000 1.1 mrg df_free_ref (ref);
1001 1.1 mrg FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
1002 1.1 mrg df_free_ref (ref);
1003 1.1 mrg FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
1004 1.1 mrg df_free_ref (ref);
1005 1.1 mrg FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
1006 1.1 mrg problem_data->mw_reg_pool->remove (mw);
1007 1.1 mrg
1008 1.1 mrg collection_rec->def_vec.release ();
1009 1.1 mrg collection_rec->use_vec.release ();
1010 1.1 mrg collection_rec->eq_use_vec.release ();
1011 1.1 mrg collection_rec->mw_vec.release ();
1012 1.1 mrg }
1013 1.1 mrg
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  class df_collection_rec collection_rec;

  /* Nothing to do without dataflow, or for non-insns.  */
  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  /* Grow the side tables before any lookup below.  */
  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  /* Create an empty record now so that later queries on UID
	     are well-defined; the real scan happens when the deferred
	     rescans are processed.  */
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      /* A pending rescan supersedes pending deletion or notes-rescan.  */
      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  /* Rescanning immediately cancels any deferred action on UID.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false. */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
	 Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_info_free_fields (insn_info);
      df_insn_info_init_fields (insn_info, insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      /* No previous record: this is a brand new insn; scan it fresh.  */
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  return true;
}
1102 1.1 mrg
1103 1.1 mrg /* Same as df_insn_rescan, but don't mark the basic block as
1104 1.1 mrg dirty. */
1105 1.1 mrg
1106 1.1 mrg bool
1107 1.1 mrg df_insn_rescan_debug_internal (rtx_insn *insn)
1108 1.1 mrg {
1109 1.1 mrg unsigned int uid = INSN_UID (insn);
1110 1.1 mrg struct df_insn_info *insn_info;
1111 1.1 mrg
1112 1.1 mrg gcc_assert (DEBUG_INSN_P (insn)
1113 1.1 mrg && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
1114 1.1 mrg
1115 1.1 mrg if (!df)
1116 1.1 mrg return false;
1117 1.1 mrg
1118 1.1 mrg insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1119 1.1 mrg if (!insn_info)
1120 1.1 mrg return false;
1121 1.1 mrg
1122 1.1 mrg if (dump_file)
1123 1.1 mrg fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
1124 1.1 mrg
1125 1.1 mrg bitmap_clear_bit (&df->insns_to_delete, uid);
1126 1.1 mrg bitmap_clear_bit (&df->insns_to_rescan, uid);
1127 1.1 mrg bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1128 1.1 mrg
1129 1.1 mrg if (insn_info->defs == 0
1130 1.1 mrg && insn_info->uses == 0
1131 1.1 mrg && insn_info->eq_uses == 0
1132 1.1 mrg && insn_info->mw_hardregs == 0)
1133 1.1 mrg return false;
1134 1.1 mrg
1135 1.1 mrg df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
1136 1.1 mrg
1137 1.1 mrg if (df_chain)
1138 1.1 mrg {
1139 1.1 mrg df_ref_chain_delete_du_chain (insn_info->defs);
1140 1.1 mrg df_ref_chain_delete_du_chain (insn_info->uses);
1141 1.1 mrg df_ref_chain_delete_du_chain (insn_info->eq_uses);
1142 1.1 mrg }
1143 1.1 mrg
1144 1.1 mrg df_ref_chain_delete (insn_info->defs);
1145 1.1 mrg df_ref_chain_delete (insn_info->uses);
1146 1.1 mrg df_ref_chain_delete (insn_info->eq_uses);
1147 1.1 mrg
1148 1.1 mrg insn_info->defs = 0;
1149 1.1 mrg insn_info->uses = 0;
1150 1.1 mrg insn_info->eq_uses = 0;
1151 1.1 mrg insn_info->mw_hardregs = 0;
1152 1.1 mrg
1153 1.1 mrg return true;
1154 1.1 mrg }
1155 1.1 mrg
1156 1.1 mrg
1157 1.1 mrg /* Rescan all of the insns in the function. Note that the artificial
1158 1.1 mrg uses and defs are not touched. This function will destroy def-use
1159 1.1 mrg or use-def chains. */
1160 1.1 mrg
1161 1.1 mrg void
1162 1.1 mrg df_insn_rescan_all (void)
1163 1.1 mrg {
1164 1.1 mrg bool no_insn_rescan = false;
1165 1.1 mrg bool defer_insn_rescan = false;
1166 1.1 mrg basic_block bb;
1167 1.1 mrg bitmap_iterator bi;
1168 1.1 mrg unsigned int uid;
1169 1.1 mrg
1170 1.1 mrg if (df->changeable_flags & DF_NO_INSN_RESCAN)
1171 1.1 mrg {
1172 1.1 mrg df_clear_flags (DF_NO_INSN_RESCAN);
1173 1.1 mrg no_insn_rescan = true;
1174 1.1 mrg }
1175 1.1 mrg
1176 1.1 mrg if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1177 1.1 mrg {
1178 1.1 mrg df_clear_flags (DF_DEFER_INSN_RESCAN);
1179 1.1 mrg defer_insn_rescan = true;
1180 1.1 mrg }
1181 1.1 mrg
1182 1.1 mrg auto_bitmap tmp (&df_bitmap_obstack);
1183 1.1 mrg bitmap_copy (tmp, &df->insns_to_delete);
1184 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1185 1.1 mrg {
1186 1.1 mrg struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1187 1.1 mrg if (insn_info)
1188 1.1 mrg df_insn_info_delete (uid);
1189 1.1 mrg }
1190 1.1 mrg
1191 1.1 mrg bitmap_clear (&df->insns_to_delete);
1192 1.1 mrg bitmap_clear (&df->insns_to_rescan);
1193 1.1 mrg bitmap_clear (&df->insns_to_notes_rescan);
1194 1.1 mrg
1195 1.1 mrg FOR_EACH_BB_FN (bb, cfun)
1196 1.1 mrg {
1197 1.1 mrg rtx_insn *insn;
1198 1.1 mrg FOR_BB_INSNS (bb, insn)
1199 1.1 mrg {
1200 1.1 mrg df_insn_rescan (insn);
1201 1.1 mrg }
1202 1.1 mrg }
1203 1.1 mrg
1204 1.1 mrg if (no_insn_rescan)
1205 1.1 mrg df_set_flags (DF_NO_INSN_RESCAN);
1206 1.1 mrg if (defer_insn_rescan)
1207 1.1 mrg df_set_flags (DF_DEFER_INSN_RESCAN);
1208 1.1 mrg }
1209 1.1 mrg
1210 1.1 mrg
1211 1.1 mrg /* Process all of the deferred rescans or deletions. */
1212 1.1 mrg
1213 1.1 mrg void
1214 1.1 mrg df_process_deferred_rescans (void)
1215 1.1 mrg {
1216 1.1 mrg bool no_insn_rescan = false;
1217 1.1 mrg bool defer_insn_rescan = false;
1218 1.1 mrg bitmap_iterator bi;
1219 1.1 mrg unsigned int uid;
1220 1.1 mrg
1221 1.1 mrg if (df->changeable_flags & DF_NO_INSN_RESCAN)
1222 1.1 mrg {
1223 1.1 mrg df_clear_flags (DF_NO_INSN_RESCAN);
1224 1.1 mrg no_insn_rescan = true;
1225 1.1 mrg }
1226 1.1 mrg
1227 1.1 mrg if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1228 1.1 mrg {
1229 1.1 mrg df_clear_flags (DF_DEFER_INSN_RESCAN);
1230 1.1 mrg defer_insn_rescan = true;
1231 1.1 mrg }
1232 1.1 mrg
1233 1.1 mrg if (dump_file)
1234 1.1 mrg fprintf (dump_file, "starting the processing of deferred insns\n");
1235 1.1 mrg
1236 1.1 mrg auto_bitmap tmp (&df_bitmap_obstack);
1237 1.1 mrg bitmap_copy (tmp, &df->insns_to_delete);
1238 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1239 1.1 mrg {
1240 1.1 mrg struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1241 1.1 mrg if (insn_info)
1242 1.1 mrg df_insn_info_delete (uid);
1243 1.1 mrg }
1244 1.1 mrg
1245 1.1 mrg bitmap_copy (tmp, &df->insns_to_rescan);
1246 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1247 1.1 mrg {
1248 1.1 mrg struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1249 1.1 mrg if (insn_info)
1250 1.1 mrg df_insn_rescan (insn_info->insn);
1251 1.1 mrg }
1252 1.1 mrg
1253 1.1 mrg bitmap_copy (tmp, &df->insns_to_notes_rescan);
1254 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
1255 1.1 mrg {
1256 1.1 mrg struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1257 1.1 mrg if (insn_info)
1258 1.1 mrg df_notes_rescan (insn_info->insn);
1259 1.1 mrg }
1260 1.1 mrg
1261 1.1 mrg if (dump_file)
1262 1.1 mrg fprintf (dump_file, "ending the processing of deferred insns\n");
1263 1.1 mrg
1264 1.1 mrg bitmap_clear (&df->insns_to_delete);
1265 1.1 mrg bitmap_clear (&df->insns_to_rescan);
1266 1.1 mrg bitmap_clear (&df->insns_to_notes_rescan);
1267 1.1 mrg
1268 1.1 mrg if (no_insn_rescan)
1269 1.1 mrg df_set_flags (DF_NO_INSN_RESCAN);
1270 1.1 mrg if (defer_insn_rescan)
1271 1.1 mrg df_set_flags (DF_DEFER_INSN_RESCAN);
1272 1.1 mrg
1273 1.1 mrg /* If someone changed regs_ever_live during this pass, fix up the
1274 1.1 mrg entry and exit blocks. */
1275 1.1 mrg if (df->redo_entry_and_exit)
1276 1.1 mrg {
1277 1.1 mrg df_update_entry_exit_and_calls ();
1278 1.1 mrg df->redo_entry_and_exit = false;
1279 1.1 mrg }
1280 1.1 mrg }
1281 1.1 mrg
1282 1.1 mrg
1283 1.1 mrg /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1284 1.1 mrg the uses if INCLUDE_USES. Include the eq_uses if
1285 1.1 mrg INCLUDE_EQ_USES. */
1286 1.1 mrg
1287 1.1 mrg static unsigned int
1288 1.1 mrg df_count_refs (bool include_defs, bool include_uses,
1289 1.1 mrg bool include_eq_uses)
1290 1.1 mrg {
1291 1.1 mrg unsigned int regno;
1292 1.1 mrg int size = 0;
1293 1.1 mrg unsigned int m = df->regs_inited;
1294 1.1 mrg
1295 1.1 mrg for (regno = 0; regno < m; regno++)
1296 1.1 mrg {
1297 1.1 mrg if (include_defs)
1298 1.1 mrg size += DF_REG_DEF_COUNT (regno);
1299 1.1 mrg if (include_uses)
1300 1.1 mrg size += DF_REG_USE_COUNT (regno);
1301 1.1 mrg if (include_eq_uses)
1302 1.1 mrg size += DF_REG_EQ_USE_COUNT (regno);
1303 1.1 mrg }
1304 1.1 mrg return size;
1305 1.1 mrg }
1306 1.1 mrg
1307 1.1 mrg
1308 1.1 mrg /* Take build ref table for either the uses or defs from the reg-use
1309 1.1 mrg or reg-def chains. This version processes the refs in reg order
1310 1.1 mrg which is likely to be best if processing the whole function. */
1311 1.1 mrg
1312 1.1 mrg static void
1313 1.1 mrg df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1314 1.1 mrg bool include_defs,
1315 1.1 mrg bool include_uses,
1316 1.1 mrg bool include_eq_uses)
1317 1.1 mrg {
1318 1.1 mrg unsigned int m = df->regs_inited;
1319 1.1 mrg unsigned int regno;
1320 1.1 mrg unsigned int offset = 0;
1321 1.1 mrg unsigned int start;
1322 1.1 mrg
1323 1.1 mrg if (df->changeable_flags & DF_NO_HARD_REGS)
1324 1.1 mrg {
1325 1.1 mrg start = FIRST_PSEUDO_REGISTER;
1326 1.1 mrg memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1327 1.1 mrg memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1328 1.1 mrg }
1329 1.1 mrg else
1330 1.1 mrg start = 0;
1331 1.1 mrg
1332 1.1 mrg ref_info->total_size
1333 1.1 mrg = df_count_refs (include_defs, include_uses, include_eq_uses);
1334 1.1 mrg
1335 1.1 mrg df_check_and_grow_ref_info (ref_info, 1);
1336 1.1 mrg
1337 1.1 mrg for (regno = start; regno < m; regno++)
1338 1.1 mrg {
1339 1.1 mrg int count = 0;
1340 1.1 mrg ref_info->begin[regno] = offset;
1341 1.1 mrg if (include_defs)
1342 1.1 mrg {
1343 1.1 mrg df_ref ref = DF_REG_DEF_CHAIN (regno);
1344 1.1 mrg while (ref)
1345 1.1 mrg {
1346 1.1 mrg ref_info->refs[offset] = ref;
1347 1.1 mrg DF_REF_ID (ref) = offset++;
1348 1.1 mrg count++;
1349 1.1 mrg ref = DF_REF_NEXT_REG (ref);
1350 1.1 mrg gcc_checking_assert (offset < ref_info->refs_size);
1351 1.1 mrg }
1352 1.1 mrg }
1353 1.1 mrg if (include_uses)
1354 1.1 mrg {
1355 1.1 mrg df_ref ref = DF_REG_USE_CHAIN (regno);
1356 1.1 mrg while (ref)
1357 1.1 mrg {
1358 1.1 mrg ref_info->refs[offset] = ref;
1359 1.1 mrg DF_REF_ID (ref) = offset++;
1360 1.1 mrg count++;
1361 1.1 mrg ref = DF_REF_NEXT_REG (ref);
1362 1.1 mrg gcc_checking_assert (offset < ref_info->refs_size);
1363 1.1 mrg }
1364 1.1 mrg }
1365 1.1 mrg if (include_eq_uses)
1366 1.1 mrg {
1367 1.1 mrg df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1368 1.1 mrg while (ref)
1369 1.1 mrg {
1370 1.1 mrg ref_info->refs[offset] = ref;
1371 1.1 mrg DF_REF_ID (ref) = offset++;
1372 1.1 mrg count++;
1373 1.1 mrg ref = DF_REF_NEXT_REG (ref);
1374 1.1 mrg gcc_checking_assert (offset < ref_info->refs_size);
1375 1.1 mrg }
1376 1.1 mrg }
1377 1.1 mrg ref_info->count[regno] = count;
1378 1.1 mrg }
1379 1.1 mrg
1380 1.1 mrg /* The bitmap size is not decremented when refs are deleted. So
1381 1.1 mrg reset it now that we have squished out all of the empty
1382 1.1 mrg slots. */
1383 1.1 mrg ref_info->table_size = offset;
1384 1.1 mrg }
1385 1.1 mrg
1386 1.1 mrg
1387 1.1 mrg /* Take build ref table for either the uses or defs from the reg-use
1388 1.1 mrg or reg-def chains. This version processes the refs in insn order
1389 1.1 mrg which is likely to be best if processing some segment of the
1390 1.1 mrg function. */
1391 1.1 mrg
1392 1.1 mrg static void
1393 1.1 mrg df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1394 1.1 mrg bool include_defs,
1395 1.1 mrg bool include_uses,
1396 1.1 mrg bool include_eq_uses)
1397 1.1 mrg {
1398 1.1 mrg bitmap_iterator bi;
1399 1.1 mrg unsigned int bb_index;
1400 1.1 mrg unsigned int m = df->regs_inited;
1401 1.1 mrg unsigned int offset = 0;
1402 1.1 mrg unsigned int r;
1403 1.1 mrg unsigned int start
1404 1.1 mrg = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1405 1.1 mrg
1406 1.1 mrg memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1407 1.1 mrg memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1408 1.1 mrg
1409 1.1 mrg ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1410 1.1 mrg df_check_and_grow_ref_info (ref_info, 1);
1411 1.1 mrg
1412 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1413 1.1 mrg {
1414 1.1 mrg basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1415 1.1 mrg rtx_insn *insn;
1416 1.1 mrg df_ref def, use;
1417 1.1 mrg
1418 1.1 mrg if (include_defs)
1419 1.1 mrg FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1420 1.1 mrg {
1421 1.1 mrg unsigned int regno = DF_REF_REGNO (def);
1422 1.1 mrg ref_info->count[regno]++;
1423 1.1 mrg }
1424 1.1 mrg if (include_uses)
1425 1.1 mrg FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1426 1.1 mrg {
1427 1.1 mrg unsigned int regno = DF_REF_REGNO (use);
1428 1.1 mrg ref_info->count[regno]++;
1429 1.1 mrg }
1430 1.1 mrg
1431 1.1 mrg FOR_BB_INSNS (bb, insn)
1432 1.1 mrg {
1433 1.1 mrg if (INSN_P (insn))
1434 1.1 mrg {
1435 1.1 mrg struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1436 1.1 mrg
1437 1.1 mrg if (include_defs)
1438 1.1 mrg FOR_EACH_INSN_INFO_DEF (def, insn_info)
1439 1.1 mrg {
1440 1.1 mrg unsigned int regno = DF_REF_REGNO (def);
1441 1.1 mrg ref_info->count[regno]++;
1442 1.1 mrg }
1443 1.1 mrg if (include_uses)
1444 1.1 mrg FOR_EACH_INSN_INFO_USE (use, insn_info)
1445 1.1 mrg {
1446 1.1 mrg unsigned int regno = DF_REF_REGNO (use);
1447 1.1 mrg ref_info->count[regno]++;
1448 1.1 mrg }
1449 1.1 mrg if (include_eq_uses)
1450 1.1 mrg FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1451 1.1 mrg {
1452 1.1 mrg unsigned int regno = DF_REF_REGNO (use);
1453 1.1 mrg ref_info->count[regno]++;
1454 1.1 mrg }
1455 1.1 mrg }
1456 1.1 mrg }
1457 1.1 mrg }
1458 1.1 mrg
1459 1.1 mrg for (r = start; r < m; r++)
1460 1.1 mrg {
1461 1.1 mrg ref_info->begin[r] = offset;
1462 1.1 mrg offset += ref_info->count[r];
1463 1.1 mrg ref_info->count[r] = 0;
1464 1.1 mrg }
1465 1.1 mrg
1466 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1467 1.1 mrg {
1468 1.1 mrg basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1469 1.1 mrg rtx_insn *insn;
1470 1.1 mrg df_ref def, use;
1471 1.1 mrg
1472 1.1 mrg if (include_defs)
1473 1.1 mrg FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1474 1.1 mrg {
1475 1.1 mrg unsigned int regno = DF_REF_REGNO (def);
1476 1.1 mrg if (regno >= start)
1477 1.1 mrg {
1478 1.1 mrg unsigned int id
1479 1.1 mrg = ref_info->begin[regno] + ref_info->count[regno]++;
1480 1.1 mrg DF_REF_ID (def) = id;
1481 1.1 mrg ref_info->refs[id] = def;
1482 1.1 mrg }
1483 1.1 mrg }
1484 1.1 mrg if (include_uses)
1485 1.1 mrg FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1486 1.1 mrg {
1487 1.1 mrg unsigned int regno = DF_REF_REGNO (def);
1488 1.1 mrg if (regno >= start)
1489 1.1 mrg {
1490 1.1 mrg unsigned int id
1491 1.1 mrg = ref_info->begin[regno] + ref_info->count[regno]++;
1492 1.1 mrg DF_REF_ID (use) = id;
1493 1.1 mrg ref_info->refs[id] = use;
1494 1.1 mrg }
1495 1.1 mrg }
1496 1.1 mrg
1497 1.1 mrg FOR_BB_INSNS (bb, insn)
1498 1.1 mrg {
1499 1.1 mrg if (INSN_P (insn))
1500 1.1 mrg {
1501 1.1 mrg struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1502 1.1 mrg
1503 1.1 mrg if (include_defs)
1504 1.1 mrg FOR_EACH_INSN_INFO_DEF (def, insn_info)
1505 1.1 mrg {
1506 1.1 mrg unsigned int regno = DF_REF_REGNO (def);
1507 1.1 mrg if (regno >= start)
1508 1.1 mrg {
1509 1.1 mrg unsigned int id
1510 1.1 mrg = ref_info->begin[regno] + ref_info->count[regno]++;
1511 1.1 mrg DF_REF_ID (def) = id;
1512 1.1 mrg ref_info->refs[id] = def;
1513 1.1 mrg }
1514 1.1 mrg }
1515 1.1 mrg if (include_uses)
1516 1.1 mrg FOR_EACH_INSN_INFO_USE (use, insn_info)
1517 1.1 mrg {
1518 1.1 mrg unsigned int regno = DF_REF_REGNO (use);
1519 1.1 mrg if (regno >= start)
1520 1.1 mrg {
1521 1.1 mrg unsigned int id
1522 1.1 mrg = ref_info->begin[regno] + ref_info->count[regno]++;
1523 1.1 mrg DF_REF_ID (use) = id;
1524 1.1 mrg ref_info->refs[id] = use;
1525 1.1 mrg }
1526 1.1 mrg }
1527 1.1 mrg if (include_eq_uses)
1528 1.1 mrg FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1529 1.1 mrg {
1530 1.1 mrg unsigned int regno = DF_REF_REGNO (use);
1531 1.1 mrg if (regno >= start)
1532 1.1 mrg {
1533 1.1 mrg unsigned int id
1534 1.1 mrg = ref_info->begin[regno] + ref_info->count[regno]++;
1535 1.1 mrg DF_REF_ID (use) = id;
1536 1.1 mrg ref_info->refs[id] = use;
1537 1.1 mrg }
1538 1.1 mrg }
1539 1.1 mrg }
1540 1.1 mrg }
1541 1.1 mrg }
1542 1.1 mrg
1543 1.1 mrg /* The bitmap size is not decremented when refs are deleted. So
1544 1.1 mrg reset it now that we have squished out all of the empty
1545 1.1 mrg slots. */
1546 1.1 mrg
1547 1.1 mrg ref_info->table_size = offset;
1548 1.1 mrg }
1549 1.1 mrg
1550 1.1 mrg /* Take build ref table for either the uses or defs from the reg-use
1551 1.1 mrg or reg-def chains. */
1552 1.1 mrg
1553 1.1 mrg static void
1554 1.1 mrg df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1555 1.1 mrg bool include_defs,
1556 1.1 mrg bool include_uses,
1557 1.1 mrg bool include_eq_uses)
1558 1.1 mrg {
1559 1.1 mrg if (df->analyze_subset)
1560 1.1 mrg df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1561 1.1 mrg include_uses, include_eq_uses);
1562 1.1 mrg else
1563 1.1 mrg df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1564 1.1 mrg include_uses, include_eq_uses);
1565 1.1 mrg }
1566 1.1 mrg
1567 1.1 mrg
1568 1.1 mrg /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1569 1.1 mrg static unsigned int
1570 1.1 mrg df_add_refs_to_table (unsigned int offset,
1571 1.1 mrg struct df_ref_info *ref_info,
1572 1.1 mrg df_ref ref)
1573 1.1 mrg {
1574 1.1 mrg for (; ref; ref = DF_REF_NEXT_LOC (ref))
1575 1.1 mrg if (!(df->changeable_flags & DF_NO_HARD_REGS)
1576 1.1 mrg || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1577 1.1 mrg {
1578 1.1 mrg ref_info->refs[offset] = ref;
1579 1.1 mrg DF_REF_ID (ref) = offset++;
1580 1.1 mrg }
1581 1.1 mrg return offset;
1582 1.1 mrg }
1583 1.1 mrg
1584 1.1 mrg
1585 1.1 mrg /* Count the number of refs in all of the insns of BB. Include the
1586 1.1 mrg defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1587 1.1 mrg eq_uses if INCLUDE_EQ_USES. */
1588 1.1 mrg
1589 1.1 mrg static unsigned int
1590 1.1 mrg df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1591 1.1 mrg struct df_ref_info *ref_info,
1592 1.1 mrg bool include_defs, bool include_uses,
1593 1.1 mrg bool include_eq_uses)
1594 1.1 mrg {
1595 1.1 mrg rtx_insn *insn;
1596 1.1 mrg
1597 1.1 mrg if (include_defs)
1598 1.1 mrg offset = df_add_refs_to_table (offset, ref_info,
1599 1.1 mrg df_get_artificial_defs (bb->index));
1600 1.1 mrg if (include_uses)
1601 1.1 mrg offset = df_add_refs_to_table (offset, ref_info,
1602 1.1 mrg df_get_artificial_uses (bb->index));
1603 1.1 mrg
1604 1.1 mrg FOR_BB_INSNS (bb, insn)
1605 1.1 mrg if (INSN_P (insn))
1606 1.1 mrg {
1607 1.1 mrg unsigned int uid = INSN_UID (insn);
1608 1.1 mrg if (include_defs)
1609 1.1 mrg offset = df_add_refs_to_table (offset, ref_info,
1610 1.1 mrg DF_INSN_UID_DEFS (uid));
1611 1.1 mrg if (include_uses)
1612 1.1 mrg offset = df_add_refs_to_table (offset, ref_info,
1613 1.1 mrg DF_INSN_UID_USES (uid));
1614 1.1 mrg if (include_eq_uses)
1615 1.1 mrg offset = df_add_refs_to_table (offset, ref_info,
1616 1.1 mrg DF_INSN_UID_EQ_USES (uid));
1617 1.1 mrg }
1618 1.1 mrg return offset;
1619 1.1 mrg }
1620 1.1 mrg
1621 1.1 mrg
1622 1.1 mrg /* Organize the refs by insn into the table in REF_INFO. If
1623 1.1 mrg blocks_to_analyze is defined, use that set, otherwise the entire
1624 1.1 mrg program. Include the defs if INCLUDE_DEFS. Include the uses if
1625 1.1 mrg INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1626 1.1 mrg
1627 1.1 mrg static void
1628 1.1 mrg df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1629 1.1 mrg bool include_defs, bool include_uses,
1630 1.1 mrg bool include_eq_uses)
1631 1.1 mrg {
1632 1.1 mrg basic_block bb;
1633 1.1 mrg unsigned int offset = 0;
1634 1.1 mrg
1635 1.1 mrg ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1636 1.1 mrg df_check_and_grow_ref_info (ref_info, 1);
1637 1.1 mrg if (df->blocks_to_analyze)
1638 1.1 mrg {
1639 1.1 mrg bitmap_iterator bi;
1640 1.1 mrg unsigned int index;
1641 1.1 mrg
1642 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1643 1.1 mrg {
1644 1.1 mrg offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
1645 1.1 mrg index),
1646 1.1 mrg offset, ref_info,
1647 1.1 mrg include_defs, include_uses,
1648 1.1 mrg include_eq_uses);
1649 1.1 mrg }
1650 1.1 mrg
1651 1.1 mrg ref_info->table_size = offset;
1652 1.1 mrg }
1653 1.1 mrg else
1654 1.1 mrg {
1655 1.1 mrg FOR_ALL_BB_FN (bb, cfun)
1656 1.1 mrg offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1657 1.1 mrg include_defs, include_uses,
1658 1.1 mrg include_eq_uses);
1659 1.1 mrg ref_info->table_size = offset;
1660 1.1 mrg }
1661 1.1 mrg }
1662 1.1 mrg
1663 1.1 mrg
1664 1.1 mrg /* If the use refs in DF are not organized, reorganize them. */
1665 1.1 mrg
1666 1.1 mrg void
1667 1.1 mrg df_maybe_reorganize_use_refs (enum df_ref_order order)
1668 1.1 mrg {
1669 1.1 mrg if (order == df->use_info.ref_order)
1670 1.1 mrg return;
1671 1.1 mrg
1672 1.1 mrg switch (order)
1673 1.1 mrg {
1674 1.1 mrg case DF_REF_ORDER_BY_REG:
1675 1.1 mrg df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1676 1.1 mrg break;
1677 1.1 mrg
1678 1.1 mrg case DF_REF_ORDER_BY_REG_WITH_NOTES:
1679 1.1 mrg df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1680 1.1 mrg break;
1681 1.1 mrg
1682 1.1 mrg case DF_REF_ORDER_BY_INSN:
1683 1.1 mrg df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1684 1.1 mrg break;
1685 1.1 mrg
1686 1.1 mrg case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1687 1.1 mrg df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1688 1.1 mrg break;
1689 1.1 mrg
1690 1.1 mrg case DF_REF_ORDER_NO_TABLE:
1691 1.1 mrg free (df->use_info.refs);
1692 1.1 mrg df->use_info.refs = NULL;
1693 1.1 mrg df->use_info.refs_size = 0;
1694 1.1 mrg break;
1695 1.1 mrg
1696 1.1 mrg case DF_REF_ORDER_UNORDERED:
1697 1.1 mrg case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1698 1.1 mrg gcc_unreachable ();
1699 1.1 mrg break;
1700 1.1 mrg }
1701 1.1 mrg
1702 1.1 mrg df->use_info.ref_order = order;
1703 1.1 mrg }
1704 1.1 mrg
1705 1.1 mrg
1706 1.1 mrg /* If the def refs in DF are not organized, reorganize them. */
1707 1.1 mrg
1708 1.1 mrg void
1709 1.1 mrg df_maybe_reorganize_def_refs (enum df_ref_order order)
1710 1.1 mrg {
1711 1.1 mrg if (order == df->def_info.ref_order)
1712 1.1 mrg return;
1713 1.1 mrg
1714 1.1 mrg switch (order)
1715 1.1 mrg {
1716 1.1 mrg case DF_REF_ORDER_BY_REG:
1717 1.1 mrg df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1718 1.1 mrg break;
1719 1.1 mrg
1720 1.1 mrg case DF_REF_ORDER_BY_INSN:
1721 1.1 mrg df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1722 1.1 mrg break;
1723 1.1 mrg
1724 1.1 mrg case DF_REF_ORDER_NO_TABLE:
1725 1.1 mrg free (df->def_info.refs);
1726 1.1 mrg df->def_info.refs = NULL;
1727 1.1 mrg df->def_info.refs_size = 0;
1728 1.1 mrg break;
1729 1.1 mrg
1730 1.1 mrg case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1731 1.1 mrg case DF_REF_ORDER_BY_REG_WITH_NOTES:
1732 1.1 mrg case DF_REF_ORDER_UNORDERED:
1733 1.1 mrg case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1734 1.1 mrg gcc_unreachable ();
1735 1.1 mrg break;
1736 1.1 mrg }
1737 1.1 mrg
1738 1.1 mrg df->def_info.ref_order = order;
1739 1.1 mrg }
1740 1.1 mrg
1741 1.1 mrg
1742 1.1 mrg /* Change all of the basic block references in INSN to use the insn's
1743 1.1 mrg current basic block. This function is called from routines that move
1744 1.1 mrg instructions from one block to another. */
1745 1.1 mrg
1746 1.1 mrg void
1747 1.1 mrg df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
1748 1.1 mrg {
1749 1.1 mrg basic_block old_bb = BLOCK_FOR_INSN (insn);
1750 1.1 mrg struct df_insn_info *insn_info;
1751 1.1 mrg unsigned int uid = INSN_UID (insn);
1752 1.1 mrg
1753 1.1 mrg if (old_bb == new_bb)
1754 1.1 mrg return;
1755 1.1 mrg
1756 1.1 mrg set_block_for_insn (insn, new_bb);
1757 1.1 mrg
1758 1.1 mrg if (!df)
1759 1.1 mrg return;
1760 1.1 mrg
1761 1.1 mrg if (dump_file)
1762 1.1 mrg fprintf (dump_file, "changing bb of uid %d\n", uid);
1763 1.1 mrg
1764 1.1 mrg insn_info = DF_INSN_UID_SAFE_GET (uid);
1765 1.1 mrg if (insn_info == NULL)
1766 1.1 mrg {
1767 1.1 mrg if (dump_file)
1768 1.1 mrg fprintf (dump_file, " unscanned insn\n");
1769 1.1 mrg df_insn_rescan (insn);
1770 1.1 mrg return;
1771 1.1 mrg }
1772 1.1 mrg
1773 1.1 mrg if (!INSN_P (insn))
1774 1.1 mrg return;
1775 1.1 mrg
1776 1.1 mrg if (!DEBUG_INSN_P (insn))
1777 1.1 mrg df_set_bb_dirty (new_bb);
1778 1.1 mrg if (old_bb)
1779 1.1 mrg {
1780 1.1 mrg if (dump_file)
1781 1.1 mrg fprintf (dump_file, " from %d to %d\n",
1782 1.1 mrg old_bb->index, new_bb->index);
1783 1.1 mrg if (!DEBUG_INSN_P (insn))
1784 1.1 mrg df_set_bb_dirty (old_bb);
1785 1.1 mrg }
1786 1.1 mrg else
1787 1.1 mrg if (dump_file)
1788 1.1 mrg fprintf (dump_file, " to %d\n", new_bb->index);
1789 1.1 mrg }
1790 1.1 mrg
1791 1.1 mrg
/* Helper function for df_ref_change_reg_with_loc.  Walk OLD_DF's
   reg_chain and move every ref whose recorded location is exactly LOC
   over to NEW_DF's chain, renumbering the ref to NEW_REGNO.
   Artificial refs have no location and so are never moved.  Because
   the regno is a sorting key of the per-insn ref lists, each moved
   ref is also repositioned inside its insn's defs/uses/eq_uses
   list.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
			      struct df_reg_info *new_df,
			      unsigned int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      /* Only refs that record a location can match LOC.  */
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
	  && DF_REF_LOC (the_ref)
	  && (*DF_REF_LOC (the_ref) == loc))
	{
	  /* Capture the neighbors now; the chain links of THE_REF are
	     about to be rewritten.  */
	  df_ref next_ref = DF_REF_NEXT_REG (the_ref);
	  df_ref prev_ref = DF_REF_PREV_REG (the_ref);
	  df_ref *ref_ptr;
	  struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    DF_REF_NEXT_REG (prev_ref) = next_ref;
	  else
	    old_df->reg_chain = next_ref;
	  if (next_ref)
	    DF_REF_PREV_REG (next_ref) = prev_ref;
	  old_df->n_refs--;

	  /* Put the ref into the new regno chain.  */
	  DF_REF_PREV_REG (the_ref) = NULL;
	  DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
	  if (new_df->reg_chain)
	    DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
	  new_df->reg_chain = the_ref;
	  new_df->n_refs++;
	  if (DF_REF_BB (the_ref))
	    df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to sort the record again that the ref was in because
	     the regno is a sorting key.  First, find the right
	     record.  */
	  if (DF_REF_REG_DEF_P (the_ref))
	    ref_ptr = &insn_info->defs;
	  else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
	    ref_ptr = &insn_info->eq_uses;
	  else
	    ref_ptr = &insn_info->uses;
	  if (dump_file)
	    fprintf (dump_file, "changing reg in insn %d\n",
		     DF_REF_INSN_UID (the_ref));

	  /* Stop if we find the current reference or where the reference
	     needs to be.  */
	  while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
	    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	  if (*ref_ptr != the_ref)
	    {
	      /* The reference needs to be promoted up the list.  Splice
		 it in at *REF_PTR, then walk forward to its old slot
		 and unlink it from there.  */
	      df_ref next = DF_REF_NEXT_LOC (the_ref);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr != the_ref);
	      *ref_ptr = next;
	    }
	  else if (DF_REF_NEXT_LOC (the_ref)
		   && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
	    {
	      /* The reference needs to be demoted down the list.
		 Unlink it, scan for the first element it no longer
		 sorts after, and reinsert it there.  */
	      *ref_ptr = DF_REF_NEXT_LOC (the_ref);
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	    }

	  the_ref = next_ref;
	}
      else
	the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
1880 1.1 mrg
1881 1.1 mrg
1882 1.1 mrg /* Change the regno of register LOC to NEW_REGNO and update the df
1883 1.1 mrg information accordingly. Refs that do not match LOC are not changed
1884 1.1 mrg which means that artificial refs are not changed since they have no loc.
1885 1.1 mrg This call is to support the SET_REGNO macro. */
1886 1.1 mrg
1887 1.1 mrg void
1888 1.1 mrg df_ref_change_reg_with_loc (rtx loc, unsigned int new_regno)
1889 1.1 mrg {
1890 1.1 mrg unsigned int old_regno = REGNO (loc);
1891 1.1 mrg if (old_regno == new_regno)
1892 1.1 mrg return;
1893 1.1 mrg
1894 1.1 mrg if (df)
1895 1.1 mrg {
1896 1.1 mrg df_grow_reg_info ();
1897 1.1 mrg
1898 1.1 mrg df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1899 1.1 mrg DF_REG_DEF_GET (new_regno),
1900 1.1 mrg new_regno, loc);
1901 1.1 mrg df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1902 1.1 mrg DF_REG_USE_GET (new_regno),
1903 1.1 mrg new_regno, loc);
1904 1.1 mrg df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1905 1.1 mrg DF_REG_EQ_USE_GET (new_regno),
1906 1.1 mrg new_regno, loc);
1907 1.1 mrg }
1908 1.1 mrg set_mode_and_regno (loc, GET_MODE (loc), new_regno);
1909 1.1 mrg }
1910 1.1 mrg
1911 1.1 mrg
1912 1.1 mrg /* Delete the mw_hardregs that point into the eq_notes. */
1913 1.1 mrg
1914 1.1 mrg static void
1915 1.1 mrg df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1916 1.1 mrg {
1917 1.1 mrg struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
1918 1.1 mrg struct df_scan_problem_data *problem_data
1919 1.1 mrg = (struct df_scan_problem_data *) df_scan->problem_data;
1920 1.1 mrg
1921 1.1 mrg while (*mw_ptr)
1922 1.1 mrg {
1923 1.1 mrg df_mw_hardreg *mw = *mw_ptr;
1924 1.1 mrg if (mw->flags & DF_REF_IN_NOTE)
1925 1.1 mrg {
1926 1.1 mrg *mw_ptr = DF_MWS_NEXT (mw);
1927 1.1 mrg problem_data->mw_reg_pool->remove (mw);
1928 1.1 mrg }
1929 1.1 mrg else
1930 1.1 mrg mw_ptr = &DF_MWS_NEXT (mw);
1931 1.1 mrg }
1932 1.1 mrg }
1933 1.1 mrg
1934 1.1 mrg
1935 1.1 mrg /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1936 1.1 mrg
1937 1.1 mrg void
1938 1.1 mrg df_notes_rescan (rtx_insn *insn)
1939 1.1 mrg {
1940 1.1 mrg struct df_insn_info *insn_info;
1941 1.1 mrg unsigned int uid = INSN_UID (insn);
1942 1.1 mrg
1943 1.1 mrg if (!df)
1944 1.1 mrg return;
1945 1.1 mrg
1946 1.1 mrg /* The client has disabled rescanning and plans to do it itself. */
1947 1.1 mrg if (df->changeable_flags & DF_NO_INSN_RESCAN)
1948 1.1 mrg return;
1949 1.1 mrg
1950 1.1 mrg /* Do nothing if the insn hasn't been emitted yet. */
1951 1.1 mrg if (!BLOCK_FOR_INSN (insn))
1952 1.1 mrg return;
1953 1.1 mrg
1954 1.1 mrg df_grow_bb_info (df_scan);
1955 1.1 mrg df_grow_reg_info ();
1956 1.1 mrg
1957 1.1 mrg insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1958 1.1 mrg
1959 1.1 mrg /* The client has deferred rescanning. */
1960 1.1 mrg if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1961 1.1 mrg {
1962 1.1 mrg if (!insn_info)
1963 1.1 mrg {
1964 1.1 mrg insn_info = df_insn_create_insn_record (insn);
1965 1.1 mrg insn_info->defs = 0;
1966 1.1 mrg insn_info->uses = 0;
1967 1.1 mrg insn_info->eq_uses = 0;
1968 1.1 mrg insn_info->mw_hardregs = 0;
1969 1.1 mrg }
1970 1.1 mrg
1971 1.1 mrg bitmap_clear_bit (&df->insns_to_delete, uid);
1972 1.1 mrg /* If the insn is set to be rescanned, it does not need to also
1973 1.1 mrg be notes rescanned. */
1974 1.1 mrg if (!bitmap_bit_p (&df->insns_to_rescan, uid))
1975 1.1 mrg bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
1976 1.1 mrg return;
1977 1.1 mrg }
1978 1.1 mrg
1979 1.1 mrg bitmap_clear_bit (&df->insns_to_delete, uid);
1980 1.1 mrg bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1981 1.1 mrg
1982 1.1 mrg if (insn_info)
1983 1.1 mrg {
1984 1.1 mrg basic_block bb = BLOCK_FOR_INSN (insn);
1985 1.1 mrg rtx note;
1986 1.1 mrg class df_collection_rec collection_rec;
1987 1.1 mrg unsigned int i;
1988 1.1 mrg
1989 1.1 mrg df_mw_hardreg_chain_delete_eq_uses (insn_info);
1990 1.1 mrg df_ref_chain_delete (insn_info->eq_uses);
1991 1.1 mrg insn_info->eq_uses = NULL;
1992 1.1 mrg
1993 1.1 mrg /* Process REG_EQUIV/REG_EQUAL notes */
1994 1.1 mrg for (note = REG_NOTES (insn); note;
1995 1.1 mrg note = XEXP (note, 1))
1996 1.1 mrg {
1997 1.1 mrg switch (REG_NOTE_KIND (note))
1998 1.1 mrg {
1999 1.1 mrg case REG_EQUIV:
2000 1.1 mrg case REG_EQUAL:
2001 1.1 mrg df_uses_record (&collection_rec,
2002 1.1 mrg &XEXP (note, 0), DF_REF_REG_USE,
2003 1.1 mrg bb, insn_info, DF_REF_IN_NOTE);
2004 1.1 mrg default:
2005 1.1 mrg break;
2006 1.1 mrg }
2007 1.1 mrg }
2008 1.1 mrg
2009 1.1 mrg /* Find some place to put any new mw_hardregs. */
2010 1.1 mrg df_canonize_collection_rec (&collection_rec);
2011 1.1 mrg struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
2012 1.1 mrg FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
2013 1.1 mrg {
2014 1.1 mrg while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
2015 1.1 mrg mw_ptr = &DF_MWS_NEXT (*mw_ptr);
2016 1.1 mrg DF_MWS_NEXT (mw) = *mw_ptr;
2017 1.1 mrg *mw_ptr = mw;
2018 1.1 mrg mw_ptr = &DF_MWS_NEXT (mw);
2019 1.1 mrg }
2020 1.1 mrg df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
2021 1.1 mrg }
2022 1.1 mrg else
2023 1.1 mrg df_insn_rescan (insn);
2024 1.1 mrg
2025 1.1 mrg }
2026 1.1 mrg
2027 1.1 mrg
2028 1.1 mrg /*----------------------------------------------------------------------------
2030 1.1 mrg Hard core instruction scanning code. No external interfaces here,
2031 1.1 mrg just a lot of routines that look inside insns.
2032 1.1 mrg ----------------------------------------------------------------------------*/
2033 1.1 mrg
2034 1.1 mrg
2035 1.1 mrg /* Return true if the contents of two df_ref's are identical.
2036 1.1 mrg It ignores DF_REF_MARKER. */
2037 1.1 mrg
2038 1.1 mrg static bool
2039 1.1 mrg df_ref_equal_p (df_ref ref1, df_ref ref2)
2040 1.1 mrg {
2041 1.1 mrg if (!ref2)
2042 1.1 mrg return false;
2043 1.1 mrg
2044 1.1 mrg if (ref1 == ref2)
2045 1.1 mrg return true;
2046 1.1 mrg
2047 1.1 mrg if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2048 1.1 mrg || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2049 1.1 mrg || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2050 1.1 mrg || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2051 1.1 mrg || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2052 1.1 mrg != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2053 1.1 mrg || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2054 1.1 mrg || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2055 1.1 mrg return false;
2056 1.1 mrg
2057 1.1 mrg switch (DF_REF_CLASS (ref1))
2058 1.1 mrg {
2059 1.1 mrg case DF_REF_ARTIFICIAL:
2060 1.1 mrg case DF_REF_BASE:
2061 1.1 mrg return true;
2062 1.1 mrg
2063 1.1 mrg case DF_REF_REGULAR:
2064 1.1 mrg return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2065 1.1 mrg
2066 1.1 mrg default:
2067 1.1 mrg gcc_unreachable ();
2068 1.1 mrg }
2069 1.1 mrg }
2070 1.1 mrg
2071 1.1 mrg
/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  Ties fall back
   on DF_REF_ORDER (creation order), which keeps the sort stable.  */

static int
df_ref_compare (df_ref ref1, df_ref ref2)
{
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  /* Different underlying regs with the same regno: creation order
     decides.  */
  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them is from
	 a mw and one is not, we need to have the one with the mw
	 first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
	  DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
	return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
	return -1;
      else
	return 1;
    }

  /* Equal on every key; keep the original creation order.  */
  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2112 1.1 mrg
2113 1.1 mrg /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2. */
2114 1.1 mrg
2115 1.1 mrg static int
2116 1.1 mrg df_ref_ptr_compare (const void *r1, const void *r2)
2117 1.1 mrg {
2118 1.1 mrg return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
2119 1.1 mrg }
2120 1.1 mrg
/* Sort and compress a set of refs: order REF_VEC by df_ref_compare
   and drop duplicate entries (as defined by df_ref_equal_p), freeing
   the duplicates' storage.  */

static void
df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;  /* Number of duplicates dropped so far.  */

  count = ref_vec->length ();

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* For exactly two elements a single compare-and-swap beats a
	 qsort call.  */
      df_ref r0 = (*ref_vec)[0];
      df_ref r1 = (*ref_vec)[1];
      if (df_ref_compare (r0, r1) > 0)
	std::swap ((*ref_vec)[0], (*ref_vec)[1]);
    }
  else
    {
      /* Scan for the first adjacent pair that is not strictly
	 increasing.  */
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = (*ref_vec)[i];
	  df_ref r1 = (*ref_vec)[i + 1];
	  if (df_ref_compare (r0, r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
	 which is the most common case for large COUNT case
	 (which happens for CALL INSNs),
	 no need to sort and filter out duplicate.
	 Simply return the count.
	 Make sure DF_GET_ADD_REFS adds refs in the increasing order
	 of DF_REF_COMPARE.  */
      if (i == count - 1)
	return;
      ref_vec->qsort (df_ref_ptr_compare);
    }

  /* Compact the vector in place: each surviving element is shifted
     down by DIST, the number of duplicates seen before it.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p ((*ref_vec)[i],
				(*ref_vec)[i + dist + 1]))
	{
	  df_free_ref ((*ref_vec)[i + dist + 1]);
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	(*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
    }

  count -= dist;
  ref_vec->truncate (count);
}
2181 1.1 mrg
2182 1.1 mrg
2183 1.1 mrg /* Return true if the contents of two df_ref's are identical.
2184 1.1 mrg It ignores DF_REF_MARKER. */
2185 1.1 mrg
2186 1.1 mrg static bool
2187 1.1 mrg df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2188 1.1 mrg {
2189 1.1 mrg if (!mw2)
2190 1.1 mrg return false;
2191 1.1 mrg return (mw1 == mw2) ||
2192 1.1 mrg (mw1->mw_reg == mw2->mw_reg
2193 1.1 mrg && mw1->type == mw2->type
2194 1.1 mrg && mw1->flags == mw2->flags
2195 1.1 mrg && mw1->start_regno == mw2->start_regno
2196 1.1 mrg && mw1->end_regno == mw2->end_regno);
2197 1.1 mrg }
2198 1.1 mrg
2199 1.1 mrg
2200 1.1 mrg /* Compare MW1 and MW2 for sorting. */
2201 1.1 mrg
2202 1.1 mrg static int
2203 1.1 mrg df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2204 1.1 mrg {
2205 1.1 mrg if (mw1->type != mw2->type)
2206 1.1 mrg return mw1->type - mw2->type;
2207 1.1 mrg
2208 1.1 mrg if (mw1->flags != mw2->flags)
2209 1.1 mrg return mw1->flags - mw2->flags;
2210 1.1 mrg
2211 1.1 mrg if (mw1->start_regno != mw2->start_regno)
2212 1.1 mrg return mw1->start_regno - mw2->start_regno;
2213 1.1 mrg
2214 1.1 mrg if (mw1->end_regno != mw2->end_regno)
2215 1.1 mrg return mw1->end_regno - mw2->end_regno;
2216 1.1 mrg
2217 1.1 mrg return mw1->mw_order - mw2->mw_order;
2218 1.1 mrg }
2219 1.1 mrg
2220 1.1 mrg /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2. */
2221 1.1 mrg
2222 1.1 mrg static int
2223 1.1 mrg df_mw_ptr_compare (const void *m1, const void *m2)
2224 1.1 mrg {
2225 1.1 mrg return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2226 1.1 mrg *(const df_mw_hardreg *const *) m2);
2227 1.1 mrg }
2228 1.1 mrg
2229 1.1 mrg /* Sort and compress a set of refs. */
2230 1.1 mrg
2231 1.1 mrg static void
2232 1.1 mrg df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
2233 1.1 mrg {
2234 1.1 mrg unsigned int count;
2235 1.1 mrg struct df_scan_problem_data *problem_data
2236 1.1 mrg = (struct df_scan_problem_data *) df_scan->problem_data;
2237 1.1 mrg unsigned int i;
2238 1.1 mrg unsigned int dist = 0;
2239 1.1 mrg
2240 1.1 mrg count = mw_vec->length ();
2241 1.1 mrg if (count < 2)
2242 1.1 mrg return;
2243 1.1 mrg else if (count == 2)
2244 1.1 mrg {
2245 1.1 mrg struct df_mw_hardreg *m0 = (*mw_vec)[0];
2246 1.1 mrg struct df_mw_hardreg *m1 = (*mw_vec)[1];
2247 1.1 mrg if (df_mw_compare (m0, m1) > 0)
2248 1.1 mrg {
2249 1.1 mrg struct df_mw_hardreg *tmp = (*mw_vec)[0];
2250 1.1 mrg (*mw_vec)[0] = (*mw_vec)[1];
2251 1.1 mrg (*mw_vec)[1] = tmp;
2252 1.1 mrg }
2253 1.1 mrg }
2254 1.1 mrg else
2255 1.1 mrg mw_vec->qsort (df_mw_ptr_compare);
2256 1.1 mrg
2257 1.1 mrg for (i=0; i<count-dist; i++)
2258 1.1 mrg {
2259 1.1 mrg /* Find the next ref that is not equal to the current ref. */
2260 1.1 mrg while (i + dist + 1 < count
2261 1.1 mrg && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2262 1.1 mrg {
2263 1.1 mrg problem_data->mw_reg_pool->remove ((*mw_vec)[i + dist + 1]);
2264 1.1 mrg dist++;
2265 1.1 mrg }
2266 1.1 mrg /* Copy it down to the next position. */
2267 1.1 mrg if (dist && i + dist + 1 < count)
2268 1.1 mrg (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2269 1.1 mrg }
2270 1.1 mrg
2271 1.1 mrg count -= dist;
2272 1.1 mrg mw_vec->truncate (count);
2273 1.1 mrg }
2274 1.1 mrg
2275 1.1 mrg
/* Sort and remove duplicates from the COLLECTION_REC.  Canonizes all
   four vectors (defs, uses, eq_uses, multiword hardregs) so that
   installation can assume sorted, duplicate-free input.  */

static void
df_canonize_collection_rec (class df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}
2286 1.1 mrg
2287 1.1 mrg
/* Add the new df_ref THIS_REF to the appropriate reg_info/ref_info
   chains: push it on the front of REG_INFO's reg_chain, maintain the
   hard-regs-live counters, and, if ADD_TO_TABLE, append it to
   REF_INFO's flat refs table, assigning its id.  */

static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      /* Only hard registers may carry DF_HARD_REG_LIVE.  */
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  /* The ref must not already be linked into any reg chain.  */
  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
		       && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    /* Refs kept out of the table get an invalid id.  */
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
2334 1.1 mrg
2335 1.1 mrg
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) and installs the entire group into the insn.  It also adds
   each of these refs into the appropriate chains.  IS_NOTES is true
   when the refs come from REG_EQUIV/REG_EQUAL notes.  Returns the
   head of the NEXT_LOC-linked group, or 0 if OLD_VEC is empty.  */

static df_ref
df_install_refs (basic_block bb,
		 const vec<df_ref, va_heap> *old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count = old_vec->length ();
  if (count)
    {
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Installing new refs invalidates any sorted table order, so
	 downgrade REF_INFO's order to the matching unordered state;
	 the order also decides whether refs go into the flat table.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
	{
	  /* Thread the group into a NEXT_LOC chain as it is
	     installed.  */
	  DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
					? (*old_vec)[ix + 1]
					: NULL);
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}
      return (*old_vec)[0];
    }
  else
    return 0;
}
2390 1.1 mrg
2391 1.1 mrg
2392 1.1 mrg /* This function takes the mws installs the entire group into the
2393 1.1 mrg insn. */
2394 1.1 mrg
2395 1.1 mrg static struct df_mw_hardreg *
2396 1.1 mrg df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
2397 1.1 mrg {
2398 1.1 mrg unsigned int count = old_vec->length ();
2399 1.1 mrg if (count)
2400 1.1 mrg {
2401 1.1 mrg for (unsigned int i = 0; i < count - 1; i++)
2402 1.1 mrg DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2403 1.1 mrg DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2404 1.1 mrg return (*old_vec)[0];
2405 1.1 mrg }
2406 1.1 mrg else
2407 1.1 mrg return 0;
2408 1.1 mrg }
2409 1.1 mrg
2410 1.1 mrg
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  FLAGS is a mask of
   copy_defs/copy_uses/copy_eq_uses/copy_mw bits selecting which
   vectors of COLLECTION_REC to install.  With a null INSN the refs
   are installed as BB's artificial defs and uses instead.  */

static void
df_refs_add_to_chains (class df_collection_rec *collection_rec,
		       basic_block bb, rtx_insn *insn, unsigned int flags)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
	 insn.  A null rec is a signal that the caller will handle the
	 chain specially.  */
      if (flags & copy_defs)
	{
	  /* The insn must not already have refs of this kind.  */
	  gcc_checking_assert (!insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, &collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (flags & copy_uses)
	{
	  gcc_checking_assert (!insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, &collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (flags & copy_eq_uses)
	{
	  /* Note uses share the use table with plain uses, but are
	     installed with is_notes set.  */
	  gcc_checking_assert (!insn_rec->eq_uses);
	  insn_rec->eq_uses
	    = df_install_refs (bb, &collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (flags & copy_mw)
	{
	  gcc_checking_assert (!insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (&collection_rec->mw_vec);
	}
    }
  else
    {
      /* No insn: these are BB's artificial refs.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      gcc_checking_assert (!bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, &collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      gcc_checking_assert (!bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, &collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
2471 1.1 mrg
2472 1.1 mrg
/* Allocate a ref of class CL and initialize its fields.  REG is the
   register (or SUBREG of one) being referenced, at location LOC
   (null for base and artificial refs) within basic block BB.  INFO is
   the owning insn's record (null for artificial refs).  REF_TYPE and
   REF_FLAGS seed the type and flags fields.  The new ref is pushed
   onto COLLECTION_REC when one is given, otherwise installed into the
   df chains immediately.  Returns the new ref.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 class df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags)
{
  df_ref this_ref = NULL;
  unsigned int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each class draws from its own allocation pool; only regular refs
     record a location, only artificial refs record their block.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) (problem_data->ref_base_pool->allocate ());
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) (problem_data->ref_artificial_pool->allocate ());
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) (problem_data->ref_regular_pool->allocate ());
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using old
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  /* Defs, note uses and plain uses go into separate vectors of the
     collection record; with no record, install the ref right away.  */
  if (collection_rec)
    {
      if (DF_REF_REG_DEF_P (this_ref))
	collection_rec->def_vec.safe_push (this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	collection_rec->eq_use_vec.safe_push (this_ref);
      else
	collection_rec->use_vec.safe_push (this_ref);
    }
  else
    df_install_ref_incremental (this_ref);

  return this_ref;
}
2555 1.1 mrg
2556 1.1 mrg
2557 1.1 mrg /* Create new references of type DF_REF_TYPE for each part of register REG
2558 1.1 mrg at address LOC within INSN of BB. */
2559 1.1 mrg
2560 1.1 mrg
static void
df_ref_record (enum df_ref_class cl,
	       class df_collection_rec *collection_rec,
	       rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  unsigned int regno;

  /* Only a REG or a SUBREG of a REG may be recorded here.  */
  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: create one df_ref for every hard regno that
	 REG covers, plus an extra df_mw_hardreg record when REG spans
	 more than one hard register.  */
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      if (GET_CODE (reg) == SUBREG)
	{
	  /* Narrow [regno, endregno) to the hard registers actually
	     touched by the SUBREG.  */
	  int off = subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					 SUBREG_BYTE (reg), GET_MODE (reg));
	  unsigned int nregno = regno + off;
	  endregno = nregno + subreg_nregs (reg);
	  if (off < 0 && regno < (unsigned) -off)
	    /* Deal with paradoxical SUBREGs on big endian where
	       in debug insns the hard reg number might be smaller
	       than -off, such as (subreg:DI (reg:SI 0 [+4 ]) 0));
	       RA decisions shouldn't be affected by debug insns
	       and so RA can decide to put pseudo into a hard reg
	       with small REGNO, even when it is referenced in
	       a paradoxical SUBREG in a debug insn.  */
	    regno = 0;
	  else
	    regno = nregno;
	}
      else
	endregno = END_REGNO (reg);

      /* If this is a multiword hardreg, we create some extra
	 datastructures that will enable us to easily build REG_DEAD
	 and REG_UNUSED notes.  */
      if (collection_rec
	  && (endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  gcc_assert (regno < endregno);

	  hardreg = problem_data->mw_reg_pool->allocate ();
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  collection_rec->mw_vec.safe_push (hardreg);
	}

      /* One df_ref per hard regno in the covered range; the ref uses
	 the canonical regno_reg_rtx for that regno, not REG itself.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags);

	  gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      /* Pseudo register: a single ref suffices.  */
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags);
    }
}
2642 1.1 mrg
2643 1.1 mrg
2644 1.1 mrg /* Process all the registers defined in the rtx pointed by LOC.
2645 1.1 mrg Autoincrement/decrement definitions will be picked up by df_uses_record.
2646 1.1 mrg Any change here has to be matched in df_find_hard_reg_defs_1. */
2647 1.1 mrg
2648 1.1 mrg static void
2649 1.1 mrg df_def_record_1 (class df_collection_rec *collection_rec,
2650 1.1 mrg rtx *loc, basic_block bb, struct df_insn_info *insn_info,
2651 1.1 mrg int flags)
2652 1.1 mrg {
2653 1.1 mrg rtx dst = *loc;
2654 1.1 mrg
2655 1.1 mrg /* It is legal to have a set destination be a parallel. */
2656 1.1 mrg if (GET_CODE (dst) == PARALLEL)
2657 1.1 mrg {
2658 1.1 mrg int i;
2659 1.1 mrg for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2660 1.1 mrg {
2661 1.1 mrg rtx temp = XVECEXP (dst, 0, i);
2662 1.1 mrg gcc_assert (GET_CODE (temp) == EXPR_LIST);
2663 1.1 mrg df_def_record_1 (collection_rec, &XEXP (temp, 0),
2664 1.1 mrg bb, insn_info, flags);
2665 1.1 mrg }
2666 1.1 mrg return;
2667 1.1 mrg }
2668 1.1 mrg
2669 1.1 mrg if (GET_CODE (dst) == STRICT_LOW_PART)
2670 1.1 mrg {
2671 1.1 mrg flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2672 1.1 mrg
2673 1.1 mrg loc = &XEXP (dst, 0);
2674 1.1 mrg dst = *loc;
2675 1.1 mrg }
2676 1.1 mrg
2677 1.1 mrg if (GET_CODE (dst) == ZERO_EXTRACT)
2678 1.1 mrg {
2679 1.1 mrg flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2680 1.1 mrg
2681 1.1 mrg loc = &XEXP (dst, 0);
2682 1.1 mrg dst = *loc;
2683 1.1 mrg }
2684 1.1 mrg
2685 1.1 mrg /* At this point if we do not have a reg or a subreg, just return. */
2686 1.1 mrg if (REG_P (dst))
2687 1.1 mrg {
2688 1.1 mrg df_ref_record (DF_REF_REGULAR, collection_rec,
2689 1.1 mrg dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2690 1.1 mrg
2691 1.1 mrg /* We want to keep sp alive everywhere - by making all
2692 1.1 mrg writes to sp also use of sp. */
2693 1.1 mrg if (REGNO (dst) == STACK_POINTER_REGNUM)
2694 1.1 mrg df_ref_record (DF_REF_BASE, collection_rec,
2695 1.1 mrg dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2696 1.1 mrg }
2697 1.1 mrg else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2698 1.1 mrg {
2699 1.1 mrg if (read_modify_subreg_p (dst))
2700 1.1 mrg flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2701 1.1 mrg
2702 1.1 mrg flags |= DF_REF_SUBREG;
2703 1.1 mrg
2704 1.1 mrg df_ref_record (DF_REF_REGULAR, collection_rec,
2705 1.1 mrg dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2706 1.1 mrg }
2707 1.1 mrg }
2708 1.1 mrg
2709 1.1 mrg
2710 1.1 mrg /* Process all the registers defined in the pattern rtx, X. Any change
2711 1.1 mrg here has to be matched in df_find_hard_reg_defs. */
2712 1.1 mrg
2713 1.1 mrg static void
2714 1.1 mrg df_defs_record (class df_collection_rec *collection_rec,
2715 1.1 mrg rtx x, basic_block bb, struct df_insn_info *insn_info,
2716 1.1 mrg int flags)
2717 1.1 mrg {
2718 1.1 mrg RTX_CODE code = GET_CODE (x);
2719 1.1 mrg int i;
2720 1.1 mrg
2721 1.1 mrg switch (code)
2722 1.1 mrg {
2723 1.1 mrg case SET:
2724 1.1 mrg df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
2725 1.1 mrg break;
2726 1.1 mrg
2727 1.1 mrg case CLOBBER:
2728 1.1 mrg flags |= DF_REF_MUST_CLOBBER;
2729 1.1 mrg df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
2730 1.1 mrg break;
2731 1.1 mrg
2732 1.1 mrg case COND_EXEC:
2733 1.1 mrg df_defs_record (collection_rec, COND_EXEC_CODE (x),
2734 1.1 mrg bb, insn_info, DF_REF_CONDITIONAL);
2735 1.1 mrg break;
2736 1.1 mrg
2737 1.1 mrg case PARALLEL:
2738 1.1 mrg for (i = 0; i < XVECLEN (x, 0); i++)
2739 1.1 mrg df_defs_record (collection_rec, XVECEXP (x, 0, i),
2740 1.1 mrg bb, insn_info, flags);
2741 1.1 mrg break;
2742 1.1 mrg default:
2743 1.1 mrg /* No DEFs to record in other cases */
2744 1.1 mrg break;
2745 1.1 mrg }
2746 1.1 mrg }
2747 1.1 mrg
/* Set bits in *DEFS for hard registers found in the rtx DST, which is the
   destination of a set or clobber.  This has to match the logic in
   df_def_record_1.  */
2751 1.1 mrg
2752 1.1 mrg static void
2753 1.1 mrg df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
2754 1.1 mrg {
2755 1.1 mrg /* It is legal to have a set destination be a parallel. */
2756 1.1 mrg if (GET_CODE (dst) == PARALLEL)
2757 1.1 mrg {
2758 1.1 mrg int i;
2759 1.1 mrg for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2760 1.1 mrg {
2761 1.1 mrg rtx temp = XVECEXP (dst, 0, i);
2762 1.1 mrg gcc_assert (GET_CODE (temp) == EXPR_LIST);
2763 1.1 mrg df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
2764 1.1 mrg }
2765 1.1 mrg return;
2766 1.1 mrg }
2767 1.1 mrg
2768 1.1 mrg if (GET_CODE (dst) == STRICT_LOW_PART)
2769 1.1 mrg dst = XEXP (dst, 0);
2770 1.1 mrg
2771 1.1 mrg if (GET_CODE (dst) == ZERO_EXTRACT)
2772 1.1 mrg dst = XEXP (dst, 0);
2773 1.1 mrg
2774 1.1 mrg /* At this point if we do not have a reg or a subreg, just return. */
2775 1.1 mrg if (REG_P (dst) && HARD_REGISTER_P (dst))
2776 1.1 mrg SET_HARD_REG_BIT (*defs, REGNO (dst));
2777 1.1 mrg else if (GET_CODE (dst) == SUBREG
2778 1.1 mrg && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
2779 1.1 mrg SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
2780 1.1 mrg }
2781 1.1 mrg
2782 1.1 mrg /* Set bits in *DEFS for hard registers defined in the pattern X. This
2783 1.1 mrg has to match the logic in df_defs_record. */
2784 1.1 mrg
2785 1.1 mrg static void
2786 1.1 mrg df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
2787 1.1 mrg {
2788 1.1 mrg RTX_CODE code = GET_CODE (x);
2789 1.1 mrg int i;
2790 1.1 mrg
2791 1.1 mrg switch (code)
2792 1.1 mrg {
2793 1.1 mrg case SET:
2794 1.1 mrg df_find_hard_reg_defs_1 (SET_DEST (x), defs);
2795 1.1 mrg break;
2796 1.1 mrg
2797 1.1 mrg case CLOBBER:
2798 1.1 mrg df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
2799 1.1 mrg break;
2800 1.1 mrg
2801 1.1 mrg case COND_EXEC:
2802 1.1 mrg df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
2803 1.1 mrg break;
2804 1.1 mrg
2805 1.1 mrg case PARALLEL:
2806 1.1 mrg for (i = 0; i < XVECLEN (x, 0); i++)
2807 1.1 mrg df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
2808 1.1 mrg break;
2809 1.1 mrg default:
2810 1.1 mrg /* No DEFs to record in other cases */
2811 1.1 mrg break;
2812 1.1 mrg }
2813 1.1 mrg }
2814 1.1 mrg
2815 1.1 mrg
2816 1.1 mrg /* Process all the registers used in the rtx at address LOC. */
2817 1.1 mrg
static void
df_uses_record (class df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
                basic_block bb, struct df_insn_info *insn_info,
                int flags)
{
  RTX_CODE code;
  rtx x;

  /* Tail-recursion entry point: LOC (and possibly FLAGS) may have been
     updated before jumping back here.  */
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants and address vectors contain no register uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (collection_rec,
			&XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE,
		        bb, insn_info,
			flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* The address of a MEM is a use.  Only DF_REF_IN_NOTE survives
	 into the address scan; the other flags described the MEM
	 itself, not its address.  */
      df_uses_record (collection_rec,
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
		      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
	  return;
	}
      /* Fall through */

    case REG:
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     x, loc, bb, insn_info,
		     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
	/* Position and width operands are plain uses.  */
	df_uses_record (collection_rec,
			&XEXP (x, 1), ref_type, bb, insn_info, flags);
	df_uses_record (collection_rec,
			&XEXP (x, 2), ref_type, bb, insn_info, flags);

	/* If the parameters to the zero or sign extract are
	   constants, strip them off and recurse, otherwise there is
	   no information that we can gain from this operation.  */
	if (code == ZERO_EXTRACT)
	  flags |= DF_REF_ZERO_EXTRACT;
	else
	  flags |= DF_REF_SIGN_EXTRACT;

	df_uses_record (collection_rec,
			&XEXP (x, 0), ref_type, bb, insn_info, flags);
	return;
      }
      break;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	df_uses_record (collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

	/* The destination contributes uses only in particular shapes
	   (addresses, read-modify-write subregs, extract operands).  */
	switch (GET_CODE (dst))
	  {
	    case SUBREG:
	      if (read_modify_subreg_p (dst))
		{
		  df_uses_record (collection_rec, &SUBREG_REG (dst),
				  DF_REF_REG_USE, bb, insn_info,
				  flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
		  break;
		}
	      /* Fall through.  */
	    case REG:
	    case PARALLEL:
	    case SCRATCH:
	    case PC:
	      break;
	    case MEM:
	      df_uses_record (collection_rec, &XEXP (dst, 0),
			      DF_REF_REG_MEM_STORE, bb, insn_info, flags);
	      break;
	    case STRICT_LOW_PART:
	      {
		rtx *temp = &XEXP (dst, 0);
		/* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
		dst = XEXP (dst, 0);
		df_uses_record (collection_rec,
				(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
				DF_REF_REG_USE, bb, insn_info,
				DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
	      }
	      break;
	    case ZERO_EXTRACT:
	      {
		df_uses_record (collection_rec, &XEXP (dst, 1),
				DF_REF_REG_USE, bb, insn_info, flags);
		df_uses_record (collection_rec, &XEXP (dst, 2),
				DF_REF_REG_USE, bb, insn_info, flags);
		/* A MEM destination of the extract is a plain use of its
		   address; a REG destination is read-modify-written.  */
		if (GET_CODE (XEXP (dst,0)) == MEM)
		  df_uses_record (collection_rec, &XEXP (dst, 0),
				  DF_REF_REG_USE, bb, insn_info,
				  flags);
		else
		  df_uses_record (collection_rec, &XEXP (dst, 0),
				  DF_REF_REG_USE, bb, insn_info,
				  DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
	      }
	      break;

	    default:
	      gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We cannot just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags);
	    return;
	  }
	break;
      }

    case VAR_LOCATION:
      df_uses_record (collection_rec,
		      &PAT_VAR_LOCATION_LOC (x),
		      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
		     bb, insn_info,
		     DF_REF_REG_DEF,
		     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ... */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (collection_rec,
			      &XVECEXP (x, i, j), ref_type,
			      bb, insn_info, flags);
	  }
      }
  }

  return;
}
3067 1.1 mrg
3068 1.1 mrg
3069 1.1 mrg /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
3070 1.1 mrg
3071 1.1 mrg static void
3072 1.1 mrg df_get_conditional_uses (class df_collection_rec *collection_rec)
3073 1.1 mrg {
3074 1.1 mrg unsigned int ix;
3075 1.1 mrg df_ref ref;
3076 1.1 mrg
3077 1.1 mrg FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
3078 1.1 mrg {
3079 1.1 mrg if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3080 1.1 mrg {
3081 1.1 mrg df_ref use;
3082 1.1 mrg
3083 1.1 mrg use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3084 1.1 mrg DF_REF_LOC (ref), DF_REF_BB (ref),
3085 1.1 mrg DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3086 1.1 mrg DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3087 1.1 mrg DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3088 1.1 mrg }
3089 1.1 mrg }
3090 1.1 mrg }
3091 1.1 mrg
3092 1.1 mrg
3093 1.1 mrg /* Get call's extra defs and uses (track caller-saved registers). */
3094 1.1 mrg
static void
df_get_call_refs (class df_collection_rec *collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  HARD_REG_SET defs_generated;

  /* Registers explicitly defined by the call pattern itself must not
     receive an additional MAY_CLOBBER def below.  */
  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
  function_abi callee_abi = insn_callee_abi (insn_info->insn);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM
	  && !FAKE_CALL_P (insn_info->insn))
	/* The stack ptr is used (honorarily) by a CALL insn.  */
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_USE,
		       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
	{
	  /* Calls to const functions cannot access any global registers and
	     calls to pure functions cannot set them.  All other calls may
	     reference any of the global registers, so they are recorded as
	     used.  */
	  if (!RTL_CONST_CALL_P (insn_info->insn))
	    {
	      df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			     NULL, bb, insn_info, DF_REF_REG_USE, flags);
	      if (!RTL_PURE_CALL_P (insn_info->insn))
		df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			       NULL, bb, insn_info, DF_REF_REG_DEF, flags);
	    }
	}
      else if (callee_abi.clobbers_full_reg_p (i)
	       /* no clobbers for regs that are the result of the call */
	       && !TEST_HARD_REG_BIT (defs_generated, i)
	       && (!is_sibling_call
		   || !bitmap_bit_p (df->exit_block_uses, i)
		   || refers_to_regno_p (i, crtl->return_rtx)))
	/* ABI-clobbered register: add a MAY_CLOBBER def unless this is
	   a sibling call and the register carries a live exit value
	   that is not part of the return value.  */
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_DEF,
		       DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      /* A clobber of a register the pattern already defines
		 would be redundant; skip it.  */
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
	      if (!TEST_HARD_REG_BIT (defs_generated, regno))
		df_defs_record (collection_rec, XEXP (note, 0), bb,
				insn_info, flags);
	    }
	  else
	    /* A clobber of a MEM: record the address registers as uses.  */
	    df_uses_record (collection_rec, &XEXP (note, 0),
		            DF_REF_REG_USE, bb, insn_info, flags);
	}
    }

  return;
}
3170 1.1 mrg
3171 1.1 mrg /* Collect all refs in the INSN. This function is free of any
3172 1.1 mrg side-effect - it will create and return a lists of df_ref's in the
3173 1.1 mrg COLLECTION_REC without putting those refs into existing ref chains
3174 1.1 mrg and reg chains. */
3175 1.1 mrg
static void
df_insn_refs_collect (class df_collection_rec *collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EQUIV:
	case REG_EQUAL:
	  /* DF_REF_IN_NOTE routes these uses into eq_use_vec rather
	     than use_vec (see df_ref_create_structure).  */
	  df_uses_record (collection_rec,
			  &XEXP (note, 0), DF_REF_REG_USE,
			  bb, insn_info, DF_REF_IN_NOTE);
	  break;
	case REG_NON_LOCAL_GOTO:
	  /* The frame ptr is used by a non-local goto.  */
	  df_ref_record (DF_REF_BASE, collection_rec,
			 regno_reg_rtx[FRAME_POINTER_REGNUM],
			 NULL, bb, insn_info,
			 DF_REF_REG_USE, 0);
	  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	    df_ref_record (DF_REF_BASE, collection_rec,
			   regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
			   NULL, bb, insn_info,
			   DF_REF_REG_USE, 0);
	  break;
	default:
	  break;
	}
    }

  int flags = (is_cond_exec) ? DF_REF_CONDITIONAL : 0;
  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE.  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info, flags);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
		  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES.  */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  df_canonize_collection_rec (collection_rec);
}
3239 1.1 mrg
3240 1.1 mrg /* Recompute the luids for the insns in BB. */
3241 1.1 mrg
3242 1.1 mrg void
3243 1.1 mrg df_recompute_luids (basic_block bb)
3244 1.1 mrg {
3245 1.1 mrg rtx_insn *insn;
3246 1.1 mrg int luid = 0;
3247 1.1 mrg
3248 1.1 mrg df_grow_insn_info ();
3249 1.1 mrg
3250 1.1 mrg /* Scan the block an insn at a time from beginning to end. */
3251 1.1 mrg FOR_BB_INSNS (bb, insn)
3252 1.1 mrg {
3253 1.1 mrg struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3254 1.1 mrg /* Inserting labels does not always trigger the incremental
3255 1.1 mrg rescanning. */
3256 1.1 mrg if (!insn_info)
3257 1.1 mrg {
3258 1.1 mrg gcc_assert (!INSN_P (insn));
3259 1.1 mrg insn_info = df_insn_create_insn_record (insn);
3260 1.1 mrg }
3261 1.1 mrg
3262 1.1 mrg DF_INSN_INFO_LUID (insn_info) = luid;
3263 1.1 mrg if (INSN_P (insn))
3264 1.1 mrg luid++;
3265 1.1 mrg }
3266 1.1 mrg }
3267 1.1 mrg
3268 1.1 mrg
3269 1.1 mrg /* Collect all artificial refs at the block level for BB and add them
3270 1.1 mrg to COLLECTION_REC. */
3271 1.1 mrg
static void
df_bb_refs_collect (class df_collection_rec *collection_rec, basic_block bb)
{
  /* Start from an empty collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* The entry and exit blocks have precomputed def/use sets.  */
  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;
	  /* DF_REF_AT_TOP: these defs occur before the block's insns.  */
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
	}
    }

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
		   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* EH landing pads get the EH artificial-use set; every other
	 block gets the regular set.  */
      bitmap au = bb_has_eh_pred (bb)
	? &df->eh_block_artificial_uses
	: &df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
	{
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_USE, 0);
	}
    }

  df_canonize_collection_rec (collection_rec);
}
3329 1.1 mrg
3330 1.1 mrg
3331 1.1 mrg /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3332 1.1 mrg
void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  rtx_insn *insn;
  int luid = 0;

  /* Nothing to do when the dataflow framework is not active.  */
  if (!df)
    return;

  df_collection_rec collection_rec;
  df_grow_bb_info (df_scan);
  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
	struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
	/* This function is only used on freshly discovered blocks, so
	   no insn may already have a record.  */
	gcc_assert (!insn_info);

	insn_info = df_insn_create_insn_record (insn);
	if (INSN_P (insn))
	  {
	    /* Record refs within INSN.  */
	    DF_INSN_INFO_LUID (insn_info) = luid++;
	    df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
	    df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
	  }
	/* NOTE(review): for real insns this overwrites the LUID set just
	   above with the post-increment value, whereas df_recompute_luids
	   assigns the pre-increment value — confirm this is intended.  */
	DF_INSN_INFO_LUID (insn_info) = luid;
      }

  /* Other block level artificial refs */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}
3371 1.1 mrg
3372 1.1 mrg
3373 1.1 mrg /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3374 1.1 mrg block. */
3375 1.1 mrg
static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      /* After reload only the hard frame pointer (when in use) needs
	 to be forced live in every block.  */
      if (frame_pointer_needed)
	bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	bitmap_set_bit (regular_block_artificial_uses,
			HARD_FRAME_POINTER_REGNUM);

      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (regular_block_artificial_uses, picreg);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
	trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
	initialization and where they the saved registers are restored
	from them, including the cases where we don't reach the epilogue
	(noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
3434 1.1 mrg
3435 1.1 mrg
3436 1.1 mrg /* Get the artificial use set for an eh block. */
3437 1.1 mrg
static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
  bitmap_clear (eh_block_artificial_uses);

  /* The following code (down through the arg_pointer setting) APPEARS
     to be necessary because there is nothing that actually
     describes what the exception handling code may actually need
     to keep alive.  */
  if (reload_completed)
    {
      if (frame_pointer_needed)
	{
	  bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
	  /* If the soft and hard frame pointers differ, keep the hard
	     one live too.  */
	  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	    bitmap_set_bit (eh_block_artificial_uses,
			    HARD_FRAME_POINTER_REGNUM);
	}
      /* Pseudos with argument area equivalences may require reloading
	 via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
    }
}
3461 1.1 mrg
3462 1.1 mrg
3463 1.1 mrg
3464 1.1 mrg /*----------------------------------------------------------------------------
3466 1.1 mrg Specialized hard register scanning functions.
3467 1.1 mrg ----------------------------------------------------------------------------*/
3468 1.1 mrg
3469 1.1 mrg
3470 1.1 mrg /* Mark a register in SET. Hard registers in large modes get all
3471 1.1 mrg of their component registers set as well. */
3472 1.1 mrg
3473 1.1 mrg static void
3474 1.1 mrg df_mark_reg (rtx reg, void *vset)
3475 1.1 mrg {
3476 1.1 mrg bitmap_set_range ((bitmap) vset, REGNO (reg), REG_NREGS (reg));
3477 1.1 mrg }
3478 1.1 mrg
3479 1.1 mrg
3480 1.1 mrg /* Set the bit for regs that are considered being defined at the entry. */
3481 1.1 mrg
static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  /* For separate shrink-wrapping we use LIVE to analyze which basic blocks
     need a prologue for some component to be executed before that block,
     and we do not care about any other registers.  Hence, we do not want
     any register for any component defined in the entry block, and we can
     just leave all registers undefined.  */
  if (df_scan->local_flags & DF_SCAN_EMPTY_ENTRY_EXIT)
    return;

  /* Global registers and incoming argument registers are defined on
     entry to the function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (global_regs[i])
	bitmap_set_bit (entry_block_defs, i);
      if (FUNCTION_ARG_REGNO_P (i))
	bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (targetm.have_prologue () && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (!crtl->abi->clobbers_full_reg_p (i)
	    && !fixed_regs[i]
	    && df_regs_ever_live_p (i))
	  bitmap_set_bit (entry_block_defs, i);
    }

  /* The register (if any) carrying the incoming structure-value
     address, per the target hook.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = rtx_for_static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (entry_block_defs, picreg);
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  /* The incoming return address, when it lives in a register.  */
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  /* Finally let the target add any other registers it considers
     defined on entry.  */
  targetm.extra_live_on_entry (entry_block_defs);
}
3568 1.1 mrg
3569 1.1 mrg
3570 1.1 mrg /* Return the (conservative) set of hard registers that are defined on
3571 1.1 mrg entry to the function.
3572 1.1 mrg It uses df->entry_block_defs to determine which register
3573 1.1 mrg reference to include. */
3574 1.1 mrg
3575 1.1 mrg static void
3576 1.1 mrg df_entry_block_defs_collect (class df_collection_rec *collection_rec,
3577 1.1 mrg bitmap entry_block_defs)
3578 1.1 mrg {
3579 1.1 mrg unsigned int i;
3580 1.1 mrg bitmap_iterator bi;
3581 1.1 mrg
3582 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3583 1.1 mrg {
3584 1.1 mrg df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3585 1.1 mrg ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3586 1.1 mrg }
3587 1.1 mrg
3588 1.1 mrg df_canonize_collection_rec (collection_rec);
3589 1.1 mrg }
3590 1.1 mrg
3591 1.1 mrg
3592 1.1 mrg /* Record the (conservative) set of hard registers that are defined on
3593 1.1 mrg entry to the function. */
3594 1.1 mrg
3595 1.1 mrg static void
3596 1.1 mrg df_record_entry_block_defs (bitmap entry_block_defs)
3597 1.1 mrg {
3598 1.1 mrg class df_collection_rec collection_rec;
3599 1.1 mrg df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3600 1.1 mrg
3601 1.1 mrg /* Process bb_refs chain */
3602 1.1 mrg df_refs_add_to_chains (&collection_rec,
3603 1.1 mrg BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3604 1.1 mrg NULL,
3605 1.1 mrg copy_defs);
3606 1.1 mrg }
3607 1.1 mrg
3608 1.1 mrg
3609 1.1 mrg /* Update the defs in the entry block. */
3610 1.1 mrg
3611 1.1 mrg void
3612 1.1 mrg df_update_entry_block_defs (void)
3613 1.1 mrg {
3614 1.1 mrg bool changed = false;
3615 1.1 mrg
3616 1.1 mrg auto_bitmap refs (&df_bitmap_obstack);
3617 1.1 mrg df_get_entry_block_def_set (refs);
3618 1.1 mrg gcc_assert (df->entry_block_defs);
3619 1.1 mrg if (!bitmap_equal_p (df->entry_block_defs, refs))
3620 1.1 mrg {
3621 1.1 mrg struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3622 1.1 mrg df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3623 1.1 mrg df_ref_chain_delete (bb_info->artificial_defs);
3624 1.1 mrg bb_info->artificial_defs = NULL;
3625 1.1 mrg changed = true;
3626 1.1 mrg }
3627 1.1 mrg
3628 1.1 mrg if (changed)
3629 1.1 mrg {
3630 1.1 mrg df_record_entry_block_defs (refs);
3631 1.1 mrg bitmap_copy (df->entry_block_defs, refs);
3632 1.1 mrg df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
3633 1.1 mrg }
3634 1.1 mrg }
3635 1.1 mrg
3636 1.1 mrg
3637 1.1 mrg /* Return true if REGNO is used by the epilogue. */
3638 1.1 mrg bool
3639 1.1 mrg df_epilogue_uses_p (unsigned int regno)
3640 1.1 mrg {
3641 1.1 mrg return (EPILOGUE_USES (regno)
3642 1.1 mrg || TEST_HARD_REG_BIT (crtl->must_be_zero_on_return, regno));
3643 1.1 mrg }
3644 1.1 mrg
3645 1.1 mrg /* Set the bit for regs that are considered being used at the exit. */
3646 1.1 mrg
static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;
  unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

  bitmap_clear (exit_block_uses);

  /* For separate shrink-wrapping we use LIVE to analyze which basic blocks
     need an epilogue for some component to be executed after that block,
     and we do not care about any other registers.  Hence, we do not want
     any register for any component seen as used in the exit block, and we
     can just say no registers at all are used.  */
  if (df_scan->local_flags & DF_SCAN_EMPTY_ENTRY_EXIT)
    return;

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && picreg != INVALID_REGNUM
      && fixed_regs[picreg])
    bitmap_set_bit (exit_block_uses, picreg);

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || df_epilogue_uses_p (i))
      bitmap_set_bit (exit_block_uses, i);

  if (targetm.have_epilogue () && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (df_regs_ever_live_p (i)
	    && !LOCAL_REGNO (i)
	    && !crtl->abi->clobbers_full_reg_p (i))
	  bitmap_set_bit (exit_block_uses, i);
    }

  /* Mark the registers that will contain data for the handler.
     EH_RETURN_DATA_REGNO yields INVALID_REGNUM past the last one.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	bitmap_set_bit (exit_block_uses, regno);
      }

#ifdef EH_RETURN_STACKADJ_RTX
  /* The stack-adjustment location for eh_return, when it is a
     register.  */
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Likewise the handler location for eh_return.  */
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
3736 1.1 mrg
3737 1.1 mrg
3738 1.1 mrg /* Return the refs of hard registers that are used in the exit block.
3739 1.1 mrg It uses df->exit_block_uses to determine register to include. */
3740 1.1 mrg
static void
df_exit_block_uses_collect (class df_collection_rec *collection_rec, bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  /* Record one artificial use in the exit block for every register in
     EXIT_BLOCK_USES.  */
  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  /* Put the collected refs into canonical order.  */
  df_canonize_collection_rec (collection_rec);
}
3763 1.1 mrg
3764 1.1 mrg
3765 1.1 mrg /* Record the set of hard registers that are used in the exit block.
3766 1.1 mrg It uses df->exit_block_uses to determine which bit to include. */
3767 1.1 mrg
3768 1.1 mrg static void
3769 1.1 mrg df_record_exit_block_uses (bitmap exit_block_uses)
3770 1.1 mrg {
3771 1.1 mrg class df_collection_rec collection_rec;
3772 1.1 mrg df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3773 1.1 mrg
3774 1.1 mrg /* Process bb_refs chain */
3775 1.1 mrg df_refs_add_to_chains (&collection_rec,
3776 1.1 mrg BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3777 1.1 mrg NULL,
3778 1.1 mrg copy_uses);
3779 1.1 mrg }
3780 1.1 mrg
3781 1.1 mrg
3782 1.1 mrg /* Update the uses in the exit block. */
3783 1.1 mrg
3784 1.1 mrg void
3785 1.1 mrg df_update_exit_block_uses (void)
3786 1.1 mrg {
3787 1.1 mrg bool changed = false;
3788 1.1 mrg
3789 1.1 mrg auto_bitmap refs (&df_bitmap_obstack);
3790 1.1 mrg df_get_exit_block_use_set (refs);
3791 1.1 mrg gcc_assert (df->exit_block_uses);
3792 1.1 mrg if (!bitmap_equal_p (df->exit_block_uses, refs))
3793 1.1 mrg {
3794 1.1 mrg struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3795 1.1 mrg df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3796 1.1 mrg df_ref_chain_delete (bb_info->artificial_uses);
3797 1.1 mrg bb_info->artificial_uses = NULL;
3798 1.1 mrg changed = true;
3799 1.1 mrg }
3800 1.1 mrg
3801 1.1 mrg if (changed)
3802 1.1 mrg {
3803 1.1 mrg df_record_exit_block_uses (refs);
3804 1.1 mrg bitmap_copy (df->exit_block_uses, refs);
3805 1.1 mrg df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
3806 1.1 mrg }
3807 1.1 mrg }
3808 1.1 mrg
/* True once df_hard_reg_init has run; used to make that setup
   happen only once.  */
static bool initialized = false;
3810 1.1 mrg
3811 1.1 mrg
3812 1.1 mrg /* Initialize some platform specific structures. */
3813 1.1 mrg
3814 1.1 mrg void
3815 1.1 mrg df_hard_reg_init (void)
3816 1.1 mrg {
3817 1.1 mrg int i;
3818 1.1 mrg static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3819 1.1 mrg
3820 1.1 mrg if (initialized)
3821 1.1 mrg return;
3822 1.1 mrg
3823 1.1 mrg /* Record which registers will be eliminated. We use this in
3824 1.1 mrg mark_used_regs. */
3825 1.1 mrg CLEAR_HARD_REG_SET (elim_reg_set);
3826 1.1 mrg
3827 1.1 mrg for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3828 1.1 mrg SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3829 1.1 mrg
3830 1.1 mrg initialized = true;
3831 1.1 mrg }
3832 1.1 mrg
3833 1.1 mrg /* Recompute the parts of scanning that are based on regs_ever_live
3834 1.1 mrg because something changed in that array. */
3835 1.1 mrg
3836 1.1 mrg void
3837 1.1 mrg df_update_entry_exit_and_calls (void)
3838 1.1 mrg {
3839 1.1 mrg basic_block bb;
3840 1.1 mrg
3841 1.1 mrg df_update_entry_block_defs ();
3842 1.1 mrg df_update_exit_block_uses ();
3843 1.1 mrg
3844 1.1 mrg /* The call insns need to be rescanned because there may be changes
3845 1.1 mrg in the set of registers clobbered across the call. */
3846 1.1 mrg FOR_EACH_BB_FN (bb, cfun)
3847 1.1 mrg {
3848 1.1 mrg rtx_insn *insn;
3849 1.1 mrg FOR_BB_INSNS (bb, insn)
3850 1.1 mrg {
3851 1.1 mrg if (INSN_P (insn) && CALL_P (insn))
3852 1.1 mrg df_insn_rescan (insn);
3853 1.1 mrg }
3854 1.1 mrg }
3855 1.1 mrg }
3856 1.1 mrg
3857 1.1 mrg
3858 1.1 mrg /* Return true if hard REG is actually used in the some instruction.
3859 1.1 mrg There are a fair number of conditions that affect the setting of
3860 1.1 mrg this array. See the comment in df.h for df->hard_regs_live_count
3861 1.1 mrg for the conditions that this array is set. */
3862 1.1 mrg
3863 1.1 mrg bool
3864 1.1 mrg df_hard_reg_used_p (unsigned int reg)
3865 1.1 mrg {
3866 1.1 mrg return df->hard_regs_live_count[reg] != 0;
3867 1.1 mrg }
3868 1.1 mrg
3869 1.1 mrg
3870 1.1 mrg /* A count of the number of times REG is actually used in the some
3871 1.1 mrg instruction. There are a fair number of conditions that affect the
3872 1.1 mrg setting of this array. See the comment in df.h for
3873 1.1 mrg df->hard_regs_live_count for the conditions that this array is
3874 1.1 mrg set. */
3875 1.1 mrg
3876 1.1 mrg
3877 1.1 mrg unsigned int
3878 1.1 mrg df_hard_reg_used_count (unsigned int reg)
3879 1.1 mrg {
3880 1.1 mrg return df->hard_regs_live_count[reg];
3881 1.1 mrg }
3882 1.1 mrg
3883 1.1 mrg
3884 1.1 mrg /* Get the value of regs_ever_live[REGNO]. */
3885 1.1 mrg
3886 1.1 mrg bool
3887 1.1 mrg df_regs_ever_live_p (unsigned int regno)
3888 1.1 mrg {
3889 1.1 mrg return regs_ever_live[regno];
3890 1.1 mrg }
3891 1.1 mrg
3892 1.1 mrg /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
3893 1.1 mrg to change, schedule that change for the next update. */
3894 1.1 mrg
3895 1.1 mrg void
3896 1.1 mrg df_set_regs_ever_live (unsigned int regno, bool value)
3897 1.1 mrg {
3898 1.1 mrg if (regs_ever_live[regno] == value)
3899 1.1 mrg return;
3900 1.1 mrg
3901 1.1 mrg regs_ever_live[regno] = value;
3902 1.1 mrg if (df)
3903 1.1 mrg df->redo_entry_and_exit = true;
3904 1.1 mrg }
3905 1.1 mrg
3906 1.1 mrg
3907 1.1 mrg /* Compute "regs_ever_live" information from the underlying df
3908 1.1 mrg information. Set the vector to all false if RESET. */
3909 1.1 mrg
3910 1.1 mrg void
3911 1.1 mrg df_compute_regs_ever_live (bool reset)
3912 1.1 mrg {
3913 1.1 mrg unsigned int i;
3914 1.1 mrg bool changed = df->redo_entry_and_exit;
3915 1.1 mrg
3916 1.1 mrg if (reset)
3917 1.1 mrg memset (regs_ever_live, 0, sizeof (regs_ever_live));
3918 1.1 mrg
3919 1.1 mrg for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3920 1.1 mrg if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3921 1.1 mrg {
3922 1.1 mrg regs_ever_live[i] = true;
3923 1.1 mrg changed = true;
3924 1.1 mrg }
3925 1.1 mrg if (changed)
3926 1.1 mrg df_update_entry_exit_and_calls ();
3927 1.1 mrg df->redo_entry_and_exit = false;
3928 1.1 mrg }
3929 1.1 mrg
3930 1.1 mrg
3931 1.1 mrg /*----------------------------------------------------------------------------
3933 1.1 mrg Dataflow ref information verification functions.
3934 1.1 mrg
3935 1.1 mrg df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3936 1.1 mrg df_reg_chain_verify_unmarked (refs)
3937 1.1 mrg df_refs_verify (vec<stack, va_df_ref>, ref*, bool)
3938 1.1 mrg df_mws_verify (mw*, mw*, bool)
3939 1.1 mrg df_insn_refs_verify (collection_rec, bb, insn, bool)
3940 1.1 mrg df_bb_refs_verify (bb, refs, bool)
3941 1.1 mrg df_bb_verify (bb)
3942 1.1 mrg df_exit_block_bitmap_verify (bool)
3943 1.1 mrg df_entry_block_bitmap_verify (bool)
3944 1.1 mrg df_scan_verify ()
3945 1.1 mrg ----------------------------------------------------------------------------*/
3946 1.1 mrg
3947 1.1 mrg
3948 1.1 mrg /* Mark all refs in the reg chain. Verify that all of the registers
3949 1.1 mrg are in the correct chain. */
3950 1.1 mrg
static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  /* Returns the number of refs in the chain; the caller compares this
     against the recorded per-register counts.  */
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      /* A ref must not be visited twice through the reg chains.  */
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_REG_DEF_P (ref));
      else
	gcc_assert (!DF_REF_REG_DEF_P (ref));

      /* EQ uses (from REG_EQUAL/REG_EQUIV notes) live in their own
	 chain, flagged with DF_REF_IN_NOTE.  */
      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      /* The doubly-linked reg chain must be consistent.  */
      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      /* Leave the ref marked; later verification stages unmark it.  */
      DF_REF_REG_MARK (ref);
    }
  return count;
}
3985 1.1 mrg
3986 1.1 mrg
3987 1.1 mrg /* Verify that all of the registers in the chain are unmarked. */
3988 1.1 mrg
3989 1.1 mrg static void
3990 1.1 mrg df_reg_chain_verify_unmarked (df_ref refs)
3991 1.1 mrg {
3992 1.1 mrg df_ref ref;
3993 1.1 mrg for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
3994 1.1 mrg gcc_assert (!DF_REF_IS_REG_MARKED (ref));
3995 1.1 mrg }
3996 1.1 mrg
3997 1.1 mrg
3998 1.1 mrg /* Verify that NEW_REC and OLD_REC have exactly the same members. */
3999 1.1 mrg
static bool
df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  /* Walk both sequences in lockstep; each new ref must match the
     corresponding old one.  */
  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
    {
      if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* Abort if fail is called from the function level verifier.  If
	 that is the context, mark this reg as being seem.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
	  DF_REF_REG_UNMARK (old_rec);
	}

      old_rec = DF_REF_NEXT_LOC (old_rec);
    }

  /* Both sequences must end together.  NOTE(review): in the
     abort_if_fail mode control falls through to the final `return
     false' even on success; callers ignore the return value in that
     mode (see df_insn_refs_verify).  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
4034 1.1 mrg
4035 1.1 mrg
4036 1.1 mrg /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4037 1.1 mrg
static bool
df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
	       struct df_mw_hardreg *old_rec,
	       bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  /* Walk both sequences in lockstep; each new multiword-hardreg entry
     must match the corresponding old one.  */
  FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
    {
      if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      old_rec = DF_MWS_NEXT (old_rec);
    }

  /* Both sequences must end together.  NOTE(review): as in
     df_refs_verify, the abort_if_fail path falls through to `return
     false' on success; callers ignore the value in that mode.  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
4064 1.1 mrg
4065 1.1 mrg
4066 1.1 mrg /* Return true if the existing insn refs information is complete and
4067 1.1 mrg correct. Otherwise (i.e. if there's any missing or extra refs),
4068 1.1 mrg return the correct df_ref chain in REFS_RETURN.
4069 1.1 mrg
4070 1.1 mrg If ABORT_IF_FAIL, leave the refs that are verified (already in the
4071 1.1 mrg ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4072 1.1 mrg verification mode instead of the whole function, so unmark
4073 1.1 mrg everything.
4074 1.1 mrg
4075 1.1 mrg If ABORT_IF_FAIL is set, this function never returns false. */
4076 1.1 mrg
static bool
df_insn_refs_verify (class df_collection_rec *collection_rec,
		     basic_block bb,
		     rtx_insn *insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  /* Recompute the refs for INSN from scratch into COLLECTION_REC and
     compare them against the cached per-insn chains.  */
  df_insn_refs_collect (collection_rec, bb, insn_info);

  /* Unfortunately we cannot opt out early if one of these is not
     right and abort_if_fail is set because the marks will not get cleared.  */
  ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  if (!ret1 && !abort_if_fail)
    return false;
  ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  if (!ret2 && !abort_if_fail)
    return false;
  ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  if (!ret3 && !abort_if_fail)
    return false;
  /* Also compare the multiword hardreg records.  */
  if (! df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
		       abort_if_fail))
    return false;
  return (ret1 && ret2 && ret3);
}
4108 1.1 mrg
4109 1.1 mrg
4110 1.1 mrg /* Return true if all refs in the basic block are correct and complete.
4111 1.1 mrg Due to df_ref_chain_verify, it will cause all refs
4112 1.1 mrg that are verified to have DF_REF_MARK bit set. */
4113 1.1 mrg
static bool
df_bb_verify (basic_block bb)
{
  rtx_insn *insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  class df_collection_rec collection_rec;

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from end to beginning (note
     FOR_BB_INSNS_REVERSE).  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
	continue;
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  /* Failures abort inside the verifiers, so reaching here means the
     block checked out.  */
  return true;
}
4140 1.1 mrg
4141 1.1 mrg
4142 1.1 mrg /* Returns true if the entry block has correct and complete df_ref set.
4143 1.1 mrg If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4144 1.1 mrg
4145 1.1 mrg static bool
4146 1.1 mrg df_entry_block_bitmap_verify (bool abort_if_fail)
4147 1.1 mrg {
4148 1.1 mrg bool is_eq;
4149 1.1 mrg
4150 1.1 mrg auto_bitmap entry_block_defs (&df_bitmap_obstack);
4151 1.1 mrg df_get_entry_block_def_set (entry_block_defs);
4152 1.1 mrg
4153 1.1 mrg is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4154 1.1 mrg
4155 1.1 mrg if (!is_eq && abort_if_fail)
4156 1.1 mrg {
4157 1.1 mrg fprintf (stderr, "entry_block_defs = ");
4158 1.1 mrg df_print_regset (stderr, entry_block_defs);
4159 1.1 mrg fprintf (stderr, "df->entry_block_defs = ");
4160 1.1 mrg df_print_regset (stderr, df->entry_block_defs);
4161 1.1 mrg gcc_assert (0);
4162 1.1 mrg }
4163 1.1 mrg
4164 1.1 mrg return is_eq;
4165 1.1 mrg }
4166 1.1 mrg
4167 1.1 mrg
4168 1.1 mrg /* Returns true if the exit block has correct and complete df_ref set.
4169 1.1 mrg If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4170 1.1 mrg
4171 1.1 mrg static bool
4172 1.1 mrg df_exit_block_bitmap_verify (bool abort_if_fail)
4173 1.1 mrg {
4174 1.1 mrg bool is_eq;
4175 1.1 mrg
4176 1.1 mrg auto_bitmap exit_block_uses (&df_bitmap_obstack);
4177 1.1 mrg df_get_exit_block_use_set (exit_block_uses);
4178 1.1 mrg
4179 1.1 mrg is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4180 1.1 mrg
4181 1.1 mrg if (!is_eq && abort_if_fail)
4182 1.1 mrg {
4183 1.1 mrg fprintf (stderr, "exit_block_uses = ");
4184 1.1 mrg df_print_regset (stderr, exit_block_uses);
4185 1.1 mrg fprintf (stderr, "df->exit_block_uses = ");
4186 1.1 mrg df_print_regset (stderr, df->exit_block_uses);
4187 1.1 mrg gcc_assert (0);
4188 1.1 mrg }
4189 1.1 mrg
4190 1.1 mrg return is_eq;
4191 1.1 mrg }
4192 1.1 mrg
4193 1.1 mrg
4194 1.1 mrg /* Return true if df_ref information for all insns in all blocks are
4195 1.1 mrg correct and complete. */
4196 1.1 mrg
4197 1.1 mrg void
4198 1.1 mrg df_scan_verify (void)
4199 1.1 mrg {
4200 1.1 mrg unsigned int i;
4201 1.1 mrg basic_block bb;
4202 1.1 mrg
4203 1.1 mrg if (!df)
4204 1.1 mrg return;
4205 1.1 mrg
4206 1.1 mrg /* Verification is a 4 step process. */
4207 1.1 mrg
4208 1.1 mrg /* (1) All of the refs are marked by going through the reg chains. */
4209 1.1 mrg for (i = 0; i < DF_REG_SIZE (df); i++)
4210 1.1 mrg {
4211 1.1 mrg gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4212 1.1 mrg == DF_REG_DEF_COUNT (i));
4213 1.1 mrg gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4214 1.1 mrg == DF_REG_USE_COUNT (i));
4215 1.1 mrg gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4216 1.1 mrg == DF_REG_EQ_USE_COUNT (i));
4217 1.1 mrg }
4218 1.1 mrg
4219 1.1 mrg /* (2) There are various bitmaps whose value may change over the
4220 1.1 mrg course of the compilation. This step recomputes them to make
4221 1.1 mrg sure that they have not slipped out of date. */
4222 1.1 mrg auto_bitmap regular_block_artificial_uses (&df_bitmap_obstack);
4223 1.1 mrg auto_bitmap eh_block_artificial_uses (&df_bitmap_obstack);
4224 1.1 mrg
4225 1.1 mrg df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4226 1.1 mrg df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4227 1.1 mrg
4228 1.1 mrg bitmap_ior_into (eh_block_artificial_uses,
4229 1.1 mrg regular_block_artificial_uses);
4230 1.1 mrg
4231 1.1 mrg /* Check artificial_uses bitmaps didn't change. */
4232 1.1 mrg gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4233 1.1 mrg &df->regular_block_artificial_uses));
4234 1.1 mrg gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4235 1.1 mrg &df->eh_block_artificial_uses));
4236 1.1 mrg
4237 1.1 mrg /* Verify entry block and exit block. These only verify the bitmaps,
4238 1.1 mrg the refs are verified in df_bb_verify. */
4239 1.1 mrg df_entry_block_bitmap_verify (true);
4240 1.1 mrg df_exit_block_bitmap_verify (true);
4241 1.1 mrg
4242 1.1 mrg /* (3) All of the insns in all of the blocks are traversed and the
4243 1.1 mrg marks are cleared both in the artificial refs attached to the
4244 1.1 mrg blocks and the real refs inside the insns. It is a failure to
4245 1.1 mrg clear a mark that has not been set as this means that the ref in
4246 1.1 mrg the block or insn was not in the reg chain. */
4247 1.1 mrg
4248 1.1 mrg FOR_ALL_BB_FN (bb, cfun)
4249 1.1 mrg df_bb_verify (bb);
4250 1.1 mrg
4251 1.1 mrg /* (4) See if all reg chains are traversed a second time. This time
4252 1.1 mrg a check is made that the marks are clear. A set mark would be a
4253 1.1 mrg from a reg that is not in any insn or basic block. */
4254 1.1 mrg
4255 for (i = 0; i < DF_REG_SIZE (df); i++)
4256 {
4257 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4258 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4259 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4260 }
4261 }
4262