/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "timevar.h"
#include "cfganal.h"
#include "cfgloop.h"
#include "diagnostic.h"

namespace {
/* Store the data structures necessary for depth-first search.  */
class depth_first_search
{
public:
  depth_first_search ();

  basic_block execute (basic_block);
  void add_bb (basic_block);

private:
  /* stack for backtracking during the algorithm */
  auto_vec<basic_block, 20> m_stack;

  /* record of basic blocks already seen by depth-first search */
  auto_sbitmap m_visited_blocks;
};
}

/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (struct function *fun)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (fun));
  post = XCNEWVEC (int, last_basic_block_for_fn (fun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fun)
          && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);

          pre[dest->index] = prenum++;
          if (EDGE_COUNT (dest->succs) > 0)
            {
              /* Since the DEST node has been visited for the first
                 time, check its successors.  */
              stack.quick_push (ei_start (dest->succs));
            }
          else
            post[dest->index] = postnum++;
        }
      else
        {
          if (dest != EXIT_BLOCK_PTR_FOR_FN (fun)
              && src != ENTRY_BLOCK_PTR_FOR_FN (fun)
              && pre[src->index] >= pre[dest->index]
              && post[dest->index] == 0)
            ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

          if (ei_one_before_end_p (ei)
              && src != ENTRY_BLOCK_PTR_FOR_FN (fun))
            post[src->index] = postnum++;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack.last ());
          else
            stack.pop ();
        }
    }

  free (pre);
  free (post);

  return found;
}

bool
mark_dfs_back_edges (void)
{
  return mark_dfs_back_edges (cfun);
}

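/* A hypothetical usage sketch (not part of the original file): a pass
   that needs loop back-edge information might do

     mark_dfs_back_edges ();
     FOR_EACH_BB_FN (bb, cfun)
       FOR_EACH_EDGE (e, ei, bb->succs)
         if (e->flags & EDGE_DFS_BACK)
           ...treat E as a retreating edge...

   where BB, E and EI are locals of the usual basic_block, edge and
   edge_iterator types.  */
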
/* Verify that EDGE_DFS_BACK is up to date for FUN; report an internal
   error if it is not.  */

void
verify_marked_backedges (struct function *fun)
{
  auto_edge_flag saved_dfs_back (fun);
  basic_block bb;
  edge e;
  edge_iterator ei;

  // Save all the back edges...
  FOR_EACH_BB_FN (bb, fun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
        if (e->flags & EDGE_DFS_BACK)
          {
            e->flags |= saved_dfs_back;
            e->flags &= ~EDGE_DFS_BACK;
          }
      }

  // ...and verify that recalculating them agrees with the saved ones.
  mark_dfs_back_edges ();
  FOR_EACH_BB_FN (bb, fun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
        if (((e->flags & EDGE_DFS_BACK) != 0)
            != ((e->flags & saved_dfs_back) != 0))
          internal_error ("%<verify_marked_backedges%> failed");

        e->flags &= ~saved_dfs_back;
      }
}

/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A nonzero value indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
        {
          basic_block dest = e->dest;

          if (!(dest->flags & BB_REACHABLE))
            {
              *tos++ = dest;
              dest->flags |= BB_REACHABLE;
            }
        }
    }

  free (worklist);
}

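/* A hypothetical usage sketch (assumed, not from the original file):
   after calling find_unreachable_blocks, unreachable blocks can be
   detected by testing the flag directly,

     find_unreachable_blocks ();
     FOR_EACH_BB_FN (bb, cfun)
       if (!(bb->flags & BB_REACHABLE))
         ...BB cannot be reached from the entry block...  */
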
/* Verify that there are no unreachable blocks in the current function.  */

void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}


/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

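/* A hypothetical usage sketch (assumed, not from the original file):
   a client that wants per-edge data indexed by a dense integer might do

     struct edge_list *el = create_edge_list ();
     int idx = EDGE_INDEX (el, pred_bb, succ_bb);
     if (idx != EDGE_INDEX_NO_EDGE)
       ...use IDX to index a client-side array of per-edge data...
     free_edge_list (el);

   where PRED_BB and SUCC_BB are basic blocks of interest.  */
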
/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, and all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}

/* This function frees memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
           n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        fprintf (f, "entry,");
      else
        fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
        fprintf (f, "exit)\n");
      else
        fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          pred = e->src->index;
          succ = e->dest->index;
          index = EDGE_INDEX (elist, e->src, e->dest);
          if (index == EDGE_INDEX_NO_EDGE)
            {
              fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
              continue;
            }

          if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
            fprintf (f, "*p* Pred for index %d should be %d not %d\n",
                     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
          if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
            fprintf (f, "*p* Succ for index %d should be %d not %d\n",
                     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
        }
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
        int found_edge = 0;

        FOR_EACH_EDGE (e, ei, p->succs)
          if (e->dest == s)
            {
              found_edge = 1;
              break;
            }

        FOR_EACH_EDGE (e, ei, s->preds)
          if (e->src == p)
            {
              found_edge = 1;
              break;
            }

        if (EDGE_INDEX (elist, p, s)
            == EDGE_INDEX_NO_EDGE && found_edge != 0)
          fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
                   p->index, s->index);
        if (EDGE_INDEX (elist, p, s)
            != EDGE_INDEX_NO_EDGE && found_edge == 0)
          fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
                   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
                                                     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (&control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (&control_dependence_map[bb->index]);
}

/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  */

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  ending_block = get_immediate_dominator (CDI_POST_DOMINATORS,
                                          get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = get_immediate_dominator (CDI_POST_DOMINATORS,
                                                current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}

/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
        /* For abnormal edges, we don't make current_block control
           dependent because instructions that throw are always necessary
           anyway.  */
        if (e->flags & EDGE_ABNORMAL)
          {
            num_edges--;
            continue;
          }
        m_el.quick_push (std::make_pair (e->src->index, e->dest->index));
      }

  bitmap_obstack_initialize (&m_bitmaps);
  control_dependence_map.create (last_basic_block_for_fn (cfun));
  control_dependence_map.quick_grow (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    bitmap_initialize (&control_dependence_map[i], &m_bitmaps);
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  control_dependence_map.release ();
  m_el.release ();
  bitmap_obstack_release (&m_bitmaps);
}

/* Returns the bitmap of edges the basic-block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return &control_dependence_map[i];
}

/* Returns the edge source with index I from the edge list.  */

basic_block
control_dependences::get_edge_src (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}

/* Returns the edge destination with index I from the edge list.  */

basic_block
control_dependences::get_edge_dest (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}

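/* A hypothetical usage sketch (assumed, not from the original file):
   the class requires post-dominator information, so a client might do

     calculate_dominance_info (CDI_POST_DOMINATORS);
     control_dependences *cd = new control_dependences ();
     bitmap deps = cd->get_edges_dependent_on (bb->index);
     ...iterate over DEPS, mapping indexes back via get_edge_src
        and get_edge_dest...
     delete cd;
     free_dominance_info (CDI_POST_DOMINATORS);  */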

/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
        if (e->dest == succ)
          return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
        if (e->src == pred)
          return e;
    }

  return NULL;
}

/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
        && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
        remove_edge (e);
      else
        ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}


/* This function will add a fake edge between any block which has no
   successors, and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}

/* This function adds fake edges from noreturn blocks and infinite
   loops to the exit block.  Some optimizations require a path
   from each node to the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  /* First add fake exits to noreturn blocks, this is required to
     discover only truly infinite loops below.  */
  add_noreturn_fake_exit_edges ();

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
        break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
                          EDGE_FAKE);
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}

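/* A hypothetical usage sketch (assumed, not from the original file):
   passes that need every block to reach the exit, for example to
   compute post-dominators, typically bracket the computation like

     connect_infinite_loops_to_exit ();
     calculate_dominance_info (CDI_POST_DOMINATORS);
     ...use the post-dominator tree...
     free_dominance_info (CDI_POST_DOMINATORS);
     remove_fake_exit_edges ();  */
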
/* Compute reverse top sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
                    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
          && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);

          if (EDGE_COUNT (dest->succs) > 0)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack.quick_push (ei_start (dest->succs));
          else
            post_order[post_order_num++] = dest->index;
        }
      else
        {
          if (ei_one_before_end_p (ei)
              && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            post_order[post_order_num++] = src->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack.last ());
          else
            stack.pop ();
        }
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
           != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
        {
          next_bb = b->next_bb;

          if (!(bitmap_bit_p (visited, b->index)))
            delete_basic_block (b);
        }

      tidy_fallthru_edges ();
    }

  return post_order_num;
}

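/* A hypothetical usage sketch (assumed, not from the original file):
   the caller provides an array large enough for every block,

     int *post = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = post_order_compute (post, false, false);
     for (int i = 0; i < n; i++)
       ...process BASIC_BLOCK_FOR_FN (cfun, post[i])...
     free (post);  */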

/* Helper routine for inverted_post_order_compute,
   flow_dfs_compute_reverse_execute, and the reverse-CFG
   depth first search in dominance.cc.
   BB has to belong to a region of CFG
   unreachable by inverted traversal from the exit.
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      if (EDGE_COUNT (next->succs) == 0)
        return next;

      if (! bitmap_set_bit (visited, next->index))
        return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
         Note this is a heuristic only and expected to work when loop
         fixup is needed as well.  */
      if (! bb->loop_father
          || ! loop_outer (bb->loop_father))
        next = EDGE_SUCC (bb, 0)->dest;
      else
        {
          edge_iterator ei;
          edge e;
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (loop_exit_edge_p (bb->loop_father, e))
              break;
          next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
        }
    }
}


/* Compute the reverse top sort order of the inverted CFG
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from exit block and all
   basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such a problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   and start looking for a "dead end" from that block
   and do another inverted traversal from that block.  */

void
inverted_post_order_compute (vec<int> *post_order,
                             sbitmap *start_points)
{
  basic_block bb;
  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      FOR_ALL_BB_FN (bb, cfun)
        if (bitmap_bit_p (*start_points, bb->index)
            && EDGE_COUNT (bb->preds) > 0)
          {
            stack.quick_push (ei_start (bb->preds));
            bitmap_set_bit (visited, bb->index);
          }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
        {
          stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
          bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
        }
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
        {
          /* Push the initial edge on to the stack.  */
          if (EDGE_COUNT (bb->preds) > 0)
            {
              stack.quick_push (ei_start (bb->preds));
              bitmap_set_bit (visited, bb->index);
            }
        }

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
        {
          edge_iterator ei;
          basic_block pred;

          /* Look at the edge on the top of the stack.  */
          ei = stack.last ();
          bb = ei_edge (ei)->dest;
          pred = ei_edge (ei)->src;

          /* Check if the predecessor has been visited yet.  */
          if (! bitmap_bit_p (visited, pred->index))
            {
              /* Mark that we have visited the destination.  */
              bitmap_set_bit (visited, pred->index);

              if (EDGE_COUNT (pred->preds) > 0)
                /* Since the predecessor node has been visited for the first
                   time, check its predecessors.  */
                stack.quick_push (ei_start (pred->preds));
              else
                post_order->quick_push (pred->index);
            }
          else
            {
              if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
                  && ei_one_before_end_p (ei))
                post_order->quick_push (bb->index);

              if (!ei_one_before_end_p (ei))
                ei_next (&stack.last ());
              else
                stack.pop ();
            }
        }

      /* Detect any infinite loop and activate the kludge.
         Note that this doesn't check EXIT_BLOCK itself
         since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
                      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
        if (!bitmap_bit_p (visited, bb->index))
          {
            has_unvisited_bb = true;

            if (EDGE_COUNT (bb->preds) > 0)
              {
                edge_iterator ei;
                edge e;
                basic_block visited_pred = NULL;

                /* Find an already visited predecessor.  */
                FOR_EACH_EDGE (e, ei, bb->preds)
                  {
                    if (bitmap_bit_p (visited, e->src->index))
                      visited_pred = e->src;
                  }

                if (visited_pred)
                  {
                    basic_block be = dfs_find_deadend (bb);
                    gcc_assert (be != NULL);
                    bitmap_set_bit (visited, be->index);
                    stack.quick_push (ei_start (be->preds));
                    break;
                  }
              }
          }

      if (has_unvisited_bb && stack.is_empty ())
        {
          /* No blocks are reachable from EXIT at all.
             Find a dead-end from the ENTRY, and restart the iteration.  */
          basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
          gcc_assert (be != NULL);
          bitmap_set_bit (visited, be->index);
          stack.quick_push (ei_start (be->preds));
        }

      /* The only case the below while fires is
         when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  post_order->quick_push (EXIT_BLOCK);
}

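/* A hypothetical usage sketch (assumed, not from the original file):

     auto_vec<int> postorder;
     inverted_post_order_compute (&postorder, NULL);
     for (unsigned i = 0; i < postorder.length (); i++)
       ...process BASIC_BLOCK_FOR_FN (cfun, postorder[i])...

   The vector is sized by the call itself via reserve_exact.  */
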
/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
                                   int *pre_order, int *rev_post_order,
                                   bool include_entry_exit)
{
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      if (pre_order)
        pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
        rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
          && ! (dest->flags & visited))
        {
          /* Mark that we have visited the destination.  */
          dest->flags |= visited;

          if (pre_order)
            pre_order[pre_order_num] = dest->index;

          pre_order_num++;

          if (EDGE_COUNT (dest->succs) > 0)
            /* Since the DEST node has been visited for the first
               time, check its successors.  */
            stack.quick_push (ei_start (dest->succs));
          else if (rev_post_order)
            /* There are no successors for the DEST node so assign
               its reverse completion number.  */
            rev_post_order[rev_post_order_num--] = dest->index;
        }
      else
        {
          if (ei_one_before_end_p (ei)
              && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
              && rev_post_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
            rev_post_order[rev_post_order_num--] = src->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack.last ());
          else
            stack.pop ();
        }
    }

  if (include_entry_exit)
    {
      if (pre_order)
        pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
        rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  /* Clear the temporarily allocated flag.  */
  if (!rev_post_order)
    rev_post_order = pre_order;
  for (int i = 0; i < pre_order_num; ++i)
    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags &= ~visited;

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
                                bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
                                         include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
                == (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}

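/* A hypothetical usage sketch (assumed, not from the original file):
   many passes only need the reverse post-order,

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = pre_and_rev_post_order_compute (NULL, rpo, false);
     for (int i = 0; i < n; i++)
       ...visit BASIC_BLOCK_FOR_FN (cfun, rpo[i]) in RPO...
     free (rpo);  */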

/* Per basic-block data for rev_post_order_and_mark_dfs_back_seme,
   element of a sparsely populated array indexed by basic-block number.  */
typedef auto_vec<int, 2> scc_exit_vec_t;
struct rpoamdbs_bb_data {
  int depth;
  int bb_to_pre;
  /* The basic-block index of the SCC entry of the block visited first
     (the SCC leader).  */
  int scc;
  /* The index into the RPO array where the block's SCC entries end
     (only valid for the SCC leader).  */
  int scc_end;
  /* The indexes of the exit destinations of this SCC (only valid
     for the SCC leader).  Initialized upon discovery of SCC leaders.  */
  scc_exit_vec_t scc_exits;
};

/* Tag H as a header of B, weaving H and its loop header list into the
   current loop header list of B.  */

static void
tag_header (int b, int h, rpoamdbs_bb_data *bb_data)
{
  if (h == -1 || b == h)
    return;
  int cur1 = b;
  int cur2 = h;
  while (bb_data[cur1].scc != -1)
    {
      int ih = bb_data[cur1].scc;
      if (ih == cur2)
        return;
      if (bb_data[ih].depth < bb_data[cur2].depth)
        {
          bb_data[cur1].scc = cur2;
          cur1 = cur2;
          cur2 = ih;
        }
      else
        cur1 = ih;
    }
  bb_data[cur1].scc = cur2;
}

/* Comparator sorting two edge destinations E1 and E2 by their index
   in the PRE array as specified by BB_TO_PRE.  */

static int
cmp_edge_dest_pre (const void *e1_, const void *e2_, void *data_)
{
  const int *e1 = (const int *)e1_;
  const int *e2 = (const int *)e2_;
  rpoamdbs_bb_data *bb_data = (rpoamdbs_bb_data *)data_;
  return (bb_data[*e1].bb_to_pre - bb_data[*e2].bb_to_pre);
}

/* Compute the reverse completion number of a depth first search
   on the SEME region denoted by the ENTRY edge and the EXIT_BBS set of
   exit block indexes and store it in the array REV_POST_ORDER.
   Also sets the EDGE_DFS_BACK edge flags according to this visitation
   order.
   Returns the number of nodes visited.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   If FOR_ITERATION is true then compute an RPO where SCCs form a
   contiguous region in the RPO array.
   *TOPLEVEL_SCC_EXTENTS if not NULL is filled with pairs of
   *REV_POST_ORDER indexes denoting extents of the toplevel SCCs in
   this region.  */

int
rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
                                       bitmap exit_bbs, bool for_iteration,
                                       int *rev_post_order,
                                       vec<std::pair<int, int> >
                                         *toplevel_scc_extents)
{
  int rev_post_order_num = 0;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Lazily initialized per-BB data for the two DFS walks below.  */
  rpoamdbs_bb_data *bb_data
    = XNEWVEC (rpoamdbs_bb_data, last_basic_block_for_fn (fn));

  /* First DFS walk, loop discovery according to
       A New Algorithm for Identifying Loops in Decompilation
     by Tao Wei, Jian Mao, Wei Zou and You Chen of the Institute of
     Computer Science and Technology of the Peking University.  */
  auto_vec<edge_iterator, 20> ei_stack (n_basic_blocks_for_fn (fn) + 1);
  auto_bb_flag is_header (fn);
  int depth = 1;
  unsigned n_sccs = 0;

  basic_block dest = entry->dest;
  edge_iterator ei;
  int pre_num = 0;

  /* DFS process DEST.  */
find_loops:
  bb_data[dest->index].bb_to_pre = pre_num++;
  bb_data[dest->index].depth = depth;
  bb_data[dest->index].scc = -1;
  depth++;
  gcc_assert ((dest->flags & (is_header|visited)) == 0);
  dest->flags |= visited;
  ei = ei_start (dest->succs);
  while (!ei_end_p (ei))
    {
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
      if (bitmap_bit_p (exit_bbs, ei_edge (ei)->dest->index))
        ;
      else if (!(ei_edge (ei)->dest->flags & visited))
        {
          ei_stack.quick_push (ei);
          dest = ei_edge (ei)->dest;
          /* DFS recurse on DEST.  */
          goto find_loops;

ret_from_find_loops:
          /* Return point of DFS recursion.  */
          ei = ei_stack.pop ();
          dest = ei_edge (ei)->src;
          int header = bb_data[ei_edge (ei)->dest->index].scc;
          tag_header (dest->index, header, bb_data);
          depth = bb_data[dest->index].depth + 1;
        }
      else
        {
          if (bb_data[ei_edge (ei)->dest->index].depth > 0) /* on the stack */
            {
              ei_edge (ei)->flags |= EDGE_DFS_BACK;
              n_sccs++;
              ei_edge (ei)->dest->flags |= is_header;
              ::new (&bb_data[ei_edge (ei)->dest->index].scc_exits)
                auto_vec<int, 2> ();
              tag_header (dest->index, ei_edge (ei)->dest->index, bb_data);
            }
          else if (bb_data[ei_edge (ei)->dest->index].scc == -1)
            ;
          else
            {
              int header = bb_data[ei_edge (ei)->dest->index].scc;
              if (bb_data[header].depth > 0)
                tag_header (dest->index, header, bb_data);
              else
                {
                  /* A re-entry into an existing loop.  */
                  /* ???  Need to mark is_header?  */
                  while (bb_data[header].scc != -1)
                    {
                      header = bb_data[header].scc;
                      if (bb_data[header].depth > 0)
                        {
                          tag_header (dest->index, header, bb_data);
                          break;
                        }
                    }
                }
            }
        }
      ei_next (&ei);
    }
  rev_post_order[rev_post_order_num++] = dest->index;
  /* not on the stack anymore */
  bb_data[dest->index].depth = -bb_data[dest->index].depth;
  if (!ei_stack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_find_loops;

  /* Optimize for no SCCs found or !for_iteration.  */
  if (n_sccs == 0 || !for_iteration)
    {
      /* Clear the temporarily allocated flags.  */
      for (int i = 0; i < rev_post_order_num; ++i)
        BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
          &= ~(is_header|visited);
      /* And swap elements.  */
      for (int i = 0; i < rev_post_order_num/2; ++i)
        std::swap (rev_post_order[i], rev_post_order[rev_post_order_num-i-1]);
      XDELETEVEC (bb_data);

      return rev_post_order_num;
    }

  /* Next find SCC exits, clear the visited flag and compute an upper bound
     for the edge stack below.  */
  unsigned edge_count = 0;
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      int bb = rev_post_order[i];
      BASIC_BLOCK_FOR_FN (fn, bb)->flags &= ~visited;
      edge e;
      FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fn, bb)->succs)
        {
          if (bitmap_bit_p (exit_bbs, e->dest->index))
            continue;
          edge_count++;
          /* if e is an exit from e->src, record it for
             bb_data[e->src].scc.  */
          int src_scc = e->src->index;
          if (!(e->src->flags & is_header))
            src_scc = bb_data[src_scc].scc;
          if (src_scc == -1)
            continue;
          int dest_scc = e->dest->index;
          if (!(e->dest->flags & is_header))
            dest_scc = bb_data[dest_scc].scc;
          if (src_scc == dest_scc)
            continue;
          /* When dest_scc is nested inside src_scc it's not an
             exit.  */
          int tem_dest_scc = dest_scc;
          unsigned dest_scc_depth = 0;
          while (tem_dest_scc != -1)
            {
              dest_scc_depth++;
              if ((tem_dest_scc = bb_data[tem_dest_scc].scc) == src_scc)
                break;
            }
          if (tem_dest_scc != -1)
            continue;
          /* When src_scc is nested inside dest_scc record an
             exit from the outermost SCC this edge exits.  */
          int tem_src_scc = src_scc;
          unsigned src_scc_depth = 0;
          while (tem_src_scc != -1)
            {
              if (bb_data[tem_src_scc].scc == dest_scc)
                {
                  edge_count++;
                  bb_data[tem_src_scc].scc_exits.safe_push (e->dest->index);
                  break;
                }
              tem_src_scc = bb_data[tem_src_scc].scc;
              src_scc_depth++;
            }
          /* Else find the outermost SCC this edge exits (exits
             from the inner SCCs are not important for the DFS
             walk adjustment).  Do so by computing the common
             ancestor SCC whose immediate child on the path to the
             source SCC is the exited SCC.  */
          if (tem_src_scc == -1)
            {
              edge_count++;
              while (src_scc_depth > dest_scc_depth)
                {
                  src_scc = bb_data[src_scc].scc;
                  src_scc_depth--;
                }
              while (dest_scc_depth > src_scc_depth)
                {
                  dest_scc = bb_data[dest_scc].scc;
                  dest_scc_depth--;
                }
              while (bb_data[src_scc].scc != bb_data[dest_scc].scc)
                {
                  src_scc = bb_data[src_scc].scc;
                  dest_scc = bb_data[dest_scc].scc;
                }
              bb_data[src_scc].scc_exits.safe_push (e->dest->index);
            }
        }
    }

  /* Now the second DFS walk to compute a RPO where the extent of SCCs
     is minimized thus SCC members are adjacent in the RPO array.
     This is done by performing a DFS walk computing RPO with first visiting
     extra direct edges from SCC entry to its exits.
     That simulates a DFS walk over the graph with SCCs collapsed and
     walking the SCCs themselves only when all outgoing edges from the
     SCCs have been visited.
     SCC_END[scc-header-index] is the position in the RPO array of the
     last member of the SCC.  */
  auto_vec<std::pair<basic_block, basic_block>, 20> estack (edge_count + 1);
  int idx = rev_post_order_num;
  basic_block edest;
  dest = entry->dest;

  /* DFS process DEST.  */
dfs_rpo:
  gcc_checking_assert ((dest->flags & visited) == 0);
  /* Verify we enter SCCs through the same header and SCC nesting appears
     the same.  */
  gcc_assert (bb_data[dest->index].scc == -1
              || (BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc)->flags
                  & visited));
  dest->flags |= visited;
  bb_data[dest->index].scc_end = -1;
  if ((dest->flags & is_header)
      && !bb_data[dest->index].scc_exits.is_empty ())
    {
      /* Push all the SCC exits as outgoing edges from its header to
         be visited first.
         To process exits in the same relative order as in the first
         DFS walk, sort them by their destination PRE order index.  */
      gcc_sort_r (&bb_data[dest->index].scc_exits[0],
                  bb_data[dest->index].scc_exits.length (),
                  sizeof (int), cmp_edge_dest_pre, bb_data);
      /* Process edges in reverse to match previous DFS walk order.  */
      for (int i = bb_data[dest->index].scc_exits.length () - 1; i >= 0; --i)
        estack.quick_push (std::make_pair
          (dest, BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc_exits[i])));
    }
  else
    {
      if (dest->flags & is_header)
        bb_data[dest->index].scc_end = idx - 1;
      /* Push the edge vector in reverse to match the iteration order
         from the DFS walk above.  */
      for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
        if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
          estack.quick_push (std::make_pair (dest,
                                             EDGE_SUCC (dest, i)->dest));
    }
  while (!estack.is_empty ()
         && estack.last ().first == dest)
    {
      edest = estack.last ().second;
      if (!(edest->flags & visited))
        {
          dest = edest;
          /* DFS recurse on DEST.  */
          goto dfs_rpo;

ret_from_dfs_rpo:
          /* Return point of DFS recursion.  */
          dest = estack.last ().first;
        }
      estack.pop ();
      /* If we processed all SCC exits from DEST mark the SCC end
         since all RPO entries up to DEST itself will now belong
         to its SCC.  The special-case of no SCC exits is already
         dealt with above.  */
      if (dest->flags & is_header
          /* When the last exit edge was processed mark the SCC end
             and push the regular edges.  */
          && bb_data[dest->index].scc_end == -1
          && (estack.is_empty ()
              || estack.last ().first != dest))
        {
          bb_data[dest->index].scc_end = idx - 1;
          /* Push the edge vector in reverse to match the iteration order
             from the DFS walk above.  */
          for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
            if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
              estack.quick_push (std::make_pair (dest,
                                                 EDGE_SUCC (dest, i)->dest));
        }
    }
  rev_post_order[--idx] = dest->index;
  if (!estack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_dfs_rpo;

  /* Each SCC extends from the position of its header inside
     the RPO array up to the RPO array index scc_end[header-index].  */
  if (toplevel_scc_extents)
    for (int i = 0; i < rev_post_order_num; i++)
      {
        basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
        if (bb->flags & is_header)
          {
            toplevel_scc_extents->safe_push
              (std::make_pair (i, bb_data[bb->index].scc_end));
            i = bb_data[bb->index].scc_end;
          }
      }

  /* Clear the temporarily allocated flags and free memory.  */
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
      if (bb->flags & is_header)
        bb_data[bb->index].scc_exits.~scc_exit_vec_t ();
      bb->flags &= ~(visited|is_header);
    }

  XDELETEVEC (bb_data);

  return rev_post_order_num;
}

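/* A hypothetical usage sketch (assumed, not from the original file):
   walking the whole function body as a single-entry multiple-exit
   region, assuming the entry block has a single successor edge and
   making the exit block the only region exit,

     auto_bitmap exit_bbs;
     bitmap_set_bit (exit_bbs, EXIT_BLOCK);
     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = rev_post_order_and_mark_dfs_back_seme
               (cfun, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
                exit_bbs, true, rpo, NULL);
     ...visit rpo[0 .. n-1]...
     free (rpo);  */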
1481 1.1 mrg
1482 1.1 mrg
1483 1.1 mrg /* Perform a depth-first search on the _reverse_ graph, marking the
1484 1.1 mrg    blocks visited as the search proceeds.
1485 1.1 mrg 
1486 1.1 mrg    The computation is split across the members of the depth_first_search
1487 1.1 mrg    class:
1488 1.1 mrg 
1489 1.1 mrg    the constructor creates the necessary data structures;
1490 1.1 mrg 
1491 1.1 mrg    add_bb () adds a basic block to the data structures.  The block will
1492 1.1 mrg    start the search;
1493 1.1 mrg 
1494 1.1 mrg    execute () continues (or starts) the search using the block on the
1495 1.1 mrg    top of the stack, stopping when the stack is empty;
1496 1.1 mrg 
1497 1.1 mrg    the destructor releases the data structures again.
1498 1.1 mrg 
1499 1.1 mrg    Thus, the user will typically construct a depth_first_search object,
1500 1.1 mrg    call add_bb () to push a starting basic block onto the stack, call
1501 1.1 mrg    execute (), possibly add another bb and call execute () again, ...,
1502 1.1 mrg    and finally let the object go out of scope.  */
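/* As a sketch, the call pattern described above might look like the
   following hypothetical driver (real callers may choose the blocks to
   restart from differently):

     depth_first_search dfs;
     dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
     basic_block next = dfs.execute (EXIT_BLOCK_PTR_FOR_FN (cfun));
     while (next)
       {
	 dfs.add_bb (next);
	 next = dfs.execute (next);
       }
*/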
1506 1.1 mrg
1507 1.1 mrg /* Initialize the data structures used for depth-first search on the
1508 1.1 mrg    reverse graph.  The stack starts out empty; use add_bb to seed it
1509 1.1 mrg    before calling execute.  */
1512 1.1 mrg
1513 1.1 mrg depth_first_search::depth_first_search () :
1514 1.1 mrg m_stack (n_basic_blocks_for_fn (cfun)),
1515 1.1 mrg m_visited_blocks (last_basic_block_for_fn (cfun))
1516 1.1 mrg {
1517 1.1 mrg bitmap_clear (m_visited_blocks);
1518 1.1 mrg }
1519 1.1 mrg
1520 1.1 mrg /* Add the specified basic block to the top of the dfs data
1521 1.1 mrg structures. When the search continues, it will start at the
1522 1.1 mrg block. */
1523 1.1 mrg
1524 1.1 mrg void
1525 1.1 mrg depth_first_search::add_bb (basic_block bb)
1526 1.1 mrg {
1527 1.1 mrg m_stack.quick_push (bb);
1528 1.1 mrg bitmap_set_bit (m_visited_blocks, bb->index);
1529 1.1 mrg }
1530 1.1 mrg
1531 1.1 mrg /* Continue the depth-first search through the reverse graph starting with
1532 1.1 mrg    the block at the stack's top and ending when the stack is empty.  Visited
1533 1.1 mrg    nodes are marked.  Returns an unvisited basic block found by scanning
1534 1.1 mrg    backwards from LAST_UNVISITED, or NULL if none remains.  */
1535 1.1 mrg
1536 1.1 mrg basic_block
1537 1.1 mrg depth_first_search::execute (basic_block last_unvisited)
1538 1.1 mrg {
1539 1.1 mrg basic_block bb;
1540 1.1 mrg edge e;
1541 1.1 mrg edge_iterator ei;
1542 1.1 mrg
1543 1.1 mrg while (!m_stack.is_empty ())
1544 1.1 mrg {
1545 1.1 mrg bb = m_stack.pop ();
1546 1.1 mrg
1547 1.1 mrg /* Perform depth-first search on adjacent vertices. */
1548 1.1 mrg FOR_EACH_EDGE (e, ei, bb->preds)
1549 1.1 mrg if (!bitmap_bit_p (m_visited_blocks, e->src->index))
1550 1.1 mrg add_bb (e->src);
1551 1.1 mrg }
1552 1.1 mrg
1553 1.1 mrg /* Determine if there are unvisited basic blocks. */
1554 1.1 mrg FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
1555 1.1 mrg if (!bitmap_bit_p (m_visited_blocks, bb->index))
1556 1.1 mrg return bb;
1557 1.1 mrg
1558 1.1 mrg return NULL;
1559 1.1 mrg }
1560 1.1 mrg
1561 1.1 mrg /* Perform a depth-first search from BB over vertices satisfying PREDICATE,
1562 1.1 mrg    passing DATA to it; if REVERSE is nonzero, go against the edge direction.
1563 1.1 mrg    Return the number of blocks found and their list in RSLT (at most RSLT_MAX).  */
1564 1.1 mrg int
1565 1.1 mrg dfs_enumerate_from (basic_block bb, int reverse,
1566 1.1 mrg bool (*predicate) (const_basic_block, const void *),
1567 1.1 mrg basic_block *rslt, int rslt_max, const void *data)
1568 1.1 mrg {
1569 1.1 mrg basic_block *st, lbb;
1570 1.1 mrg int sp = 0, tv = 0;
1571 1.1 mrg
1572 1.1 mrg auto_bb_flag visited (cfun);
1573 1.1 mrg
1574 1.1 mrg #define MARK_VISITED(BB) ((BB)->flags |= visited)
1575 1.1 mrg #define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
1576 1.1 mrg #define VISITED_P(BB) (((BB)->flags & visited) != 0)
1577 1.1 mrg
1578 1.1 mrg st = XNEWVEC (basic_block, rslt_max);
1579 1.1 mrg rslt[tv++] = st[sp++] = bb;
1580 1.1 mrg MARK_VISITED (bb);
1581 1.1 mrg while (sp)
1582 1.1 mrg {
1583 1.1 mrg edge e;
1584 1.1 mrg edge_iterator ei;
1585 1.1 mrg lbb = st[--sp];
1586 1.1 mrg if (reverse)
1587 1.1 mrg {
1588 1.1 mrg FOR_EACH_EDGE (e, ei, lbb->preds)
1589 1.1 mrg if (!VISITED_P (e->src) && predicate (e->src, data))
1590 1.1 mrg {
1591 1.1 mrg gcc_assert (tv != rslt_max);
1592 1.1 mrg rslt[tv++] = st[sp++] = e->src;
1593 1.1 mrg MARK_VISITED (e->src);
1594 1.1 mrg }
1595 1.1 mrg }
1596 1.1 mrg else
1597 1.1 mrg {
1598 1.1 mrg FOR_EACH_EDGE (e, ei, lbb->succs)
1599 1.1 mrg if (!VISITED_P (e->dest) && predicate (e->dest, data))
1600 1.1 mrg {
1601 1.1 mrg gcc_assert (tv != rslt_max);
1602 1.1 mrg rslt[tv++] = st[sp++] = e->dest;
1603 1.1 mrg MARK_VISITED (e->dest);
1604 1.1 mrg }
1605 1.1 mrg }
1606 1.1 mrg }
1607 1.1 mrg free (st);
1608 1.1 mrg for (sp = 0; sp < tv; sp++)
1609 1.1 mrg UNMARK_VISITED (rslt[sp]);
1610 1.1 mrg return tv;
1611 1.1 mrg #undef MARK_VISITED
1612 1.1 mrg #undef UNMARK_VISITED
1613 1.1 mrg #undef VISITED_P
1614 1.1 mrg }
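/* As a sketch, a hypothetical caller could use dfs_enumerate_from to
   collect the blocks reachable from HEADER without leaving the region
   dominated by it, assuming dominator information has been computed
   (predicate and variable names invented for the example):

     static bool
     dominated_by_header_p (const_basic_block bb, const void *data)
     {
       return dominated_by_p (CDI_DOMINATORS, bb, (const_basic_block) data);
     }

     basic_block *body = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
     int n = dfs_enumerate_from (header, 0, dominated_by_header_p, body,
				 n_basic_blocks_for_fn (cfun), header);
     free (body);
*/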
1615 1.1 mrg
1616 1.1 mrg
1617 1.1 mrg /* Compute dominance frontiers, ala Harvey, Ferrante, et al.
1618 1.1 mrg
1619 1.1 mrg This algorithm can be found in Timothy Harvey's PhD thesis, at
1620 1.1 mrg http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
1621 1.1 mrg dominance algorithms.
1622 1.1 mrg
1623 1.1 mrg First, we identify each join point, j (any node with more than one
1624 1.1 mrg incoming edge is a join point).
1625 1.1 mrg
1626 1.1 mrg We then examine each predecessor, p, of j and walk up the dominator tree
1627 1.1 mrg starting at p.
1628 1.1 mrg
1629 1.1 mrg We stop the walk when we reach j's immediate dominator - j is in the
1630 1.1 mrg dominance frontier of each of the nodes in the walk, except for j's
1631 1.1 mrg immediate dominator. Intuitively, all of the rest of j's dominators are
1632 1.1 mrg shared by j's predecessors as well.
1633 1.1 mrg Since they dominate j, they will not have j in their dominance frontiers.
1634 1.1 mrg
1635 1.1 mrg The number of nodes touched by this algorithm is equal to the size
1636 1.1 mrg of the dominance frontiers, no more, no less.
1637 1.1 mrg */
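/* As an illustration, consider a hypothetical diamond-shaped CFG:

       A -> B,  A -> C,  B -> D,  C -> D

   D is the only join point and its immediate dominator is A.  The walk
   up from predecessor B stops at A and adds D to B's dominance frontier;
   the walk from C likewise adds D to C's frontier.  Hence DF(B) = DF(C)
   = {D}, while DF(A) and DF(D) remain empty.  */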
1638 1.1 mrg
1639 1.1 mrg void
1640 1.1 mrg compute_dominance_frontiers (bitmap_head *frontiers)
1641 1.1 mrg {
1642 1.1 mrg timevar_push (TV_DOM_FRONTIERS);
1643 1.1 mrg
1644 1.1 mrg edge p;
1645 1.1 mrg edge_iterator ei;
1646 1.1 mrg basic_block b;
1647 1.1 mrg FOR_EACH_BB_FN (b, cfun)
1648 1.1 mrg {
1649 1.1 mrg if (EDGE_COUNT (b->preds) >= 2)
1650 1.1 mrg {
1651 1.1 mrg basic_block domsb = get_immediate_dominator (CDI_DOMINATORS, b);
1652 1.1 mrg FOR_EACH_EDGE (p, ei, b->preds)
1653 1.1 mrg {
1654 1.1 mrg basic_block runner = p->src;
1655 1.1 mrg if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1656 1.1 mrg continue;
1657 1.1 mrg
1658 1.1 mrg while (runner != domsb)
1659 1.1 mrg {
1660 1.1 mrg if (!bitmap_set_bit (&frontiers[runner->index], b->index))
1661 1.1 mrg break;
1662 1.1 mrg runner = get_immediate_dominator (CDI_DOMINATORS, runner);
1663 1.1 mrg }
1664 1.1 mrg }
1665 1.1 mrg }
1666 1.1 mrg }
1667 1.1 mrg
1668 1.1 mrg timevar_pop (TV_DOM_FRONTIERS);
1669 1.1 mrg }
1670 1.1 mrg
1671 1.1 mrg /* Given a set of blocks with variable definitions (DEF_BLOCKS),
1672 1.1 mrg return a bitmap with all the blocks in the iterated dominance
1673 1.1 mrg frontier of the blocks in DEF_BLOCKS. DFS contains dominance
1674 1.1 mrg frontier information as returned by compute_dominance_frontiers.
1675 1.1 mrg
1676 1.1 mrg    The resulting set of blocks gives the potential sites where PHI nodes
1677 1.1 mrg    are needed.  The caller is responsible for freeing the memory
1678 1.1 mrg allocated for the return value. */
1679 1.1 mrg
1680 1.1 mrg bitmap
1681 1.1 mrg compute_idf (bitmap def_blocks, bitmap_head *dfs)
1682 1.1 mrg {
1683 1.1 mrg bitmap_iterator bi;
1684 1.1 mrg unsigned bb_index, i;
1685 1.1 mrg bitmap phi_insertion_points;
1686 1.1 mrg
1687 1.1 mrg phi_insertion_points = BITMAP_ALLOC (NULL);
1688 1.1 mrg
1689 1.1 mrg /* Seed the work set with all the blocks in DEF_BLOCKS. */
1690 1.1 mrg auto_bitmap work_set;
1691 1.1 mrg bitmap_copy (work_set, def_blocks);
1692 1.1 mrg bitmap_tree_view (work_set);
1693 1.1 mrg
1694 1.1 mrg   /* Pop a block off the work set and add to the work set every block
1695 1.1 mrg      that appears in the popped block's DF and that we have not already
1696 1.1 mrg      processed.  Iterate until the work set is empty.  Blocks added to
1697 1.1 mrg      the work set are potential sites for PHI nodes.  */
1699 1.1 mrg while (!bitmap_empty_p (work_set))
1700 1.1 mrg {
1701 1.1 mrg       /* The dominance frontier of a block consists of blocks that come
1702 1.1 mrg 	 after it, so iterating over earlier blocks first is better.
1703 1.1 mrg 	 ??? Basic blocks are by no means guaranteed to be ordered in
1704 1.1 mrg 	 optimal order for this iteration.  */
1705 1.1 mrg bb_index = bitmap_first_set_bit (work_set);
1706 1.1 mrg bitmap_clear_bit (work_set, bb_index);
1707 1.1 mrg
1708 1.1 mrg       /* Since the registration of NEW -> OLD name mappings is done
1709 1.1 mrg 	 separately from the call to update_ssa, the basic blocks where
1710 1.1 mrg 	 new and/or old names are defined may have been removed by
1711 1.1 mrg 	 intervening CFG cleanups by the time the SSA form is updated.
1712 1.1 mrg 	 In that case we could pull a nonexistent block off the work set.  */
1713 1.1 mrg gcc_checking_assert (bb_index
1714 1.1 mrg < (unsigned) last_basic_block_for_fn (cfun));
1715 1.1 mrg
1716 1.1 mrg EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
1717 1.1 mrg 0, i, bi)
1718 1.1 mrg {
1719 1.1 mrg bitmap_set_bit (work_set, i);
1720 1.1 mrg bitmap_set_bit (phi_insertion_points, i);
1721 1.1 mrg }
1722 1.1 mrg }
1723 1.1 mrg
1724 1.1 mrg return phi_insertion_points;
1725 1.1 mrg }
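/* As a sketch, a hypothetical caller computing PHI insertion points for a
   single variable might pair compute_dominance_frontiers and compute_idf
   as below, DEF_BB being one block that defines the variable (the real
   SSA updater keeps considerably more state):

     basic_block bb;
     bitmap_head *dfs = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
     FOR_ALL_BB_FN (bb, cfun)
       bitmap_initialize (&dfs[bb->index], &bitmap_default_obstack);
     compute_dominance_frontiers (dfs);

     auto_bitmap def_blocks;
     bitmap_set_bit (def_blocks, def_bb->index);
     bitmap idf = compute_idf (def_blocks, dfs);
     BITMAP_FREE (idf);
*/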
1726 1.1 mrg
1727 1.1 mrg /* Intersection and union of preds/succs for sbitmap based data flow
1728 1.1 mrg solvers. All four functions defined below take the same arguments:
1729 1.1 mrg B is the basic block to perform the operation for. DST is the
1730 1.1 mrg target sbitmap, i.e. the result. SRC is an sbitmap vector of size
1731 1.1 mrg last_basic_block so that it can be indexed with basic block indices.
1732 1.1 mrg DST may be (but does not have to be) SRC[B->index]. */
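/* As a sketch, a forward "all paths" problem such as availability could
   drive bitmap_intersection_of_preds from a simple round-robin solver
   like the hypothetical one below, where in, out, gen and kill are
   sbitmap vectors set up by the caller:

     bool changed = true;
     while (changed)
       {
	 changed = false;
	 basic_block bb;
	 FOR_EACH_BB_FN (bb, cfun)
	   {
	     bitmap_intersection_of_preds (in[bb->index], out, bb);
	     if (bitmap_ior_and_compl (out[bb->index], gen[bb->index],
				       in[bb->index], kill[bb->index]))
	       changed = true;
	   }
       }
*/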
1733 1.1 mrg
1734 1.1 mrg /* Set the bitmap DST to the intersection of the SRC bitmaps of the
1735 1.1 mrg    successors of basic block B.  */
1736 1.1 mrg
1737 1.1 mrg void
1738 1.1 mrg bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
1739 1.1 mrg {
1740 1.1 mrg unsigned int set_size = dst->size;
1741 1.1 mrg edge e;
1742 1.1 mrg unsigned ix;
1743 1.1 mrg
1744 1.1 mrg for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1745 1.1 mrg {
1746 1.1 mrg e = EDGE_SUCC (b, ix);
1747 1.1 mrg if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1748 1.1 mrg continue;
1749 1.1 mrg
1750 1.1 mrg bitmap_copy (dst, src[e->dest->index]);
1751 1.1 mrg break;
1752 1.1 mrg }
1753 1.1 mrg
1754 1.1 mrg if (e == 0)
1755 1.1 mrg bitmap_ones (dst);
1756 1.1 mrg else
1757 1.1 mrg for (++ix; ix < EDGE_COUNT (b->succs); ix++)
1758 1.1 mrg {
1759 1.1 mrg unsigned int i;
1760 1.1 mrg SBITMAP_ELT_TYPE *p, *r;
1761 1.1 mrg
1762 1.1 mrg e = EDGE_SUCC (b, ix);
1763 1.1 mrg if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1764 1.1 mrg continue;
1765 1.1 mrg
1766 1.1 mrg p = src[e->dest->index]->elms;
1767 1.1 mrg r = dst->elms;
1768 1.1 mrg for (i = 0; i < set_size; i++)
1769 1.1 mrg *r++ &= *p++;
1770 1.1 mrg }
1771 1.1 mrg }
1772 1.1 mrg
1773 1.1 mrg /* Set the bitmap DST to the intersection of the SRC bitmaps of the
1774 1.1 mrg    predecessors of basic block B.  */
1775 1.1 mrg
1776 1.1 mrg void
1777 1.1 mrg bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
1778 1.1 mrg {
1779 1.1 mrg unsigned int set_size = dst->size;
1780 1.1 mrg edge e;
1781 1.1 mrg unsigned ix;
1782 1.1 mrg
1783 1.1 mrg for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
1784 1.1 mrg {
1785 1.1 mrg e = EDGE_PRED (b, ix);
1786 1.1 mrg if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1787 1.1 mrg continue;
1788 1.1 mrg
1789 1.1 mrg bitmap_copy (dst, src[e->src->index]);
1790 1.1 mrg break;
1791 1.1 mrg }
1792 1.1 mrg
1793 1.1 mrg if (e == 0)
1794 1.1 mrg bitmap_ones (dst);
1795 1.1 mrg else
1796 1.1 mrg for (++ix; ix < EDGE_COUNT (b->preds); ix++)
1797 1.1 mrg {
1798 1.1 mrg unsigned int i;
1799 1.1 mrg SBITMAP_ELT_TYPE *p, *r;
1800 1.1 mrg
1801 1.1 mrg e = EDGE_PRED (b, ix);
1802 1.1 mrg if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1803 1.1 mrg continue;
1804 1.1 mrg
1805 1.1 mrg p = src[e->src->index]->elms;
1806 1.1 mrg r = dst->elms;
1807 1.1 mrg for (i = 0; i < set_size; i++)
1808 1.1 mrg *r++ &= *p++;
1809 1.1 mrg }
1810 1.1 mrg }
1811 1.1 mrg
1812 1.1 mrg /* Set the bitmap DST to the union of the SRC bitmaps of the
1813 1.1 mrg    successors of basic block B.  */
1814 1.1 mrg
1815 1.1 mrg void
1816 1.1 mrg bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
1817 1.1 mrg {
1818 1.1 mrg unsigned int set_size = dst->size;
1819 1.1 mrg edge e;
1820 1.1 mrg unsigned ix;
1821 1.1 mrg
1822 1.1 mrg for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
1823 1.1 mrg {
1824 1.1 mrg e = EDGE_SUCC (b, ix);
1825 1.1 mrg if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1826 1.1 mrg continue;
1827 1.1 mrg
1828 1.1 mrg bitmap_copy (dst, src[e->dest->index]);
1829 1.1 mrg break;
1830 1.1 mrg }
1831 1.1 mrg
1832 1.1 mrg if (ix == EDGE_COUNT (b->succs))
1833 1.1 mrg bitmap_clear (dst);
1834 1.1 mrg else
1835 1.1 mrg for (ix++; ix < EDGE_COUNT (b->succs); ix++)
1836 1.1 mrg {
1837 1.1 mrg unsigned int i;
1838 1.1 mrg SBITMAP_ELT_TYPE *p, *r;
1839 1.1 mrg
1840 1.1 mrg e = EDGE_SUCC (b, ix);
1841 1.1 mrg if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1842 1.1 mrg continue;
1843 1.1 mrg
1844 1.1 mrg p = src[e->dest->index]->elms;
1845 1.1 mrg r = dst->elms;
1846 1.1 mrg for (i = 0; i < set_size; i++)
1847 1.1 mrg *r++ |= *p++;
1848 1.1 mrg }
1849 1.1 mrg }
1850 1.1 mrg
1851 1.1 mrg /* Set the bitmap DST to the union of the SRC bitmaps of the
1852 1.1 mrg    predecessors of basic block B.  */
1853 1.1 mrg
1854 1.1 mrg void
1855 1.1 mrg bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
1856 1.1 mrg {
1857 1.1 mrg unsigned int set_size = dst->size;
1858 1.1 mrg edge e;
1859 1.1 mrg unsigned ix;
1860 1.1 mrg
1861 1.1 mrg for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
1862 1.1 mrg {
1863 1.1 mrg e = EDGE_PRED (b, ix);
1864 1.1 mrg       if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1865 1.1 mrg continue;
1866 1.1 mrg
1867 1.1 mrg bitmap_copy (dst, src[e->src->index]);
1868 1.1 mrg break;
1869 1.1 mrg }
1870 1.1 mrg
1871 1.1 mrg if (ix == EDGE_COUNT (b->preds))
1872 1.1 mrg bitmap_clear (dst);
1873 1.1 mrg else
1874 1.1 mrg for (ix++; ix < EDGE_COUNT (b->preds); ix++)
1875 1.1 mrg {
1876 1.1 mrg unsigned int i;
1877 1.1 mrg SBITMAP_ELT_TYPE *p, *r;
1878 1.1 mrg
1879 1.1 mrg e = EDGE_PRED (b, ix);
1880 1.1 mrg if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1881 1.1 mrg continue;
1882 1.1 mrg
1883 1.1 mrg p = src[e->src->index]->elms;
1884 1.1 mrg r = dst->elms;
1885 1.1 mrg for (i = 0; i < set_size; i++)
1886 1.1 mrg *r++ |= *p++;
1887 1.1 mrg }
1888 1.1 mrg }
1889 1.1 mrg
1890 1.1 mrg /* Returns the list of basic blocks in the function in an order that guarantees
1891 1.1 mrg that if a block X has just a single predecessor Y, then Y is after X in the
1892 1.1 mrg ordering. */
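/* For example, for a chain A -> B -> C in which B and C each have a single
   predecessor, the returned array lists C first, then B, then A, so every
   block's unique predecessor indeed appears later in the array.  */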
1893 1.1 mrg
1894 1.1 mrg basic_block *
1895 1.1 mrg single_pred_before_succ_order (void)
1896 1.1 mrg {
1897 1.1 mrg basic_block x, y;
1898 1.1 mrg basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
1899 1.1 mrg unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
1900 1.1 mrg unsigned np, i;
1901 1.1 mrg auto_sbitmap visited (last_basic_block_for_fn (cfun));
1902 1.1 mrg
1903 1.1 mrg #define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
1904 1.1 mrg #define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))
1905 1.1 mrg
1906 1.1 mrg bitmap_clear (visited);
1907 1.1 mrg
1908 1.1 mrg MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
1909 1.1 mrg FOR_EACH_BB_FN (x, cfun)
1910 1.1 mrg {
1911 1.1 mrg if (VISITED_P (x))
1912 1.1 mrg continue;
1913 1.1 mrg
1914 1.1 mrg /* Walk the predecessors of x as long as they have precisely one
1915 1.1 mrg predecessor and add them to the list, so that they get stored
1916 1.1 mrg after x. */
1917 1.1 mrg for (y = x, np = 1;
1918 1.1 mrg single_pred_p (y) && !VISITED_P (single_pred (y));
1919 1.1 mrg y = single_pred (y))
1920 1.1 mrg np++;
1921 1.1 mrg for (y = x, i = n - np;
1922 1.1 mrg single_pred_p (y) && !VISITED_P (single_pred (y));
1923 1.1 mrg y = single_pred (y), i++)
1924 1.1 mrg {
1925 1.1 mrg order[i] = y;
1926 1.1 mrg MARK_VISITED (y);
1927 1.1 mrg }
1928 1.1 mrg order[i] = y;
1929 1.1 mrg MARK_VISITED (y);
1930 1.1 mrg
1931 1.1 mrg gcc_assert (i == n - 1);
1932 1.1 mrg n -= np;
1933 1.1 mrg }
1934 1.1 mrg
1935 1.1 mrg gcc_assert (n == 0);
1936 1.1 mrg return order;
1937 1.1 mrg
1938 1.1 mrg #undef MARK_VISITED
1939 1.1 mrg #undef VISITED_P
1940 1.1 mrg }
1941 1.1 mrg
1942 1.1 mrg /* Ignoring loop backedges, if BB has precisely one incoming edge then
1943 1.1 mrg return that edge. Otherwise return NULL.
1944 1.1 mrg
1945 1.1 mrg When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not marked
1946 1.1 mrg as executable. */
1947 1.1 mrg
1948 1.1 mrg edge
1949 1.1 mrg single_pred_edge_ignoring_loop_edges (basic_block bb,
1950 1.1 mrg bool ignore_not_executable)
1951 1.1 mrg {
1952 1.1 mrg edge retval = NULL;
1953 1.1 mrg edge e;
1954 1.1 mrg edge_iterator ei;
1955 1.1 mrg
1956 1.1 mrg FOR_EACH_EDGE (e, ei, bb->preds)
1957 1.1 mrg {
1958 1.1 mrg /* A loop back edge can be identified by the destination of
1959 1.1 mrg the edge dominating the source of the edge. */
1960 1.1 mrg if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
1961 1.1 mrg continue;
1962 1.1 mrg
1963 1.1 mrg /* We can safely ignore edges that are not executable. */
1964 1.1 mrg if (ignore_not_executable
1965 1.1 mrg && (e->flags & EDGE_EXECUTABLE) == 0)
1966 1.1 mrg continue;
1967 1.1 mrg
1968 1.1 mrg /* If we have already seen a non-loop edge, then we must have
1969 1.1 mrg multiple incoming non-loop edges and thus we return NULL. */
1970 1.1 mrg if (retval)
1971 1.1 mrg return NULL;
1972 1.1 mrg
1973 1.1 mrg /* This is the first non-loop incoming edge we have found. Record
1974 1.1 mrg it. */
1975 1.1 mrg retval = e;
1976 }
1977
1978 return retval;
1979 }
1980