/* Basic block path solver.
   Copyright (C) 2021-2022 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh (at) redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 1.1 mrg
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfganal.h"
#include "value-range.h"
#include "gimple-range.h"
#include "tree-pretty-print.h"
#include "gimple-range-path.h"
#include "ssa.h"
#include "tree-cfg.h"
#include "gimple-iterator.h"
35 1.1 mrg
36 1.1 mrg // Internal construct to help facilitate debugging of solver.
37 1.1 mrg #define DEBUG_SOLVER (dump_file && (param_threader_debug == THREADER_DEBUG_ALL))
38 1.1 mrg
// Construct a path solver.
//
// RESOLVE true means statements are folded making use of relations
// registered along the path; false means plain folding.  RANGER may
// be provided by the caller to share its cache; if NULL, a private
// gimple_ranger is allocated and freed in the destructor
// (m_alloced_ranger records which case we are in).

path_range_query::path_range_query (bool resolve, gimple_ranger *ranger)
  : m_cache (new ssa_global_cache),
    m_has_cache_entry (BITMAP_ALLOC (NULL)),
    m_resolve (resolve),
    m_alloced_ranger (!ranger)
{
  if (m_alloced_ranger)
    m_ranger = new gimple_ranger;
  else
    m_ranger = ranger;

  // Layer a path oracle over the ranger's oracle so out-of-path
  // relations can still be queried.
  m_oracle = new path_oracle (m_ranger->oracle ());

  // The resolver relies on back edges being marked (see
  // relations_may_be_invalidated), so verify that with checking.
  if (m_resolve && flag_checking)
    verify_marked_backedges (cfun);
}
55 1.1 mrg
// Destroy the path solver, releasing the oracle, the cache-entry
// bitmap, the range cache, and the ranger if we allocated it ourselves.

path_range_query::~path_range_query ()
{
  delete m_oracle;
  // Only delete the ranger if the constructor allocated it; a
  // caller-supplied ranger is owned by the caller.
  if (m_alloced_ranger)
    delete m_ranger;
  BITMAP_FREE (m_has_cache_entry);
  delete m_cache;
}
64 1.1 mrg
65 1.1 mrg // Return TRUE if NAME is in the import bitmap.
66 1.1 mrg
67 1.1 mrg bool
68 1.1 mrg path_range_query::import_p (tree name)
69 1.1 mrg {
70 1.1 mrg return (TREE_CODE (name) == SSA_NAME
71 1.1 mrg && bitmap_bit_p (m_imports, SSA_NAME_VERSION (name)));
72 1.1 mrg }
73 1.1 mrg
74 1.1 mrg // Mark cache entry for NAME as unused.
75 1.1 mrg
76 1.1 mrg void
77 1.1 mrg path_range_query::clear_cache (tree name)
78 1.1 mrg {
79 1.1 mrg unsigned v = SSA_NAME_VERSION (name);
80 1.1 mrg bitmap_clear_bit (m_has_cache_entry, v);
81 1.1 mrg }
82 1.1 mrg
83 1.1 mrg // If NAME has a cache entry, return it in R, and return TRUE.
84 1.1 mrg
// If NAME has a cache entry, return it in R, and return TRUE.
//
// Non-SSA operands (constants, etc.) are not cached; they are
// resolved through the global range query instead.

inline bool
path_range_query::get_cache (irange &r, tree name)
{
  if (!gimple_range_ssa_p (name))
    return get_global_range_query ()->range_of_expr (r, name);

  // The bitmap, not the cache itself, is authoritative: an entry may
  // physically exist in m_cache but have been invalidated via
  // clear_cache, in which case we report a miss.
  unsigned v = SSA_NAME_VERSION (name);
  if (bitmap_bit_p (m_has_cache_entry, v))
    return m_cache->get_global_range (r, name);

  return false;
}
97 1.1 mrg
98 1.1 mrg // Set the cache entry for NAME to R.
99 1.1 mrg
100 1.1 mrg void
101 1.1 mrg path_range_query::set_cache (const irange &r, tree name)
102 1.1 mrg {
103 1.1 mrg unsigned v = SSA_NAME_VERSION (name);
104 1.1 mrg bitmap_set_bit (m_has_cache_entry, v);
105 1.1 mrg m_cache->set_global_range (name, r);
106 1.1 mrg }
107 1.1 mrg
// Dump the current path, its imports and the solver's cache to
// DUMP_FILE.  NOTE: the parameter intentionally shadows the global
// dump_file for the duration of this function.

void
path_range_query::dump (FILE *dump_file)
{
  // Temporarily redirect global dumping to our FILE, with details
  // suppressed; restored when SAVE goes out of scope.
  push_dump_file save (dump_file, dump_flags & ~TDF_DETAILS);

  if (m_path.is_empty ())
    return;

  unsigned i;
  bitmap_iterator bi;

  dump_ranger (dump_file, m_path);

  fprintf (dump_file, "Imports:\n");
  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  m_cache->dump (dump_file);
}
131 1.1 mrg
// Dump the solver's state to stderr, for use from the debugger.

void
path_range_query::debug ()
{
  dump (stderr);
}
137 1.1 mrg
138 1.1 mrg // Return TRUE if NAME is defined outside the current path.
139 1.1 mrg
140 1.1 mrg bool
141 1.1 mrg path_range_query::defined_outside_path (tree name)
142 1.1 mrg {
143 1.1 mrg gimple *def = SSA_NAME_DEF_STMT (name);
144 1.1 mrg basic_block bb = gimple_bb (def);
145 1.1 mrg
146 1.1 mrg return !bb || !m_path.contains (bb);
147 1.1 mrg }
148 1.1 mrg
149 1.1 mrg // Return the range of NAME on entry to the path.
150 1.1 mrg
// Return in R the range of NAME on entry to the path.  NAME must be
// defined outside the path.

void
path_range_query::range_on_path_entry (irange &r, tree name)
{
  gcc_checking_assert (defined_outside_path (name));
  basic_block entry = entry_bb ();

  // Prefer to use range_of_expr if we have a statement to look at,
  // since it has better caching than range_on_edge.
  gimple *last = last_stmt (entry);
  if (last)
    {
      if (m_ranger->range_of_expr (r, name, last))
	return;
      // range_of_expr is expected to always succeed here.
      gcc_unreachable ();
    }

  // If we have no statement, look at all the incoming ranges to the
  // block.  This can happen when we're querying a block with only an
  // outgoing edge (no statement but the fall through edge), but for
  // which we can determine a range on entry to the block.
  int_range_max tmp;
  bool changed = false;
  r.set_undefined ();
  for (unsigned i = 0; i < EDGE_COUNT (entry->preds); ++i)
    {
      edge e = EDGE_PRED (entry, i);
      // Skip the CFG's artificial entry block; union the ranges from
      // every other incoming edge.
      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
	  && m_ranger->range_on_edge (tmp, e, name))
	{
	  r.union_ (tmp);
	  changed = true;
	}
    }

  // Make sure we don't return UNDEFINED by mistake.
  if (!changed)
    r.set_varying (TREE_TYPE (name));
}
189 1.1 mrg
190 1.1 mrg // Return the range of NAME at the end of the path being analyzed.
191 1.1 mrg
// Return in R the range of NAME at the end of the path being
// analyzed, optionally relative to STMT.  Return FALSE only if the
// type of NAME is unsupported.

bool
path_range_query::internal_range_of_expr (irange &r, tree name, gimple *stmt)
{
  if (!irange::supports_type_p (TREE_TYPE (name)))
    return false;

  // Cache hit: answer directly.
  if (get_cache (r, name))
    return true;

  // Names defined outside the path get their range on path entry
  // (only meaningful when resolving with relations).
  if (m_resolve && defined_outside_path (name))
    {
      range_on_path_entry (r, name);
      set_cache (r, name);
      return true;
    }

  // Otherwise, try to compute the range from the definition in
  // STMT's block.
  if (stmt
      && range_defined_in_block (r, name, gimple_bb (stmt)))
    {
      // Refine with globally known information about NAME.
      if (TREE_CODE (name) == SSA_NAME)
	r.intersect (gimple_range_global (name));

      set_cache (r, name);
      return true;
    }

  // Fallback: the globally known range, deliberately NOT cached since
  // it was not computed in path context.
  r = gimple_range_global (name);
  return true;
}
221 1.1 mrg
222 1.1 mrg bool
223 1.1 mrg path_range_query::range_of_expr (irange &r, tree name, gimple *stmt)
224 1.1 mrg {
225 1.1 mrg if (internal_range_of_expr (r, name, stmt))
226 1.1 mrg {
227 1.1 mrg if (r.undefined_p ())
228 1.1 mrg m_undefined_path = true;
229 1.1 mrg
230 1.1 mrg return true;
231 1.1 mrg }
232 1.1 mrg return false;
233 1.1 mrg }
234 1.1 mrg
// Return TRUE if any range queried so far along this path came back
// UNDEFINED, meaning the path cannot be taken.

bool
path_range_query::unreachable_path_p ()
{
  return m_undefined_path;
}
240 1.1 mrg
241 1.1 mrg // Initialize the current path to PATH. The current block is set to
242 1.1 mrg // the entry block to the path.
243 1.1 mrg //
244 1.1 mrg // Note that the blocks are in reverse order, so the exit block is
245 1.1 mrg // path[0].
246 1.1 mrg
// Initialize the current path to PATH.  The current block is set to
// the entry block to the path.
//
// Note that the blocks are in reverse order, so the exit block is
// path[0].

void
path_range_query::set_path (const vec<basic_block> &path)
{
  gcc_checking_assert (path.length () > 1);
  m_path = path.copy ();
  // Position at the entry block, i.e. the last element.
  m_pos = m_path.length () - 1;
  // A new path invalidates every cached range.
  bitmap_clear (m_has_cache_entry);
}
255 1.1 mrg
256 1.1 mrg bool
257 1.1 mrg path_range_query::ssa_defined_in_bb (tree name, basic_block bb)
258 1.1 mrg {
259 1.1 mrg return (TREE_CODE (name) == SSA_NAME
260 1.1 mrg && SSA_NAME_DEF_STMT (name)
261 1.1 mrg && gimple_bb (SSA_NAME_DEF_STMT (name)) == bb);
262 1.1 mrg }
263 1.1 mrg
264 1.1 mrg // Return the range of the result of PHI in R.
265 1.1 mrg //
266 1.1 mrg // Since PHIs are calculated in parallel at the beginning of the
267 1.1 mrg // block, we must be careful to never save anything to the cache here.
268 1.1 mrg // It is the caller's responsibility to adjust the cache. Also,
269 1.1 mrg // calculating the PHI's range must not trigger additional lookups.
270 1.1 mrg
// Return the range of the result of PHI in R.
//
// Since PHIs are calculated in parallel at the beginning of the
// block, we must be careful to never save anything to the cache here.
// It is the caller's responsibility to adjust the cache.  Also,
// calculating the PHI's range must not trigger additional lookups.

void
path_range_query::ssa_range_in_phi (irange &r, gphi *phi)
{
  tree name = gimple_phi_result (phi);
  basic_block bb = gimple_bb (phi);
  unsigned nargs = gimple_phi_num_args (phi);

  // At the path entry we have no incoming path edge to pick an
  // argument with, so fall back to context-free information.
  if (at_entry ())
    {
      if (m_resolve && m_ranger->range_of_expr (r, name, phi))
	return;

      // Try to fold the phi exclusively with global or cached values.
      // This will get things like PHI <5(99), 6(88)>.  We do this by
      // calling range_of_expr with no context.
      int_range_max arg_range;
      r.set_undefined ();
      for (size_t i = 0; i < nargs; ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (range_of_expr (arg_range, arg, /*stmt=*/NULL))
	    r.union_ (arg_range);
	  else
	    {
	      // Any unfoldable argument makes the result VARYING.
	      r.set_varying (TREE_TYPE (name));
	      return;
	    }
	}
      return;
    }

  // Otherwise, the path determines exactly one incoming edge, and
  // thus exactly one PHI argument to consider.
  basic_block prev = prev_bb ();
  edge e_in = find_edge (prev, bb);

  for (size_t i = 0; i < nargs; ++i)
    if (e_in == gimple_phi_arg_edge (phi, i))
      {
	tree arg = gimple_phi_arg_def (phi, i);
	// Avoid using the cache for ARGs defined in this block, as
	// that could create an ordering problem.
	if (ssa_defined_in_bb (arg, bb) || !get_cache (r, arg))
	  {
	    if (m_resolve)
	      {
		int_range_max tmp;
		// Using both the range on entry to the path, and the
		// range on this edge yields significantly better
		// results.
		if (defined_outside_path (arg))
		  range_on_path_entry (r, arg);
		else
		  r.set_varying (TREE_TYPE (name));
		m_ranger->range_on_edge (tmp, e_in, arg);
		r.intersect (tmp);
		return;
	      }
	    r.set_varying (TREE_TYPE (name));
	  }
	return;
      }
  // The path edge must correspond to some PHI argument.
  gcc_unreachable ();
}
333 1.1 mrg
334 1.1 mrg // If NAME is defined in BB, set R to the range of NAME, and return
335 1.1 mrg // TRUE. Otherwise, return FALSE.
336 1.1 mrg
// If NAME is defined in BB, set R to the range of NAME, and return
// TRUE.  Otherwise, return FALSE.

bool
path_range_query::range_defined_in_block (irange &r, tree name, basic_block bb)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (name);
  basic_block def_bb = gimple_bb (def_stmt);

  if (def_bb != bb)
    return false;

  if (get_cache (r, name))
    return true;

  if (gimple_code (def_stmt) == GIMPLE_PHI)
    ssa_range_in_phi (r, as_a<gphi *> (def_stmt));
  else
    {
      // A new definition kills prior relations involving NAME in the
      // path oracle.
      if (name)
	get_path_oracle ()->killing_def (name);

      if (!range_of_stmt (r, def_stmt, name))
	r.set_varying (TREE_TYPE (name));
    }

  // Refine pointer ranges with non-null knowledge from this block.
  if (bb)
    m_non_null.adjust_range (r, name, bb, false);

  if (DEBUG_SOLVER && (bb || !r.varying_p ()))
    {
      fprintf (dump_file, "range_defined_in_block (BB%d) for ",
	       bb ? bb->index : -1);
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, " is ");
      r.dump (dump_file);
      fprintf (dump_file, "\n");
    }

  return true;
}
374 1.1 mrg
375 1.1 mrg // Compute ranges defined in the PHIs in this block.
376 1.1 mrg
// Compute ranges defined in the PHIs in this block.

void
path_range_query::compute_ranges_in_phis (basic_block bb)
{
  int_range_max r;
  auto_bitmap phi_set;

  // PHIs must be resolved simultaneously on entry to the block
  // because any dependencies must be satistifed with values on entry.
  // Thus, we calculate all PHIs first, and then update the cache at
  // the end.

  for (auto iter = gsi_start_phis (bb); !gsi_end_p (iter); gsi_next (&iter))
    {
      gphi *phi = iter.phi ();
      tree name = gimple_phi_result (phi);

      // Only imports matter to the solver.
      if (import_p (name) && range_defined_in_block (r, name, bb))
	{
	  unsigned v = SSA_NAME_VERSION (name);
	  set_cache (r, name);
	  bitmap_set_bit (phi_set, v);
	  // Pretend we don't have a cache entry for this name until
	  // we're done with all PHIs.
	  bitmap_clear_bit (m_has_cache_entry, v);
	}
    }
  // Now atomically "commit" all the PHI results computed above.
  bitmap_ior_into (m_has_cache_entry, phi_set);
}
405 1.1 mrg
406 1.1 mrg // Return TRUE if relations may be invalidated after crossing edge E.
407 1.1 mrg
// Return TRUE if relations may be invalidated after crossing edge E.

bool
path_range_query::relations_may_be_invalidated (edge e)
{
  // As soon as the path crosses a back edge, we can encounter
  // definitions of SSA_NAMEs that may have had a use in the path
  // already, so this will then be a new definition.  The relation
  // code is all designed around seeing things in dominator order, and
  // crossing a back edge in the path violates this assumption.
  return (e->flags & EDGE_DFS_BACK);
}
418 1.1 mrg
419 1.1 mrg // Compute ranges defined in the current block, or exported to the
420 1.1 mrg // next block.
421 1.1 mrg
422 1.1 mrg void
423 1.1 mrg path_range_query::compute_ranges_in_block (basic_block bb)
424 1.1 mrg {
425 1.1 mrg bitmap_iterator bi;
426 1.1 mrg int_range_max r, cached_range;
427 1.1 mrg unsigned i;
428 1.1 mrg
429 1.1 mrg if (m_resolve && !at_entry ())
430 1.1 mrg compute_phi_relations (bb, prev_bb ());
431 1.1 mrg
432 1.1 mrg // Force recalculation of any names in the cache that are defined in
433 1.1 mrg // this block. This can happen on interdependent SSA/phis in loops.
434 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
435 1.1 mrg {
436 1.1 mrg tree name = ssa_name (i);
437 1.1 mrg if (ssa_defined_in_bb (name, bb))
438 1.1 mrg clear_cache (name);
439 1.1 mrg }
440 1.1 mrg
441 1.1 mrg // Solve imports defined in this block, starting with the PHIs...
442 1.1 mrg compute_ranges_in_phis (bb);
443 1.1 mrg // ...and then the rest of the imports.
444 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
445 1.1 mrg {
446 1.1 mrg tree name = ssa_name (i);
447 1.1 mrg
448 1.1 mrg if (gimple_code (SSA_NAME_DEF_STMT (name)) != GIMPLE_PHI
449 1.1 mrg && range_defined_in_block (r, name, bb))
450 1.1 mrg set_cache (r, name);
451 1.1 mrg }
452 1.1 mrg
453 1.1 mrg if (at_exit ())
454 1.1 mrg return;
455 1.1 mrg
456 1.1 mrg // Solve imports that are exported to the next block.
457 1.1 mrg basic_block next = next_bb ();
458 1.1 mrg edge e = find_edge (bb, next);
459 1.1 mrg
460 1.1 mrg if (m_resolve && relations_may_be_invalidated (e))
461 1.1 mrg {
462 1.1 mrg if (DEBUG_SOLVER)
463 1.1 mrg fprintf (dump_file,
464 1.1 mrg "Resetting relations as they may be invalidated in %d->%d.\n",
465 1.1 mrg e->src->index, e->dest->index);
466 1.1 mrg
467 1.1 mrg path_oracle *p = get_path_oracle ();
468 1.1 mrg p->reset_path ();
469 1.1 mrg // ?? Instead of nuking the root oracle altogether, we could
470 1.1 mrg // reset the path oracle to search for relations from the top of
471 1.1 mrg // the loop with the root oracle. Something for future development.
472 1.1 mrg p->set_root_oracle (nullptr);
473 1.1 mrg }
474 1.1 mrg
475 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
476 1.1 mrg {
477 1.1 mrg tree name = ssa_name (i);
478 1.1 mrg gori_compute &g = m_ranger->gori ();
479 1.1 mrg bitmap exports = g.exports (bb);
480 1.1 mrg
481 1.1 mrg if (bitmap_bit_p (exports, i))
482 1.1 mrg {
483 1.1 mrg if (g.outgoing_edge_range_p (r, e, name, *this))
484 1.1 mrg {
485 1.1 mrg if (get_cache (cached_range, name))
486 1.1 mrg r.intersect (cached_range);
487 1.1 mrg
488 1.1 mrg set_cache (r, name);
489 1.1 mrg if (DEBUG_SOLVER)
490 1.1 mrg {
491 1.1 mrg fprintf (dump_file, "outgoing_edge_range_p for ");
492 1.1 mrg print_generic_expr (dump_file, name, TDF_SLIM);
493 1.1 mrg fprintf (dump_file, " on edge %d->%d ",
494 1.1 mrg e->src->index, e->dest->index);
495 1.1 mrg fprintf (dump_file, "is ");
496 1.1 mrg r.dump (dump_file);
497 1.1 mrg fprintf (dump_file, "\n");
498 1.1 mrg }
499 1.1 mrg }
500 1.1 mrg }
501 1.1 mrg }
502 1.1 mrg
503 1.1 mrg if (m_resolve)
504 1.1 mrg compute_outgoing_relations (bb, next);
505 1.1 mrg }
506 1.1 mrg
507 1.1 mrg // Adjust all pointer imports in BB with non-null information.
508 1.1 mrg
// Adjust all pointer imports in BB with non-null information.

void
path_range_query::adjust_for_non_null_uses (basic_block bb)
{
  int_range_max r;
  bitmap_iterator bi;
  unsigned i;

  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);

      // Only pointers can benefit from non-null adjustments.
      if (!POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      if (get_cache (r, name))
	{
	  // Already known non-zero; nothing to improve.
	  if (r.nonzero_p ())
	    continue;
	}
      else
	r.set_varying (TREE_TYPE (name));

      // Cache the refined range only when the adjustment changed it.
      if (m_non_null.adjust_range (r, name, bb, false))
	set_cache (r, name);
    }
}
535 1.1 mrg
536 1.1 mrg // If NAME is a supported SSA_NAME, add it the bitmap in IMPORTS.
537 1.1 mrg
538 1.1 mrg bool
539 1.1 mrg path_range_query::add_to_imports (tree name, bitmap imports)
540 1.1 mrg {
541 1.1 mrg if (TREE_CODE (name) == SSA_NAME
542 1.1 mrg && irange::supports_type_p (TREE_TYPE (name)))
543 1.1 mrg return bitmap_set_bit (imports, SSA_NAME_VERSION (name));
544 1.1 mrg return false;
545 1.1 mrg }
546 1.1 mrg
547 1.1 mrg // Compute the imports to the path ending in EXIT. These are
548 1.1 mrg // essentially the SSA names used to calculate the final conditional
549 1.1 mrg // along the path.
550 1.1 mrg //
551 1.1 mrg // They are hints for the solver. Adding more elements doesn't slow
552 1.1 mrg // us down, because we don't solve anything that doesn't appear in the
553 1.1 mrg // path. On the other hand, not having enough imports will limit what
554 1.1 mrg // we can solve.
555 1.1 mrg
// Compute the imports to the path ending in EXIT.  These are
// essentially the SSA names used to calculate the final conditional
// along the path.
//
// They are hints for the solver.  Adding more elements doesn't slow
// us down, because we don't solve anything that doesn't appear in the
// path.  On the other hand, not having enough imports will limit what
// we can solve.

void
path_range_query::compute_imports (bitmap imports, basic_block exit)
{
  // Start with the imports from the exit block...
  gori_compute &gori = m_ranger->gori ();
  bitmap r_imports = gori.imports (exit);
  bitmap_copy (imports, r_imports);

  // Seed the worklist with the initial imports.
  auto_vec<tree> worklist (bitmap_count_bits (imports));
  bitmap_iterator bi;
  unsigned i;
  EXECUTE_IF_SET_IN_BITMAP (imports, 0, i, bi)
    {
      tree name = ssa_name (i);
      worklist.quick_push (name);
    }

  // ...and add any operands used to define these imports,
  // transitively following SSA defs.
  while (!worklist.is_empty ())
    {
      tree name = worklist.pop ();
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);

      if (is_gimple_assign (def_stmt))
	{
	  // NOTE(review): rhs1 is added but not pushed on the
	  // worklist, unlike rhs2/rhs3 — so rhs1's own operands are
	  // not chased; confirm this asymmetry is intended.
	  add_to_imports (gimple_assign_rhs1 (def_stmt), imports);
	  tree rhs = gimple_assign_rhs2 (def_stmt);
	  if (rhs && add_to_imports (rhs, imports))
	    worklist.safe_push (rhs);
	  rhs = gimple_assign_rhs3 (def_stmt);
	  if (rhs && add_to_imports (rhs, imports))
	    worklist.safe_push (rhs);
	}
      else if (gphi *phi = dyn_cast <gphi *> (def_stmt))
	{
	  // Only chase PHI arguments flowing in from blocks on the path.
	  for (size_t i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      edge e = gimple_phi_arg_edge (phi, i);
	      tree arg = gimple_phi_arg (phi, i)->def;

	      if (TREE_CODE (arg) == SSA_NAME
		  && m_path.contains (e->src)
		  && bitmap_set_bit (imports, SSA_NAME_VERSION (arg)))
		worklist.safe_push (arg);
	    }
	}
    }
  // Exported booleans along the path, may help conditionals.
  if (m_resolve)
    for (i = 0; i < m_path.length (); ++i)
      {
	basic_block bb = m_path[i];
	tree name;
	FOR_EACH_GORI_EXPORT_NAME (gori, bb, name)
	  if (TREE_CODE (TREE_TYPE (name)) == BOOLEAN_TYPE)
	    bitmap_set_bit (imports, SSA_NAME_VERSION (name));
      }
}
614 1.1 mrg
615 1.1 mrg // Compute the ranges for IMPORTS along PATH.
616 1.1 mrg //
617 1.1 mrg // IMPORTS are the set of SSA names, any of which could potentially
618 1.1 mrg // change the value of the final conditional in PATH. Default to the
619 1.1 mrg // imports of the last block in the path if none is given.
620 1.1 mrg
// Compute the ranges for IMPORTS along PATH.
//
// IMPORTS are the set of SSA names, any of which could potentially
// change the value of the final conditional in PATH.  Default to the
// imports of the last block in the path if none is given.

void
path_range_query::compute_ranges (const vec<basic_block> &path,
				  const bitmap_head *imports)
{
  if (DEBUG_SOLVER)
    fprintf (dump_file, "\n==============================================\n");

  set_path (path);
  m_undefined_path = false;

  if (imports)
    bitmap_copy (m_imports, imports);
  else
    compute_imports (m_imports, exit_bb ());

  // Start each solve with a fresh set of path relations.
  if (m_resolve)
    get_path_oracle ()->reset_path ();

  if (DEBUG_SOLVER)
    {
      fprintf (dump_file, "path_range_query: compute_ranges for path: ");
      // Print in execution order, i.e. entry block first.
      for (unsigned i = path.length (); i > 0; --i)
	{
	  basic_block bb = path[i - 1];
	  fprintf (dump_file, "%d", bb->index);
	  if (i > 1)
	    fprintf (dump_file, "->");
	}
      fprintf (dump_file, "\n");
    }

  // Walk the path from entry to exit, solving each block in turn.
  while (1)
    {
      basic_block bb = curr_bb ();

      compute_ranges_in_block (bb);
      adjust_for_non_null_uses (bb);

      if (at_exit ())
	break;

      move_next ();
    }

  if (DEBUG_SOLVER)
    {
      get_path_oracle ()->dump (dump_file);
      dump (dump_file);
    }
}
671 1.1 mrg
672 1.1 mrg // Convenience function to compute ranges along a path consisting of
673 1.1 mrg // E->SRC and E->DEST.
674 1.1 mrg
675 1.1 mrg void
676 1.1 mrg path_range_query::compute_ranges (edge e)
677 1.1 mrg {
678 1.1 mrg auto_vec<basic_block> bbs (2);
679 1.1 mrg bbs.quick_push (e->dest);
680 1.1 mrg bbs.quick_push (e->src);
681 1.1 mrg compute_ranges (bbs);
682 1.1 mrg }
683 1.1 mrg
684 1.1 mrg // A folding aid used to register and query relations along a path.
685 1.1 mrg // When queried, it returns relations as they would appear on exit to
686 1.1 mrg // the path.
687 1.1 mrg //
688 1.1 mrg // Relations are registered on entry so the path_oracle knows which
689 1.1 mrg // block to query the root oracle at when a relation lies outside the
690 1.1 mrg // path. However, when queried we return the relation on exit to the
691 1.1 mrg // path, since the root_oracle ignores the registered.
692 1.1 mrg
// fur_depend specialization for path solving: relations are
// registered at the path entry block (M_ENTRY) and queried there too.

class jt_fur_source : public fur_depend
{
public:
  jt_fur_source (gimple *s, path_range_query *, gori_compute *,
		 const vec<basic_block> &);
  relation_kind query_relation (tree op1, tree op2) override;
  void register_relation (gimple *, relation_kind, tree op1, tree op2) override;
  void register_relation (edge, relation_kind, tree op1, tree op2) override;
private:
  // The path's entry block, used as the anchor for all relations.
  basic_block m_entry;
};
704 1.1 mrg
// Construct a jt_fur_source for statement S using QUERY's oracle, and
// anchor relations at the entry block of PATH (its last element,
// since paths are stored in reverse).

jt_fur_source::jt_fur_source (gimple *s,
			      path_range_query *query,
			      gori_compute *gori,
			      const vec<basic_block> &path)
  : fur_depend (s, gori, query)
{
  gcc_checking_assert (!path.is_empty ());

  m_entry = path[path.length () - 1];

  // The oracle is only usable when dominator info is available;
  // without it, disable relation tracking entirely.
  if (dom_info_available_p (CDI_DOMINATORS))
    m_oracle = query->oracle ();
  else
    m_oracle = NULL;
}
720 1.1 mrg
721 1.1 mrg // Ignore statement and register relation on entry to path.
722 1.1 mrg
// Ignore statement and register relation on entry to path.

void
jt_fur_source::register_relation (gimple *, relation_kind k, tree op1, tree op2)
{
  if (m_oracle)
    m_oracle->register_relation (m_entry, k, op1, op2);
}
729 1.1 mrg
730 1.1 mrg // Ignore edge and register relation on entry to path.
731 1.1 mrg
// Ignore edge and register relation on entry to path.

void
jt_fur_source::register_relation (edge, relation_kind k, tree op1, tree op2)
{
  if (m_oracle)
    m_oracle->register_relation (m_entry, k, op1, op2);
}
738 1.1 mrg
739 1.1 mrg relation_kind
740 1.1 mrg jt_fur_source::query_relation (tree op1, tree op2)
741 1.1 mrg {
742 1.1 mrg if (!m_oracle)
743 1.1 mrg return VREL_NONE;
744 1.1 mrg
745 1.1 mrg if (TREE_CODE (op1) != SSA_NAME || TREE_CODE (op2) != SSA_NAME)
746 1.1 mrg return VREL_NONE;
747 1.1 mrg
748 1.1 mrg return m_oracle->query_relation (m_entry, op1, op2);
749 1.1 mrg }
750 1.1 mrg
751 1.1 mrg // Return the range of STMT at the end of the path being analyzed.
752 1.1 mrg
// Return in R the range of STMT at the end of the path being
// analyzed.  Return FALSE if the statement's type is unsupported.

bool
path_range_query::range_of_stmt (irange &r, gimple *stmt, tree)
{
  tree type = gimple_range_type (stmt);

  if (!irange::supports_type_p (type))
    return false;

  // If resolving unknowns, fold the statement making use of any
  // relations along the path.
  if (m_resolve)
    {
      fold_using_range f;
      jt_fur_source src (stmt, this, &m_ranger->gori (), m_path);
      if (!f.fold_stmt (r, stmt, src))
	r.set_varying (type);
    }
  // Otherwise, fold without relations.
  else if (!fold_range (r, stmt, this))
    r.set_varying (type);

  return true;
}
776 1.1 mrg
777 1.1 mrg // If possible, register the relation on the incoming edge E into PHI.
778 1.1 mrg
// If possible, register an equivalence between PHI's result and the
// argument flowing in through edge E.

void
path_range_query::maybe_register_phi_relation (gphi *phi, edge e)
{
  tree arg = gimple_phi_arg_def (phi, e->dest_idx);

  if (!gimple_range_ssa_p (arg))
    return;

  // Do not record relations across an edge that invalidates them
  // (back edges; see relations_may_be_invalidated).
  if (relations_may_be_invalidated (e))
    return;

  basic_block bb = gimple_bb (phi);
  tree result = gimple_phi_result (phi);

  // Avoid recording the equivalence if the arg is defined in this
  // block, as that could create an ordering problem.
  if (ssa_defined_in_bb (arg, bb))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "maybe_register_phi_relation in bb%d:", bb->index);

  // The PHI result is a fresh definition, then equal to ARG on entry.
  get_path_oracle ()->killing_def (result);
  m_oracle->register_relation (entry_bb (), EQ_EXPR, arg, result);
}
804 1.1 mrg
805 1.1 mrg // Compute relations for each PHI in BB. For example:
806 1.1 mrg //
807 1.1 mrg // x_5 = PHI<y_9(5),...>
808 1.1 mrg //
809 1.1 mrg // If the path flows through BB5, we can register that x_5 == y_9.
810 1.1 mrg
811 1.1 mrg void
812 1.1 mrg path_range_query::compute_phi_relations (basic_block bb, basic_block prev)
813 1.1 mrg {
814 1.1 mrg if (prev == NULL)
815 1.1 mrg return;
816 1.1 mrg
817 1.1 mrg edge e_in = find_edge (prev, bb);
818 1.1 mrg
819 1.1 mrg for (gphi_iterator iter = gsi_start_phis (bb); !gsi_end_p (iter);
820 1.1 mrg gsi_next (&iter))
821 1.1 mrg {
822 1.1 mrg gphi *phi = iter.phi ();
823 1.1 mrg tree result = gimple_phi_result (phi);
824 1.1 mrg unsigned nargs = gimple_phi_num_args (phi);
825 1.1 mrg
826 1.1 mrg if (!import_p (result))
827 1.1 mrg continue;
828 1.1 mrg
829 1.1 mrg for (size_t i = 0; i < nargs; ++i)
830 1.1 mrg if (e_in == gimple_phi_arg_edge (phi, i))
831 1.1 mrg {
832 1.1 mrg maybe_register_phi_relation (phi, e_in);
833 1.1 mrg break;
834 1.1 mrg }
835 1.1 mrg }
836 1.1 mrg }
837 1.1 mrg
838 1.1 mrg // Compute outgoing relations from BB to NEXT.
839 1.1 mrg
// Compute relations registered by the conditional at the end of BB
// that hold on the outgoing edge to NEXT.

void
path_range_query::compute_outgoing_relations (basic_block bb, basic_block next)
{
  gimple *stmt = last_stmt (bb);

  // Only conditionals involving at least one import are relevant.
  if (stmt
      && gimple_code (stmt) == GIMPLE_COND
      && (import_p (gimple_cond_lhs (stmt))
	  || import_p (gimple_cond_rhs (stmt))))
    {
      int_range<2> r;
      gcond *cond = as_a<gcond *> (stmt);
      edge e0 = EDGE_SUCC (bb, 0);
      edge e1 = EDGE_SUCC (bb, 1);

      // Determine which successor the path takes, and the boolean
      // range the conditional has on that edge.
      if (e0->dest == next)
	gcond_edge_range (r, e0);
      else if (e1->dest == next)
	gcond_edge_range (r, e1);
      else
	gcc_unreachable ();

      jt_fur_source src (NULL, this, &m_ranger->gori (), m_path);
      src.register_outgoing_edges (cond, r, e0, e1);
    }
}
866