/* Tree based points-to analysis
   Copyright (C) 2005-2022 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dberlin (at) dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 1.1 mrg
21 1.1 mrg #include "config.h"
22 1.1 mrg #include "system.h"
23 1.1 mrg #include "coretypes.h"
24 1.1 mrg #include "backend.h"
25 1.1 mrg #include "rtl.h"
26 1.1 mrg #include "tree.h"
27 1.1 mrg #include "gimple.h"
28 1.1 mrg #include "alloc-pool.h"
29 1.1 mrg #include "tree-pass.h"
30 1.1 mrg #include "ssa.h"
31 1.1 mrg #include "cgraph.h"
32 1.1 mrg #include "tree-pretty-print.h"
33 1.1 mrg #include "diagnostic-core.h"
34 1.1 mrg #include "fold-const.h"
35 1.1 mrg #include "stor-layout.h"
36 1.1 mrg #include "stmt.h"
37 1.1 mrg #include "gimple-iterator.h"
38 1.1 mrg #include "tree-into-ssa.h"
39 1.1 mrg #include "tree-dfa.h"
40 1.1 mrg #include "gimple-walk.h"
41 1.1 mrg #include "varasm.h"
42 1.1 mrg #include "stringpool.h"
43 1.1 mrg #include "attribs.h"
44 1.1 mrg #include "tree-ssa.h"
45 1.1 mrg #include "tree-cfg.h"
46 1.1 mrg #include "gimple-range.h"
47 1.1 mrg #include "ipa-modref-tree.h"
48 1.1 mrg #include "ipa-modref.h"
49 1.1 mrg #include "attr-fnspec.h"
50 1.1 mrg
51 1.1 mrg /* The idea behind this analyzer is to generate set constraints from the
52 1.1 mrg program, then solve the resulting constraints in order to generate the
53 1.1 mrg points-to sets.
54 1.1 mrg
55 1.1 mrg Set constraints are a way of modeling program analysis problems that
56 1.1 mrg involve sets. They consist of an inclusion constraint language,
57 1.1 mrg describing the variables (each variable is a set) and operations that
58 1.1 mrg are involved on the variables, and a set of rules that derive facts
59 1.1 mrg from these operations. To solve a system of set constraints, you derive
60 1.1 mrg all possible facts under the rules, which gives you the correct sets
61 1.1 mrg as a consequence.
62 1.1 mrg
63 1.1 mrg See "Efficient Field-sensitive pointer analysis for C" by "David
64 1.1 mrg J. Pearce and Paul H. J. Kelly and Chris Hankin", at
65 1.1 mrg http://citeseer.ist.psu.edu/pearce04efficient.html
66 1.1 mrg
67 1.1 mrg Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
68 1.1 mrg of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
69 1.1 mrg http://citeseer.ist.psu.edu/heintze01ultrafast.html
70 1.1 mrg
71 1.1 mrg There are three types of real constraint expressions, DEREF,
72 1.1 mrg ADDRESSOF, and SCALAR. Each constraint expression consists
73 1.1 mrg of a constraint type, a variable, and an offset.
74 1.1 mrg
75 1.1 mrg SCALAR is a constraint expression type used to represent x, whether
76 1.1 mrg it appears on the LHS or the RHS of a statement.
77 1.1 mrg DEREF is a constraint expression type used to represent *x, whether
78 1.1 mrg it appears on the LHS or the RHS of a statement.
79 1.1 mrg ADDRESSOF is a constraint expression used to represent &x, whether
80 1.1 mrg it appears on the LHS or the RHS of a statement.
81 1.1 mrg
82 1.1 mrg Each pointer variable in the program is assigned an integer id, and
83 1.1 mrg each field of a structure variable is assigned an integer id as well.
84 1.1 mrg
85 1.1 mrg Structure variables are linked to their list of fields through a "next
86 1.1 mrg field" in each variable that points to the next field in offset
87 1.1 mrg order.
88 1.1 mrg Each variable for a structure field has
89 1.1 mrg
90 1.1 mrg 1. "size", that tells the size in bits of that field.
91 1.1 mrg 2. "fullsize", that tells the size in bits of the entire structure.
92 1.1 mrg 3. "offset", that tells the offset in bits from the beginning of the
93 1.1 mrg structure to this field.
94 1.1 mrg
95 1.1 mrg Thus,
96 1.1 mrg struct f
97 1.1 mrg {
98 1.1 mrg int a;
99 1.1 mrg int b;
100 1.1 mrg } foo;
101 1.1 mrg int *bar;
102 1.1 mrg
103 1.1 mrg looks like
104 1.1 mrg
105 1.1 mrg foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
106 1.1 mrg foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
107 1.1 mrg bar -> id 3, size 32, offset 0, fullsize 32, next NULL
108 1.1 mrg
109 1.1 mrg
110 1.1 mrg In order to solve the system of set constraints, the following is
111 1.1 mrg done:
112 1.1 mrg
113 1.1 mrg 1. Each constraint variable x has a solution set associated with it,
114 1.1 mrg Sol(x).
115 1.1 mrg
116 1.1 mrg 2. Constraints are separated into direct, copy, and complex.
117 1.1 mrg Direct constraints are ADDRESSOF constraints that require no extra
118 1.1 mrg processing, such as P = &Q
119 1.1 mrg Copy constraints are those of the form P = Q.
120 1.1 mrg Complex constraints are all the constraints involving dereferences
121 1.1 mrg and offsets (including offsetted copies).
122 1.1 mrg
123 1.1 mrg 3. All direct constraints of the form P = &Q are processed, such
124 1.1 mrg that Q is added to Sol(P)
125 1.1 mrg
126 1.1 mrg 4. All complex constraints for a given constraint variable are stored in a
127 1.1 mrg linked list attached to that variable's node.
128 1.1 mrg
129 1.1 mrg 5. A directed graph is built out of the copy constraints. Each
130 1.1 mrg constraint variable is a node in the graph, and an edge from
131 1.1 mrg Q to P is added for each copy constraint of the form P = Q
132 1.1 mrg
133 1.1 mrg 6. The graph is then walked, and solution sets are
134 1.1 mrg propagated along the copy edges, such that an edge from Q to P
135 1.1 mrg causes Sol(P) <- Sol(P) union Sol(Q).
136 1.1 mrg
137 1.1 mrg 7. As we visit each node, all complex constraints associated with
138 1.1 mrg that node are processed by adding appropriate copy edges to the graph, or the
139 1.1 mrg appropriate variables to the solution set.
140 1.1 mrg
141 1.1 mrg 8. The process of walking the graph is iterated until no solution
142 1.1 mrg sets change.
143 1.1 mrg
144 1.1 mrg Prior to walking the graph in steps 6 and 7, We perform static
145 1.1 mrg cycle elimination on the constraint graph, as well
146 1.1 mrg as off-line variable substitution.
147 1.1 mrg
148 1.1 mrg TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
149 1.1 mrg on and turned into anything), but isn't. You can just see what offset
150 1.1 mrg inside the pointed-to struct it's going to access.
151 1.1 mrg
152 1.1 mrg TODO: Constant bounded arrays can be handled as if they were structs of the
153 1.1 mrg same number of elements.
154 1.1 mrg
155 1.1 mrg TODO: Modeling heap and incoming pointers becomes much better if we
156 1.1 mrg add fields to them as we discover them, which we could do.
157 1.1 mrg
158 1.1 mrg TODO: We could handle unions, but to be honest, it's probably not
159 1.1 mrg worth the pain or slowdown. */
160 1.1 mrg
161 1.1 mrg /* IPA-PTA optimizations possible.
162 1.1 mrg
163 1.1 mrg When the indirect function called is ANYTHING we can add disambiguation
164 1.1 mrg based on the function signatures (or simply the parameter count which
165 1.1 mrg is the varinfo size). We also do not need to consider functions that
166 1.1 mrg do not have their address taken.
167 1.1 mrg
168 1.1 mrg The is_global_var bit which marks escape points is overly conservative
169 1.1 mrg in IPA mode. Split it to is_escape_point and is_global_var - only
170 1.1 mrg externally visible globals are escape points in IPA mode.
171 1.1 mrg There is now is_ipa_escape_point but this is only used in a few
172 1.1 mrg selected places.
173 1.1 mrg
174 1.1 mrg The way we introduce DECL_PT_UID to avoid fixing up all points-to
175 1.1 mrg sets in the translation unit when we copy a DECL during inlining
176 1.1 mrg pessimizes precision. The advantage is that the DECL_PT_UID keeps
177 1.1 mrg compile-time and memory usage overhead low - the points-to sets
178 1.1 mrg do not grow or get unshared as they would during a fixup phase.
179 1.1 mrg An alternative solution is to delay IPA PTA until after all
180 1.1 mrg inlining transformations have been applied.
181 1.1 mrg
182 1.1 mrg The way we propagate clobber/use information isn't optimized.
183 1.1 mrg It should use a new complex constraint that properly filters
184 1.1 mrg out local variables of the callee (though that would make
185 1.1 mrg the sets invalid after inlining). OTOH we might as well
186 1.1 mrg admit defeat to WHOPR and simply do all the clobber/use analysis
187 1.1 mrg and propagation after PTA finished but before we threw away
188 1.1 mrg points-to information for memory variables. WHOPR and PTA
189 1.1 mrg do not play along well anyway - the whole constraint solving
190 1.1 mrg would need to be done in WPA phase and it will be very interesting
191 1.1 mrg to apply the results to local SSA names during LTRANS phase.
192 1.1 mrg
193 1.1 mrg We probably should compute a per-function unit-ESCAPE solution
194 1.1 mrg propagating it simply like the clobber / uses solutions. The
195 1.1 mrg solution can go alongside the non-IPA escaped solution and be
196 1.1 mrg used to query which vars escape the unit through a function.
197 1.1 mrg This is also required to make the escaped-HEAP trick work in IPA mode.
198 1.1 mrg
199 1.1 mrg We never put function decls in points-to sets so we do not
200 1.1 mrg keep the set of called functions for indirect calls.
201 1.1 mrg
202 1.1 mrg And probably more. */
203 1.1 mrg
/* Whether to create one constraint variable per structure field
   (field-sensitive analysis) or to collapse each structure into a
   single variable.  */
static bool use_field_sensitive = true;
/* Nonzero while the interprocedural (IPA) variant of the analysis is
   running.  */
static int in_ipa_mode = 0;

/* Used for predecessor bitmaps.  */
static bitmap_obstack predbitmap_obstack;

/* Used for points-to sets.  */
static bitmap_obstack pta_obstack;

/* Used for oldsolution members of variables.  */
static bitmap_obstack oldpta_obstack;

/* Used for per-solver-iteration bitmaps.  */
static bitmap_obstack iteration_obstack;

static unsigned int create_variable_info_for (tree, const char *, bool);
typedef struct constraint_graph *constraint_graph_t;
static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);

struct constraint;
typedef struct constraint *constraint_t;


/* Iterate over the set bits of bitmap A like EXECUTE_IF_SET_IN_BITMAP,
   but tolerate a NULL bitmap A (iterating zero times).  */
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d)	\
  if (a)						\
    EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)

/* Statistics gathered during constraint generation and solving.  */
static struct constraint_stats
{
  unsigned int total_vars;
  unsigned int nonpointer_vars;
  unsigned int unified_vars_static;
  unsigned int unified_vars_dynamic;
  unsigned int iterations;
  unsigned int num_edges;
  unsigned int num_implicit_edges;
  unsigned int points_to_sets_created;
} stats;
242 1.1 mrg
/* Per-constraint-variable information.  One of these exists for every
   constraint variable: artificial variables, SSA names, decls and, in
   field-sensitive mode, each field of a structure.  Sub-fields of the
   same base variable are chained in offset order via HEAD/NEXT.  */
struct variable_info
{
  /* ID of this variable  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable.  */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* True if this appears as RHS in a ADDRESSOF constraint.  */
  unsigned int address_taken : 1;

  /* ??? Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed.  */
  unsigned int shadow_var_uid;

  /* Name of this variable  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;

/* Forward declarations for varinfo lookup and sub-field navigation.  */
static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
						   unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
static inline bool type_can_have_subvars (const_tree);
static void make_param_constraints (varinfo_t);
334 1.1 mrg
/* Pool of variable info structures.  */
static object_allocator<variable_info> variable_info_pool
  ("Variable info pool");

/* Map varinfo to final pt_solution.  */
static hash_map<varinfo_t, pt_solution *> *final_solutions;
/* Obstack the final pt_solutions are allocated from.  */
struct obstack final_solutions_obstack;

/* Table of variable info structures for constraint variables.
   Indexed directly by variable info id.  */
static vec<varinfo_t> varmap;
346 1.1 mrg
347 1.1 mrg /* Return the varmap element N */
348 1.1 mrg
349 1.1 mrg static inline varinfo_t
350 1.1 mrg get_varinfo (unsigned int n)
351 1.1 mrg {
352 1.1 mrg return varmap[n];
353 1.1 mrg }
354 1.1 mrg
355 1.1 mrg /* Return the next variable in the list of sub-variables of VI
356 1.1 mrg or NULL if VI is the last sub-variable. */
357 1.1 mrg
358 1.1 mrg static inline varinfo_t
359 1.1 mrg vi_next (varinfo_t vi)
360 1.1 mrg {
361 1.1 mrg return get_varinfo (vi->next);
362 1.1 mrg }
363 1.1 mrg
/* Static IDs for the special variables.  Variable ID zero is unused
   and used as terminator for the sub-variable chain (see the NEXT
   member of variable_info).  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
369 1.1 mrg
370 1.1 mrg /* Return a new variable info structure consisting for a variable
371 1.1 mrg named NAME, and using constraint graph node NODE. Append it
372 1.1 mrg to the vector of variable info structures. */
373 1.1 mrg
374 1.1 mrg static varinfo_t
375 1.1 mrg new_var_info (tree t, const char *name, bool add_id)
376 1.1 mrg {
377 1.1 mrg unsigned index = varmap.length ();
378 1.1 mrg varinfo_t ret = variable_info_pool.allocate ();
379 1.1 mrg
380 1.1 mrg if (dump_file && add_id)
381 1.1 mrg {
382 1.1 mrg char *tempname = xasprintf ("%s(%d)", name, index);
383 1.1 mrg name = ggc_strdup (tempname);
384 1.1 mrg free (tempname);
385 1.1 mrg }
386 1.1 mrg
387 1.1 mrg ret->id = index;
388 1.1 mrg ret->name = name;
389 1.1 mrg ret->decl = t;
390 1.1 mrg /* Vars without decl are artificial and do not have sub-variables. */
391 1.1 mrg ret->is_artificial_var = (t == NULL_TREE);
392 1.1 mrg ret->is_special_var = false;
393 1.1 mrg ret->is_unknown_size_var = false;
394 1.1 mrg ret->is_full_var = (t == NULL_TREE);
395 1.1 mrg ret->is_heap_var = false;
396 1.1 mrg ret->may_have_pointers = true;
397 1.1 mrg ret->only_restrict_pointers = false;
398 1.1 mrg ret->is_restrict_var = false;
399 1.1 mrg ret->ruid = 0;
400 1.1 mrg ret->is_global_var = (t == NULL_TREE);
401 1.1 mrg ret->is_ipa_escape_point = false;
402 1.1 mrg ret->is_fn_info = false;
403 1.1 mrg ret->address_taken = false;
404 1.1 mrg if (t && DECL_P (t))
405 1.1 mrg ret->is_global_var = (is_global_var (t)
406 1.1 mrg /* We have to treat even local register variables
407 1.1 mrg as escape points. */
408 1.1 mrg || (VAR_P (t) && DECL_HARD_REGISTER (t)));
409 1.1 mrg ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
410 1.1 mrg ret->solution = BITMAP_ALLOC (&pta_obstack);
411 1.1 mrg ret->oldsolution = NULL;
412 1.1 mrg ret->next = 0;
413 1.1 mrg ret->shadow_var_uid = 0;
414 1.1 mrg ret->head = ret->id;
415 1.1 mrg
416 1.1 mrg stats.total_vars++;
417 1.1 mrg
418 1.1 mrg varmap.safe_push (ret);
419 1.1 mrg
420 1.1 mrg return ret;
421 1.1 mrg }
422 1.1 mrg
/* A map mapping call statements to per-stmt variables for uses
   and clobbers specific to the call.  The mapped variable is the
   CALLUSED varinfo; its NEXT sub-field is the CALLCLOBBERED one
   (see get_call_vi).  */
static hash_map<gimple *, varinfo_t> *call_stmt_vars;
426 1.1 mrg
427 1.1 mrg /* Lookup or create the variable for the call statement CALL. */
428 1.1 mrg
429 1.1 mrg static varinfo_t
430 1.1 mrg get_call_vi (gcall *call)
431 1.1 mrg {
432 1.1 mrg varinfo_t vi, vi2;
433 1.1 mrg
434 1.1 mrg bool existed;
435 1.1 mrg varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
436 1.1 mrg if (existed)
437 1.1 mrg return *slot_p;
438 1.1 mrg
439 1.1 mrg vi = new_var_info (NULL_TREE, "CALLUSED", true);
440 1.1 mrg vi->offset = 0;
441 1.1 mrg vi->size = 1;
442 1.1 mrg vi->fullsize = 2;
443 1.1 mrg vi->is_full_var = true;
444 1.1 mrg vi->is_reg_var = true;
445 1.1 mrg
446 1.1 mrg vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
447 1.1 mrg vi2->offset = 1;
448 1.1 mrg vi2->size = 1;
449 1.1 mrg vi2->fullsize = 2;
450 1.1 mrg vi2->is_full_var = true;
451 1.1 mrg vi2->is_reg_var = true;
452 1.1 mrg
453 1.1 mrg vi->next = vi2->id;
454 1.1 mrg
455 1.1 mrg *slot_p = vi;
456 1.1 mrg return vi;
457 1.1 mrg }
458 1.1 mrg
459 1.1 mrg /* Lookup the variable for the call statement CALL representing
460 1.1 mrg the uses. Returns NULL if there is nothing special about this call. */
461 1.1 mrg
462 1.1 mrg static varinfo_t
463 1.1 mrg lookup_call_use_vi (gcall *call)
464 1.1 mrg {
465 1.1 mrg varinfo_t *slot_p = call_stmt_vars->get (call);
466 1.1 mrg if (slot_p)
467 1.1 mrg return *slot_p;
468 1.1 mrg
469 1.1 mrg return NULL;
470 1.1 mrg }
471 1.1 mrg
472 1.1 mrg /* Lookup the variable for the call statement CALL representing
473 1.1 mrg the clobbers. Returns NULL if there is nothing special about this call. */
474 1.1 mrg
475 1.1 mrg static varinfo_t
476 1.1 mrg lookup_call_clobber_vi (gcall *call)
477 1.1 mrg {
478 1.1 mrg varinfo_t uses = lookup_call_use_vi (call);
479 1.1 mrg if (!uses)
480 1.1 mrg return NULL;
481 1.1 mrg
482 1.1 mrg return vi_next (uses);
483 1.1 mrg }
484 1.1 mrg
/* Lookup or create the variable for the call statement CALL representing
   the uses.  This is the first of the pair created by get_call_vi.  */

static varinfo_t
get_call_use_vi (gcall *call)
{
  return get_call_vi (call);
}
493 1.1 mrg
/* Lookup or create the variable for the call statement CALL representing
   the clobbers, which get_call_vi chains directly after the uses
   variable.  */

static varinfo_t ATTRIBUTE_UNUSED
get_call_clobber_vi (gcall *call)
{
  return vi_next (get_call_vi (call));
}
502 1.1 mrg

/* The three kinds of constraint expression: x, *x and &x.  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint.  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.   */
  HOST_WIDE_INT offset;
};

/* Use 0x8000... as special unknown offset.  */
#define UNKNOWN_OFFSET HOST_WIDE_INT_MIN

typedef struct constraint_expr ce_s;
static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
static void get_constraint_for (tree, vec<ce_s> *);
static void get_constraint_for_rhs (tree, vec<ce_s> *);
static void do_deref (vec<ce_s> *);
532 1.1 mrg
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};

/* List of constraints that we use to build the constraint graph from.  */

static vec<constraint_t> constraints;
/* Allocation pool the constraints are drawn from.  */
static object_allocator<constraint> constraint_pool ("Constraint pool");
549 1.1 mrg
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node. */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution). */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};

/* The single constraint graph the solver operates on.  */
static constraint_graph_t graph;

/* During variable substitution and the offline version of indirect
   cycle finding, we create nodes to represent dereferences and
   address taken constraints.  These represent where these start and
   end.  Node N < FIRST_REF_NODE is a variable; node FIRST_REF_NODE + N
   is the dereference (*N) of that variable.  */
#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
632 1.1 mrg
633 1.1 mrg /* Return the representative node for NODE, if NODE has been unioned
634 1.1 mrg with another NODE.
635 1.1 mrg This function performs path compression along the way to finding
636 1.1 mrg the representative. */
637 1.1 mrg
638 1.1 mrg static unsigned int
639 1.1 mrg find (unsigned int node)
640 1.1 mrg {
641 1.1 mrg gcc_checking_assert (node < graph->size);
642 1.1 mrg if (graph->rep[node] != node)
643 1.1 mrg return graph->rep[node] = find (graph->rep[node]);
644 1.1 mrg return node;
645 1.1 mrg }
646 1.1 mrg
647 1.1 mrg /* Union the TO and FROM nodes to the TO nodes.
648 1.1 mrg Note that at some point in the future, we may want to do
649 1.1 mrg union-by-rank, in which case we are going to have to return the
650 1.1 mrg node we unified to. */
651 1.1 mrg
652 1.1 mrg static bool
653 1.1 mrg unite (unsigned int to, unsigned int from)
654 1.1 mrg {
655 1.1 mrg gcc_checking_assert (to < graph->size && from < graph->size);
656 1.1 mrg if (to != from && graph->rep[from] != to)
657 1.1 mrg {
658 1.1 mrg graph->rep[from] = to;
659 1.1 mrg return true;
660 1.1 mrg }
661 1.1 mrg return false;
662 1.1 mrg }
663 1.1 mrg
664 1.1 mrg /* Create a new constraint consisting of LHS and RHS expressions. */
665 1.1 mrg
666 1.1 mrg static constraint_t
667 1.1 mrg new_constraint (const struct constraint_expr lhs,
668 1.1 mrg const struct constraint_expr rhs)
669 1.1 mrg {
670 1.1 mrg constraint_t ret = constraint_pool.allocate ();
671 1.1 mrg ret->lhs = lhs;
672 1.1 mrg ret->rhs = rhs;
673 1.1 mrg return ret;
674 1.1 mrg }
675 1.1 mrg
676 1.1 mrg /* Print out constraint C to FILE. */
677 1.1 mrg
678 1.1 mrg static void
679 1.1 mrg dump_constraint (FILE *file, constraint_t c)
680 1.1 mrg {
681 1.1 mrg if (c->lhs.type == ADDRESSOF)
682 1.1 mrg fprintf (file, "&");
683 1.1 mrg else if (c->lhs.type == DEREF)
684 1.1 mrg fprintf (file, "*");
685 1.1 mrg if (dump_file)
686 1.1 mrg fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
687 1.1 mrg else
688 1.1 mrg fprintf (file, "V%d", c->lhs.var);
689 1.1 mrg if (c->lhs.offset == UNKNOWN_OFFSET)
690 1.1 mrg fprintf (file, " + UNKNOWN");
691 1.1 mrg else if (c->lhs.offset != 0)
692 1.1 mrg fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
693 1.1 mrg fprintf (file, " = ");
694 1.1 mrg if (c->rhs.type == ADDRESSOF)
695 1.1 mrg fprintf (file, "&");
696 1.1 mrg else if (c->rhs.type == DEREF)
697 1.1 mrg fprintf (file, "*");
698 1.1 mrg if (dump_file)
699 1.1 mrg fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
700 1.1 mrg else
701 1.1 mrg fprintf (file, "V%d", c->rhs.var);
702 1.1 mrg if (c->rhs.offset == UNKNOWN_OFFSET)
703 1.1 mrg fprintf (file, " + UNKNOWN");
704 1.1 mrg else if (c->rhs.offset != 0)
705 1.1 mrg fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
706 1.1 mrg }
707 1.1 mrg

/* Forward declarations of the debug-dump entry points below; these are
   meant to be called from the debugger.  */
void debug_constraint (constraint_t);
void debug_constraints (void);
void debug_constraint_graph (void);
void debug_solution_for_var (unsigned int);
void debug_sa_points_to_info (void);
void debug_varinfo (varinfo_t);
void debug_varmap (void);

/* Print out constraint C to stderr.  */

DEBUG_FUNCTION void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
  fprintf (stderr, "\n");
}
725 1.1 mrg
726 1.1 mrg /* Print out all constraints to FILE */
727 1.1 mrg
728 1.1 mrg static void
729 1.1 mrg dump_constraints (FILE *file, int from)
730 1.1 mrg {
731 1.1 mrg int i;
732 1.1 mrg constraint_t c;
733 1.1 mrg for (i = from; constraints.iterate (i, &c); i++)
734 1.1 mrg if (c)
735 1.1 mrg {
736 1.1 mrg dump_constraint (file, c);
737 1.1 mrg fprintf (file, "\n");
738 1.1 mrg }
739 1.1 mrg }
740 1.1 mrg
/* Print out all constraints to stderr.  Debugger entry point.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
748 1.1 mrg
/* Print the constraint graph in dot format.  Nodes below FIRST_REF_NODE
   are variables; node FIRST_REF_NODE + N stands for the dereference *N
   and is printed with a '*' prefix.  Only representative nodes (those
   for which find (i) == i) are emitted.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      /* Node FIRST_REF_NODE would be "*NULL"; skip it.  */
      if (i == FIRST_REF_NODE)
	continue;
      /* Skip non-representative (unified) nodes.  */
      if (find (i) != i)
	continue;
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Attach the node's complex constraints as its dot label,
	     one per line ("\l" is dot's left-justified line break).  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  /* Print edges between representatives only, and skip
	     self-edges resulting from unification.  */
	  unsigned to = find (j);
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
823 1.1 mrg
824 1.1 mrg /* Print out the constraint graph to stderr. */
825 1.1 mrg
826 1.1 mrg DEBUG_FUNCTION void
827 1.1 mrg debug_constraint_graph (void)
828 1.1 mrg {
829 1.1 mrg dump_constraint_graph (stderr);
830 1.1 mrg }
831 1.1 mrg
832 1.1 mrg /* SOLVER FUNCTIONS
833 1.1 mrg
834 1.1 mrg The solver is a simple worklist solver, that works on the following
835 1.1 mrg algorithm:
836 1.1 mrg
837 1.1 mrg sbitmap changed_nodes = all zeroes;
838 1.1 mrg changed_count = 0;
839 1.1 mrg For each node that is not already collapsed:
840 1.1 mrg changed_count++;
841 1.1 mrg set bit in changed nodes
842 1.1 mrg
843 1.1 mrg while (changed_count > 0)
844 1.1 mrg {
845 1.1 mrg compute topological ordering for constraint graph
846 1.1 mrg
847 1.1 mrg find and collapse cycles in the constraint graph (updating
848 1.1 mrg changed if necessary)
849 1.1 mrg
850 1.1 mrg for each node (n) in the graph in topological order:
851 1.1 mrg changed_count--;
852 1.1 mrg
853 1.1 mrg Process each complex constraint associated with the node,
854 1.1 mrg updating changed if necessary.
855 1.1 mrg
856 1.1 mrg For each outgoing edge from n, propagate the solution from n to
857 1.1 mrg the destination of the edge, updating changed as necessary.
858 1.1 mrg
859 1.1 mrg } */
860 1.1 mrg
861 1.1 mrg /* Return true if two constraint expressions A and B are equal. */
862 1.1 mrg
863 1.1 mrg static bool
864 1.1 mrg constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
865 1.1 mrg {
866 1.1 mrg return a.type == b.type && a.var == b.var && a.offset == b.offset;
867 1.1 mrg }
868 1.1 mrg
869 1.1 mrg /* Return true if constraint expression A is less than constraint expression
870 1.1 mrg B. This is just arbitrary, but consistent, in order to give them an
871 1.1 mrg ordering. */
872 1.1 mrg
873 1.1 mrg static bool
874 1.1 mrg constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
875 1.1 mrg {
876 1.1 mrg if (a.type == b.type)
877 1.1 mrg {
878 1.1 mrg if (a.var == b.var)
879 1.1 mrg return a.offset < b.offset;
880 1.1 mrg else
881 1.1 mrg return a.var < b.var;
882 1.1 mrg }
883 1.1 mrg else
884 1.1 mrg return a.type < b.type;
885 1.1 mrg }
886 1.1 mrg
887 1.1 mrg /* Return true if constraint A is less than constraint B. This is just
888 1.1 mrg arbitrary, but consistent, in order to give them an ordering. */
889 1.1 mrg
890 1.1 mrg static bool
891 1.1 mrg constraint_less (const constraint_t &a, const constraint_t &b)
892 1.1 mrg {
893 1.1 mrg if (constraint_expr_less (a->lhs, b->lhs))
894 1.1 mrg return true;
895 1.1 mrg else if (constraint_expr_less (b->lhs, a->lhs))
896 1.1 mrg return false;
897 1.1 mrg else
898 1.1 mrg return constraint_expr_less (a->rhs, b->rhs);
899 1.1 mrg }
900 1.1 mrg
901 1.1 mrg /* Return true if two constraints A and B are equal. */
902 1.1 mrg
903 1.1 mrg static bool
904 1.1 mrg constraint_equal (struct constraint a, struct constraint b)
905 1.1 mrg {
906 1.1 mrg return constraint_expr_equal (a.lhs, b.lhs)
907 1.1 mrg && constraint_expr_equal (a.rhs, b.rhs);
908 1.1 mrg }
909 1.1 mrg
910 1.1 mrg
911 1.1 mrg /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
912 1.1 mrg
913 1.1 mrg static constraint_t
914 1.1 mrg constraint_vec_find (vec<constraint_t> vec,
915 1.1 mrg struct constraint lookfor)
916 1.1 mrg {
917 1.1 mrg unsigned int place;
918 1.1 mrg constraint_t found;
919 1.1 mrg
920 1.1 mrg if (!vec.exists ())
921 1.1 mrg return NULL;
922 1.1 mrg
923 1.1 mrg place = vec.lower_bound (&lookfor, constraint_less);
924 1.1 mrg if (place >= vec.length ())
925 1.1 mrg return NULL;
926 1.1 mrg found = vec[place];
927 1.1 mrg if (!constraint_equal (*found, lookfor))
928 1.1 mrg return NULL;
929 1.1 mrg return found;
930 1.1 mrg }
931 1.1 mrg
932 1.1 mrg /* Union two constraint vectors, TO and FROM. Put the result in TO.
933 1.1 mrg Returns true of TO set is changed. */
934 1.1 mrg
935 1.1 mrg static bool
936 1.1 mrg constraint_set_union (vec<constraint_t> *to,
937 1.1 mrg vec<constraint_t> *from)
938 1.1 mrg {
939 1.1 mrg int i;
940 1.1 mrg constraint_t c;
941 1.1 mrg bool any_change = false;
942 1.1 mrg
943 1.1 mrg FOR_EACH_VEC_ELT (*from, i, c)
944 1.1 mrg {
945 1.1 mrg if (constraint_vec_find (*to, *c) == NULL)
946 1.1 mrg {
947 1.1 mrg unsigned int place = to->lower_bound (c, constraint_less);
948 1.1 mrg to->safe_insert (place, c);
949 1.1 mrg any_change = true;
950 1.1 mrg }
951 1.1 mrg }
952 1.1 mrg return any_change;
953 1.1 mrg }
954 1.1 mrg
955 1.1 mrg /* Expands the solution in SET to all sub-fields of variables included. */
956 1.1 mrg
957 1.1 mrg static bitmap
958 1.1 mrg solution_set_expand (bitmap set, bitmap *expanded)
959 1.1 mrg {
960 1.1 mrg bitmap_iterator bi;
961 1.1 mrg unsigned j;
962 1.1 mrg
963 1.1 mrg if (*expanded)
964 1.1 mrg return *expanded;
965 1.1 mrg
966 1.1 mrg *expanded = BITMAP_ALLOC (&iteration_obstack);
967 1.1 mrg
968 1.1 mrg /* In a first pass expand to the head of the variables we need to
969 1.1 mrg add all sub-fields off. This avoids quadratic behavior. */
970 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
971 1.1 mrg {
972 1.1 mrg varinfo_t v = get_varinfo (j);
973 1.1 mrg if (v->is_artificial_var
974 1.1 mrg || v->is_full_var)
975 1.1 mrg continue;
976 1.1 mrg bitmap_set_bit (*expanded, v->head);
977 1.1 mrg }
978 1.1 mrg
979 1.1 mrg /* In the second pass now expand all head variables with subfields. */
980 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
981 1.1 mrg {
982 1.1 mrg varinfo_t v = get_varinfo (j);
983 1.1 mrg if (v->head != j)
984 1.1 mrg continue;
985 1.1 mrg for (v = vi_next (v); v != NULL; v = vi_next (v))
986 1.1 mrg bitmap_set_bit (*expanded, v->id);
987 1.1 mrg }
988 1.1 mrg
989 1.1 mrg /* And finally set the rest of the bits from SET. */
990 1.1 mrg bitmap_ior_into (*expanded, set);
991 1.1 mrg
992 1.1 mrg return *expanded;
993 1.1 mrg }
994 1.1 mrg
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  Returns true if TO changed.  *EXPANDED_DELTA caches the
   expansion of DELTA to all sub-fields, computed lazily on first use.  */

static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  /* Offset of the field the incremented pointer lands in, and
	     the size of the original field (the overlap window).  */
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  /* Add VI and every following field whose offset is still
	     within [fieldoffset, fieldoffset + size).  */
	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1060 1.1 mrg
1061 1.1 mrg /* Insert constraint C into the list of complex constraints for graph
1062 1.1 mrg node VAR. */
1063 1.1 mrg
1064 1.1 mrg static void
1065 1.1 mrg insert_into_complex (constraint_graph_t graph,
1066 1.1 mrg unsigned int var, constraint_t c)
1067 1.1 mrg {
1068 1.1 mrg vec<constraint_t> complex = graph->complex[var];
1069 1.1 mrg unsigned int place = complex.lower_bound (c, constraint_less);
1070 1.1 mrg
1071 1.1 mrg /* Only insert constraints that do not already exist. */
1072 1.1 mrg if (place >= complex.length ()
1073 1.1 mrg || !constraint_equal (*c, *complex[place]))
1074 1.1 mrg graph->complex[var].safe_insert (place, c);
1075 1.1 mrg }
1076 1.1 mrg
1077 1.1 mrg
1078 1.1 mrg /* Condense two variable nodes into a single variable node, by moving
1079 1.1 mrg all associated info from FROM to TO. Returns true if TO node's
1080 1.1 mrg constraint set changes after the merge. */
1081 1.1 mrg
1082 1.1 mrg static bool
1083 1.1 mrg merge_node_constraints (constraint_graph_t graph, unsigned int to,
1084 1.1 mrg unsigned int from)
1085 1.1 mrg {
1086 1.1 mrg unsigned int i;
1087 1.1 mrg constraint_t c;
1088 1.1 mrg bool any_change = false;
1089 1.1 mrg
1090 1.1 mrg gcc_checking_assert (find (from) == to);
1091 1.1 mrg
1092 1.1 mrg /* Move all complex constraints from src node into to node */
1093 1.1 mrg FOR_EACH_VEC_ELT (graph->complex[from], i, c)
1094 1.1 mrg {
1095 1.1 mrg /* In complex constraints for node FROM, we may have either
1096 1.1 mrg a = *FROM, and *FROM = a, or an offseted constraint which are
1097 1.1 mrg always added to the rhs node's constraints. */
1098 1.1 mrg
1099 1.1 mrg if (c->rhs.type == DEREF)
1100 1.1 mrg c->rhs.var = to;
1101 1.1 mrg else if (c->lhs.type == DEREF)
1102 1.1 mrg c->lhs.var = to;
1103 1.1 mrg else
1104 1.1 mrg c->rhs.var = to;
1105 1.1 mrg
1106 1.1 mrg }
1107 1.1 mrg any_change = constraint_set_union (&graph->complex[to],
1108 1.1 mrg &graph->complex[from]);
1109 1.1 mrg graph->complex[from].release ();
1110 1.1 mrg return any_change;
1111 1.1 mrg }
1112 1.1 mrg
1113 1.1 mrg
1114 1.1 mrg /* Remove edges involving NODE from GRAPH. */
1115 1.1 mrg
1116 1.1 mrg static void
1117 1.1 mrg clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1118 1.1 mrg {
1119 1.1 mrg if (graph->succs[node])
1120 1.1 mrg BITMAP_FREE (graph->succs[node]);
1121 1.1 mrg }
1122 1.1 mrg
1123 1.1 mrg /* Merge GRAPH nodes FROM and TO into node TO. */
1124 1.1 mrg
1125 1.1 mrg static void
1126 1.1 mrg merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1127 1.1 mrg unsigned int from)
1128 1.1 mrg {
1129 1.1 mrg if (graph->indirect_cycles[from] != -1)
1130 1.1 mrg {
1131 1.1 mrg /* If we have indirect cycles with the from node, and we have
1132 1.1 mrg none on the to node, the to node has indirect cycles from the
1133 1.1 mrg from node now that they are unified.
1134 1.1 mrg If indirect cycles exist on both, unify the nodes that they
1135 1.1 mrg are in a cycle with, since we know they are in a cycle with
1136 1.1 mrg each other. */
1137 1.1 mrg if (graph->indirect_cycles[to] == -1)
1138 1.1 mrg graph->indirect_cycles[to] = graph->indirect_cycles[from];
1139 1.1 mrg }
1140 1.1 mrg
1141 1.1 mrg /* Merge all the successor edges. */
1142 1.1 mrg if (graph->succs[from])
1143 1.1 mrg {
1144 1.1 mrg if (!graph->succs[to])
1145 1.1 mrg graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1146 1.1 mrg bitmap_ior_into (graph->succs[to],
1147 1.1 mrg graph->succs[from]);
1148 1.1 mrg }
1149 1.1 mrg
1150 1.1 mrg clear_edges_for_node (graph, from);
1151 1.1 mrg }
1152 1.1 mrg
1153 1.1 mrg
1154 1.1 mrg /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1155 1.1 mrg it doesn't exist in the graph already. */
1156 1.1 mrg
1157 1.1 mrg static void
1158 1.1 mrg add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1159 1.1 mrg unsigned int from)
1160 1.1 mrg {
1161 1.1 mrg if (to == from)
1162 1.1 mrg return;
1163 1.1 mrg
1164 1.1 mrg if (!graph->implicit_preds[to])
1165 1.1 mrg graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1166 1.1 mrg
1167 1.1 mrg if (bitmap_set_bit (graph->implicit_preds[to], from))
1168 1.1 mrg stats.num_implicit_edges++;
1169 1.1 mrg }
1170 1.1 mrg
1171 1.1 mrg /* Add a predecessor graph edge to GRAPH, going from TO to FROM if
1172 1.1 mrg it doesn't exist in the graph already.
1173 1.1 mrg Return false if the edge already existed, true otherwise. */
1174 1.1 mrg
1175 1.1 mrg static void
1176 1.1 mrg add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
1177 1.1 mrg unsigned int from)
1178 1.1 mrg {
1179 1.1 mrg if (!graph->preds[to])
1180 1.1 mrg graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1181 1.1 mrg bitmap_set_bit (graph->preds[to], from);
1182 1.1 mrg }
1183 1.1 mrg
1184 1.1 mrg /* Add a graph edge to GRAPH, going from FROM to TO if
1185 1.1 mrg it doesn't exist in the graph already.
1186 1.1 mrg Return false if the edge already existed, true otherwise. */
1187 1.1 mrg
1188 1.1 mrg static bool
1189 1.1 mrg add_graph_edge (constraint_graph_t graph, unsigned int to,
1190 1.1 mrg unsigned int from)
1191 1.1 mrg {
1192 1.1 mrg if (to == from)
1193 1.1 mrg {
1194 1.1 mrg return false;
1195 1.1 mrg }
1196 1.1 mrg else
1197 1.1 mrg {
1198 1.1 mrg bool r = false;
1199 1.1 mrg
1200 1.1 mrg if (!graph->succs[from])
1201 1.1 mrg graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1202 1.1 mrg
1203 1.1 mrg /* The graph solving process does not avoid "triangles", thus
1204 1.1 mrg there can be multiple paths from a node to another involving
1205 1.1 mrg intermediate other nodes. That causes extra copying which is
1206 1.1 mrg most difficult to avoid when the intermediate node is ESCAPED
1207 1.1 mrg because there are no edges added from ESCAPED. Avoid
1208 1.1 mrg adding the direct edge FROM -> TO when we have FROM -> ESCAPED
1209 1.1 mrg and TO contains ESCAPED.
1210 1.1 mrg ??? Note this is only a heuristic, it does not prevent the
1211 1.1 mrg situation from occuring. The heuristic helps PR38474 and
1212 1.1 mrg PR99912 significantly. */
1213 1.1 mrg if (to < FIRST_REF_NODE
1214 1.1 mrg && bitmap_bit_p (graph->succs[from], find (escaped_id))
1215 1.1 mrg && bitmap_bit_p (get_varinfo (find (to))->solution, escaped_id))
1216 1.1 mrg return false;
1217 1.1 mrg
1218 1.1 mrg if (bitmap_set_bit (graph->succs[from], to))
1219 1.1 mrg {
1220 1.1 mrg r = true;
1221 1.1 mrg if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1222 1.1 mrg stats.num_edges++;
1223 1.1 mrg }
1224 1.1 mrg return r;
1225 1.1 mrg }
1226 1.1 mrg }
1227 1.1 mrg
1228 1.1 mrg
1229 1.1 mrg /* Initialize the constraint graph structure to contain SIZE nodes. */
1230 1.1 mrg
1231 1.1 mrg static void
1232 1.1 mrg init_graph (unsigned int size)
1233 1.1 mrg {
1234 1.1 mrg unsigned int j;
1235 1.1 mrg
1236 1.1 mrg graph = XCNEW (struct constraint_graph);
1237 1.1 mrg graph->size = size;
1238 1.1 mrg graph->succs = XCNEWVEC (bitmap, graph->size);
1239 1.1 mrg graph->indirect_cycles = XNEWVEC (int, graph->size);
1240 1.1 mrg graph->rep = XNEWVEC (unsigned int, graph->size);
1241 1.1 mrg /* ??? Macros do not support template types with multiple arguments,
1242 1.1 mrg so we use a typedef to work around it. */
1243 1.1 mrg typedef vec<constraint_t> vec_constraint_t_heap;
1244 1.1 mrg graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1245 1.1 mrg graph->pe = XCNEWVEC (unsigned int, graph->size);
1246 1.1 mrg graph->pe_rep = XNEWVEC (int, graph->size);
1247 1.1 mrg
1248 1.1 mrg for (j = 0; j < graph->size; j++)
1249 1.1 mrg {
1250 1.1 mrg graph->rep[j] = j;
1251 1.1 mrg graph->pe_rep[j] = -1;
1252 1.1 mrg graph->indirect_cycles[j] = -1;
1253 1.1 mrg }
1254 1.1 mrg }
1255 1.1 mrg
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  /* Allocate the auxiliary structures used for offline variable
     substitution.  */
  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* Initially every non-special variable is a direct node; the
     constraint walk below clears the bit where that turns out to be
     wrong.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor (and implicit) edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* Offsetted or non-scalar load: x is no longer direct.  */
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y.  Record the points-to / pointed-by relation.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* For field-decomposed variables, clear every field,
		 starting from the head.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side non-direct.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1358 1.1 mrg
/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Use the representatives, as nodes may have been unified.  */
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y.  Seed x's solution directly instead of adding an
	     edge.  */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1418 1.1 mrg
1419 1.1 mrg
/* Bitmap of variables whose points-to solution changed on the last
   solver iteration.  */
static bitmap changed;
1422 1.1 mrg
/* Strongly Connected Component visitation info.  */

class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already entered by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been completely processed.  */
  auto_sbitmap deleted;
  /* Per-node DFS number; lowered to the smallest number reachable
     from the node during the visit.  */
  unsigned int *dfs;
  /* Per-node mapping used during condensation (maintained by code
     outside this chunk — presumably node -> representative; verify
     at its uses).  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of non-root candidate SCC members (Nuutila's variant).  */
  auto_vec<unsigned> scc_stack;
};
1438 1.1 mrg
1439 1.1 mrg
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  /* Assign N the next DFS number; si->dfs[n] may be lowered below when
     a smaller number is reachable from N, while MY_DFS keeps the
     original value to detect whether N is an SCC root.  */
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      /* Skip successors whose SCC is already fully processed.  */
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* The recursion may have unified W; propagate the smallest DFS
	 number reachable through it to N.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is an SCC root.  The SCC is non-trivial when the stack still
	 holds nodes with DFS numbers not smaller than N's.  */
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop all members of this SCC off the stack.  */
	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  /* Unify into the smallest-id member, which is asserted to be
	     a non-REF node.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N is not a root; leave it on the stack for its root to collect.  */
    si->scc_stack.safe_push (n);
}
1527 1.1 mrg
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be the
   representative of FROM.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  merge_graph_nodes (graph, to, from);
  /* If merging FROM's complex constraints grew TO's set, TO needs
     reprocessing.  */
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solution has been folded into TO's; release it.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      /* TO's old solution no longer matches its (possibly grown)
	 current solution; drop it once we are iterating.  */
      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Drop any self edge the unification may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1583 1.1 mrg
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  *EXPANDED_DELTA caches the expansion of
   DELTA to all sub-fields when OFF is unknown.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* A negative offset points before the variable; fall back to
	     its first field.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1673 1.1 mrg
1674 1.1 mrg /* Process a constraint C that represents *(x + off) = y using DELTA
1675 1.1 mrg as the starting solution for x. */
1676 1.1 mrg
static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  /* RHS is y, the stored variable; SOL is its current points-to set.  */
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  /* LOFF is the offset at which the LHS pointer is dereferenced.  */
  HOST_WIDE_INT loff = c->lhs.offset;
  /* Set once we have recorded that RHS escapes via a store to a global,
     so we only do that work once per constraint evaluation.  */
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      /* Full variables are not decomposed into fields; no offset
	 adjustment is needed.  */
      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* Adjust V to the sub-variable (field) the store actually
	     hits; negative offsets conservatively start at the first
	     field of the decomposed variable.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      /* Stores to special vars have no further effect.  */
	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1773 1.1 mrg
/* Handle a non-simple constraint (simple meaning it requires no iteration),
   i.e. *x = &y, x = *y, *x = y, and x = y with offsets involved.  */
1776 1.1 mrg
1777 1.1 mrg static void
1778 1.1 mrg do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1779 1.1 mrg bitmap *expanded_delta)
1780 1.1 mrg {
1781 1.1 mrg if (c->lhs.type == DEREF)
1782 1.1 mrg {
1783 1.1 mrg if (c->rhs.type == ADDRESSOF)
1784 1.1 mrg {
1785 1.1 mrg gcc_unreachable ();
1786 1.1 mrg }
1787 1.1 mrg else
1788 1.1 mrg {
1789 1.1 mrg /* *x = y */
1790 1.1 mrg do_ds_constraint (c, delta, expanded_delta);
1791 1.1 mrg }
1792 1.1 mrg }
1793 1.1 mrg else if (c->rhs.type == DEREF)
1794 1.1 mrg {
1795 1.1 mrg /* x = *y */
1796 1.1 mrg if (!(get_varinfo (c->lhs.var)->is_special_var))
1797 1.1 mrg do_sd_constraint (graph, c, delta, expanded_delta);
1798 1.1 mrg }
1799 1.1 mrg else
1800 1.1 mrg {
1801 1.1 mrg bitmap tmp;
1802 1.1 mrg bool flag = false;
1803 1.1 mrg
1804 1.1 mrg gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1805 1.1 mrg && c->rhs.offset != 0 && c->lhs.offset == 0);
1806 1.1 mrg tmp = get_varinfo (c->lhs.var)->solution;
1807 1.1 mrg
1808 1.1 mrg flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1809 1.1 mrg expanded_delta);
1810 1.1 mrg
1811 1.1 mrg if (flag)
1812 1.1 mrg bitmap_set_bit (changed, c->lhs.var);
1813 1.1 mrg }
1814 1.1 mrg }
1815 1.1 mrg
1816 1.1 mrg /* Initialize and return a new SCC info structure. */
1817 1.1 mrg
1818 1.1 mrg scc_info::scc_info (size_t size) :
1819 1.1 mrg visited (size), deleted (size), current_index (0), scc_stack (1)
1820 1.1 mrg {
1821 1.1 mrg bitmap_clear (visited);
1822 1.1 mrg bitmap_clear (deleted);
1823 1.1 mrg node_mapping = XNEWVEC (unsigned int, size);
1824 1.1 mrg dfs = XCNEWVEC (unsigned int, size);
1825 1.1 mrg
1826 1.1 mrg for (size_t i = 0; i < size; i++)
1827 1.1 mrg node_mapping[i] = i;
1828 1.1 mrg }
1829 1.1 mrg
1830 1.1 mrg /* Free an SCC info structure pointed to by SI */
1831 1.1 mrg
scc_info::~scc_info ()
{
  /* Only the two XNEWVEC'd arrays need explicit freeing; the sbitmap
     and auto_vec members clean up after themselves.  */
  free (node_mapping);
  free (dfs);
}
1837 1.1 mrg
1838 1.1 mrg
1839 1.1 mrg /* Find indirect cycles in GRAPH that occur, using strongly connected
1840 1.1 mrg components, and note them in the indirect cycles map.
1841 1.1 mrg
1842 1.1 mrg This technique comes from Ben Hardekopf and Calvin Lin,
1843 1.1 mrg "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1844 1.1 mrg Lines of Code", submitted to PLDI 2007. */
1845 1.1 mrg
1846 1.1 mrg static void
1847 1.1 mrg find_indirect_cycles (constraint_graph_t graph)
1848 1.1 mrg {
1849 1.1 mrg unsigned int i;
1850 1.1 mrg unsigned int size = graph->size;
1851 1.1 mrg scc_info si (size);
1852 1.1 mrg
1853 1.1 mrg for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1854 1.1 mrg if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1855 1.1 mrg scc_visit (graph, &si, i);
1856 1.1 mrg }
1857 1.1 mrg
1858 1.1 mrg /* Visit the graph in topological order starting at node N, and store the
1859 1.1 mrg order in TOPO_ORDER using VISITED to indicate visited nodes. */
1860 1.1 mrg
1861 1.1 mrg static void
1862 1.1 mrg topo_visit (constraint_graph_t graph, vec<unsigned> &topo_order,
1863 1.1 mrg sbitmap visited, unsigned int n)
1864 1.1 mrg {
1865 1.1 mrg bitmap_iterator bi;
1866 1.1 mrg unsigned int j;
1867 1.1 mrg
1868 1.1 mrg bitmap_set_bit (visited, n);
1869 1.1 mrg
1870 1.1 mrg if (graph->succs[n])
1871 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1872 1.1 mrg {
1873 1.1 mrg unsigned k = find (j);
1874 1.1 mrg if (!bitmap_bit_p (visited, k))
1875 1.1 mrg topo_visit (graph, topo_order, visited, k);
1876 1.1 mrg }
1877 1.1 mrg
1878 1.1 mrg topo_order.quick_push (n);
1879 1.1 mrg }
1880 1.1 mrg
1881 1.1 mrg /* Compute a topological ordering for GRAPH, and return the result. */
1882 1.1 mrg
1883 1.1 mrg static auto_vec<unsigned>
1884 1.1 mrg compute_topo_order (constraint_graph_t graph)
1885 1.1 mrg {
1886 1.1 mrg unsigned int i;
1887 1.1 mrg unsigned int size = graph->size;
1888 1.1 mrg
1889 1.1 mrg auto_sbitmap visited (size);
1890 1.1 mrg bitmap_clear (visited);
1891 1.1 mrg
1892 1.1 mrg /* For the heuristic in add_graph_edge to work optimally make sure to
1893 1.1 mrg first visit the connected component of the graph containing
1894 1.1 mrg ESCAPED. Do this by extracting the connected component
1895 1.1 mrg with ESCAPED and append that to all other components as solve_graph
1896 1.1 mrg pops from the order. */
1897 1.1 mrg auto_vec<unsigned> tail (size);
1898 1.1 mrg topo_visit (graph, tail, visited, find (escaped_id));
1899 1.1 mrg
1900 1.1 mrg auto_vec<unsigned> topo_order (size);
1901 1.1 mrg
1902 1.1 mrg for (i = 0; i != size; ++i)
1903 1.1 mrg if (!bitmap_bit_p (visited, i) && find (i) == i)
1904 1.1 mrg topo_visit (graph, topo_order, visited, i);
1905 1.1 mrg
1906 1.1 mrg topo_order.splice (tail);
1907 1.1 mrg return topo_order;
1908 1.1 mrg }
1909 1.1 mrg
/* Structure used for hash value numbering of pointer equivalence
   classes.  */
1912 1.1 mrg
typedef struct equiv_class_label
{
  /* Cached bitmap_hash of LABELS, for fast hash-table lookup.  */
  hashval_t hashcode;
  /* Equivalence class id assigned to this label set; zero means no
     class has been assigned yet.  */
  unsigned int equivalence_class;
  /* The set of labels this entry stands for.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1920 1.1 mrg
1921 1.1 mrg /* Equiv_class_label hashtable helpers. */
1922 1.1 mrg
struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
{
  /* Hash an entry via its cached hash value.  */
  static inline hashval_t hash (const equiv_class_label *);
  /* Two entries are equal when their label bitmaps are equal.  */
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1929 1.1 mrg
1930 1.1 mrg /* Hash function for a equiv_class_label_t */
1931 1.1 mrg
1932 1.1 mrg inline hashval_t
1933 1.1 mrg equiv_class_hasher::hash (const equiv_class_label *ecl)
1934 1.1 mrg {
1935 1.1 mrg return ecl->hashcode;
1936 1.1 mrg }
1937 1.1 mrg
1938 1.1 mrg /* Equality function for two equiv_class_label_t's. */
1939 1.1 mrg
1940 1.1 mrg inline bool
1941 1.1 mrg equiv_class_hasher::equal (const equiv_class_label *eql1,
1942 1.1 mrg const equiv_class_label *eql2)
1943 1.1 mrg {
1944 1.1 mrg return (eql1->hashcode == eql2->hashcode
1945 1.1 mrg && bitmap_equal_p (eql1->labels, eql2->labels));
1946 1.1 mrg }
1947 1.1 mrg
/* A hashtable for mapping a bitmap of labels->pointer equivalence
   classes.  */
static hash_table<equiv_class_hasher> *pointer_equiv_class_table;

/* A hashtable for mapping a bitmap of labels->location equivalence
   classes.  */
static hash_table<equiv_class_hasher> *location_equiv_class_table;

/* Obstack from which equiv_class_label entries are allocated; released
   wholesale by free_var_substitution_info.  */
struct obstack equiv_class_obstack;
1957 1.1 mrg
/* Look up the equivalence class for the set LABELS in TABLE, inserting
   a fresh entry with equivalence class zero if LABELS has not been seen
   before.  Returns the (possibly new) table entry.  */
1961 1.1 mrg
1962 1.1 mrg static equiv_class_label *
1963 1.1 mrg equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
1964 1.1 mrg bitmap labels)
1965 1.1 mrg {
1966 1.1 mrg equiv_class_label **slot;
1967 1.1 mrg equiv_class_label ecl;
1968 1.1 mrg
1969 1.1 mrg ecl.labels = labels;
1970 1.1 mrg ecl.hashcode = bitmap_hash (labels);
1971 1.1 mrg slot = table->find_slot (&ecl, INSERT);
1972 1.1 mrg if (!*slot)
1973 1.1 mrg {
1974 1.1 mrg *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
1975 1.1 mrg (*slot)->labels = labels;
1976 1.1 mrg (*slot)->hashcode = ecl.hashcode;
1977 1.1 mrg (*slot)->equivalence_class = 0;
1978 1.1 mrg }
1979 1.1 mrg
1980 1.1 mrg return *slot;
1981 1.1 mrg }
1982 1.1 mrg
1983 1.1 mrg /* Perform offline variable substitution.
1984 1.1 mrg
1985 1.1 mrg This is a worst case quadratic time way of identifying variables
1986 1.1 mrg that must have equivalent points-to sets, including those caused by
1987 1.1 mrg static cycles, and single entry subgraphs, in the constraint graph.
1988 1.1 mrg
1989 1.1 mrg The technique is described in "Exploiting Pointer and Location
1990 1.1 mrg Equivalence to Optimize Pointer Analysis. In the 14th International
1991 1.1 mrg Static Analysis Symposium (SAS), August 2007." It is known as the
1992 1.1 mrg "HU" algorithm, and is equivalent to value numbering the collapsed
1993 1.1 mrg constraint graph including evaluating unions.
1994 1.1 mrg
1995 1.1 mrg The general method of finding equivalence classes is as follows:
1996 1.1 mrg Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1997 1.1 mrg Initialize all non-REF nodes to be direct nodes.
1998 1.1 mrg For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1999 1.1 mrg variable}
2000 1.1 mrg For each constraint containing the dereference, we also do the same
2001 1.1 mrg thing.
2002 1.1 mrg
2003 1.1 mrg We then compute SCC's in the graph and unify nodes in the same SCC,
2004 1.1 mrg including pts sets.
2005 1.1 mrg
2006 1.1 mrg For each non-collapsed node x:
2007 1.1 mrg Visit all unvisited explicit incoming edges.
2008 1.1 mrg Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2009 1.1 mrg where y->x.
2010 1.1 mrg Lookup the equivalence class for pts(x).
2011 1.1 mrg If we found one, equivalence_class(x) = found class.
2012 1.1 mrg Otherwise, equivalence_class(x) = new class, and new_class is
2013 1.1 mrg added to the lookup table.
2014 1.1 mrg
2015 1.1 mrg All direct nodes with the same equivalence class can be replaced
2016 1.1 mrg with a single representative node.
2017 1.1 mrg All unlabeled nodes (label == 0) are not pointers and all edges
2018 1.1 mrg involving them can be eliminated.
2019 1.1 mrg We perform these optimizations during rewrite_constraints
2020 1.1 mrg
2021 1.1 mrg In addition to pointer equivalence class finding, we also perform
2022 1.1 mrg location equivalence class finding. This is the set of variables
2023 1.1 mrg that always appear together in points-to sets. We use this to
2024 1.1 mrg compress the size of the points-to sets. */
2025 1.1 mrg
2026 1.1 mrg /* Current maximum pointer equivalence class id. */
2027 1.1 mrg static int pointer_equiv_class;
2028 1.1 mrg
2029 1.1 mrg /* Current maximum location equivalence class id. */
2030 1.1 mrg static int location_equiv_class;
2031 1.1 mrg
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */
2034 1.1 mrg
static void
condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  /* N must be its own representative when we start.  */
  gcc_checking_assert (si->node_mapping[n] == n);
  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the explicit predecessor edges of N.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      /* Nodes whose SCC has already been popped are done.  */
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      /* Propagate the lowest reachable DFS number up to N.  */
      unsigned int t = si->node_mapping[w];
      gcc_checking_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	condense_visit (graph, si, w);

      unsigned int t = si->node_mapping[w];
      gcc_assert (si->node_mapping[n] == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is the root of an SCC: every node still on the stack with a
	 DFS number at least MY_DFS belongs to it.  */
      if (si->scc_stack.length () != 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  /* Find the first node of the SCC and do non-bitmap work.  */
	  bool direct_p = true;
	  unsigned first = si->scc_stack.length ();
	  do
	    {
	      --first;
	      unsigned int w = si->scc_stack[first];
	      si->node_mapping[w] = n;
	      if (!bitmap_bit_p (graph->direct_nodes, w))
		direct_p = false;
	    }
	  while (first > 0
		 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
	  /* One indirect member makes the whole component indirect.  */
	  if (!direct_p)
	    bitmap_clear_bit (graph->direct_nodes, n);

	  /* Want to reduce to node n, push that first.  */
	  si->scc_stack.reserve (1);
	  si->scc_stack.quick_push (si->scc_stack[first]);
	  si->scc_stack[first] = n;

	  /* Merge the members' bitmaps pairwise, halving the number of
	     live sets each round rather than accumulating everything
	     into one node serially.  */
	  unsigned scc_size = si->scc_stack.length () - first;
	  unsigned split = scc_size / 2;
	  unsigned carry = scc_size - split * 2;
	  while (split > 0)
	    {
	      for (unsigned i = 0; i < split; ++i)
		{
		  unsigned a = si->scc_stack[first + i];
		  unsigned b = si->scc_stack[first + split + carry + i];

		  /* Unify our nodes.  */
		  if (graph->preds[b])
		    {
		      if (!graph->preds[a])
			std::swap (graph->preds[a], graph->preds[b]);
		      else
			bitmap_ior_into_and_free (graph->preds[a],
						  &graph->preds[b]);
		    }
		  if (graph->implicit_preds[b])
		    {
		      if (!graph->implicit_preds[a])
			std::swap (graph->implicit_preds[a],
				   graph->implicit_preds[b]);
		      else
			bitmap_ior_into_and_free (graph->implicit_preds[a],
						  &graph->implicit_preds[b]);
		    }
		  if (graph->points_to[b])
		    {
		      if (!graph->points_to[a])
			std::swap (graph->points_to[a], graph->points_to[b]);
		      else
			bitmap_ior_into_and_free (graph->points_to[a],
						  &graph->points_to[b]);
		    }
		}
	      unsigned remain = split + carry;
	      split = remain / 2;
	      carry = remain - split * 2;
	    }
	  /* Actually pop the SCC.  */
	  si->scc_stack.truncate (first);
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    si->scc_stack.safe_push (n);
}
2157 1.1 mrg
2158 1.1 mrg /* Label pointer equivalences.
2159 1.1 mrg
2160 1.1 mrg This performs a value numbering of the constraint graph to
2161 1.1 mrg discover which variables will always have the same points-to sets
2162 1.1 mrg under the current set of constraints.
2163 1.1 mrg
2164 1.1 mrg The way it value numbers is to store the set of points-to bits
2165 1.1 mrg generated by the constraints and graph edges. This is just used as a
2166 1.1 mrg hash and equality comparison. The *actual set of points-to bits* is
2167 1.1 mrg completely irrelevant, in that we don't care about being able to
2168 1.1 mrg extract them later.
2169 1.1 mrg
2170 1.1 mrg The equality values (currently bitmaps) just have to satisfy a few
2171 1.1 mrg constraints, the main ones being:
2172 1.1 mrg 1. The combining operation must be order independent.
2173 1.1 mrg 2. The end result of a given set of operations must be unique iff the
2174 1.1 mrg combination of input values is unique
2175 1.1 mrg 3. Hashable. */
2176 1.1 mrg
static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges' points to sets.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      /* Make sure predecessors are labeled before N.  */
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* Delay allocating a set for N: remember the first
		 non-empty predecessor and allocate only when a second
		 one shows up.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The fresh variable is represented by bit FIRST_REF_NODE + n.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      /* Otherwise label stays 0: N is not a pointer.  */
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  /* The class already exists; share its representative bitmap
	     instead of keeping our own copy.  */
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2261 1.1 mrg
2262 1.1 mrg /* Print the pred graph in dot format. */
2263 1.1 mrg
static void
dump_pred_graph (class scc_info *si, FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, "  node [\n    shape = box\n  ]\n");
  fprintf (file, "  edge [\n    fontsize = \"12\"\n  ]\n");
  fprintf (file, "\n  // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      /* Only SCC representatives are printed.  */
      if (si->node_mapping[i] != i)
	continue;
      /* Nodes >= FIRST_REF_NODE are the *x "ref" copies of node
	 i - FIRST_REF_NODE.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->points_to[i]
	  && !bitmap_empty_p (graph->points_to[i]))
	{
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
	  else
	    fprintf (file, "[label=\"*%s = {",
		     get_varinfo (i - FIRST_REF_NODE)->name);
	  unsigned j;
	  bitmap_iterator bi;
	  EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
	    fprintf (file, " %d", j);
	  fprintf (file, " }\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n  // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (si->node_mapping[i] != i)
	continue;
      /* Predecessor edges are printed as "pred -> i".  */
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
	{
	  unsigned from = si->node_mapping[j];
	  if (from < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (from)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
2336 1.1 mrg
2337 1.1 mrg /* Perform offline variable substitution, discovering equivalence
2338 1.1 mrg classes, and eliminating non-pointer variables. */
2339 1.1 mrg
static class scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  gcc_obstack_init (&equiv_class_obstack);
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  /* Class id 0 is reserved to mean "not a pointer" / "unassigned".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  /* Reuse the visited bitmap for the labeling pass.  */
  bitmap_clear (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* An existing class owns an equal bitmap; ours is redundant.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 means label_visit proved the node can never
	 point to anything.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2470 1.1 mrg
2471 1.1 mrg /* Free information that was only necessary for variable
2472 1.1 mrg substitution. */
2473 1.1 mrg
static void
free_var_substitution_info (class scc_info *si)
{
  /* SI is the scc_info produced by variable substitution; everything
     else released here is file-scope state (the constraint GRAPH's
     per-node label arrays and the equivalence-class tables/obstacks)
     that is only needed while substitution runs.  */
  delete si;
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  /* Deleting the hash tables before freeing the obstack their entries
     live on keeps the destruction order safe.  */
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  obstack_free (&equiv_class_obstack, NULL);
  bitmap_obstack_release (&iteration_obstack);
}
2491 1.1 mrg
2492 1.1 mrg /* Return an existing node that is equivalent to NODE, which has
2493 1.1 mrg equivalence class LABEL, if one exists. Return NODE otherwise. */
2494 1.1 mrg
static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_checking_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
	}
      else
	{
	  /* NODE is the first one seen with this label: record it as
	     the representative for both full and pointer equivalence.  */
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken nodes are only pointer-equivalent, not location
	 equivalent; just record the label so unite_pointer_equivalences
	 can merge them after substitution finishes.  */
      gcc_checking_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
2531 1.1 mrg
2532 1.1 mrg /* Unite pointer equivalent but not location equivalent nodes in
2533 1.1 mrg GRAPH. This may only be performed once variable substitution is
2534 1.1 mrg finished. */
2535 1.1 mrg
2536 1.1 mrg static void
2537 1.1 mrg unite_pointer_equivalences (constraint_graph_t graph)
2538 1.1 mrg {
2539 1.1 mrg unsigned int i;
2540 1.1 mrg
2541 1.1 mrg /* Go through the pointer equivalences and unite them to their
2542 1.1 mrg representative, if they aren't already. */
2543 1.1 mrg for (i = 1; i < FIRST_REF_NODE; i++)
2544 1.1 mrg {
2545 1.1 mrg unsigned int label = graph->pe[i];
2546 1.1 mrg if (label)
2547 1.1 mrg {
2548 1.1 mrg int label_rep = graph->pe_rep[label];
2549 1.1 mrg
2550 1.1 mrg if (label_rep == -1)
2551 1.1 mrg continue;
2552 1.1 mrg
2553 1.1 mrg label_rep = find (label_rep);
2554 1.1 mrg if (label_rep >= 0 && unite (label_rep, find (i)))
2555 1.1 mrg unify_nodes (graph, label_rep, i, false);
2556 1.1 mrg }
2557 1.1 mrg }
2558 1.1 mrg }
2559 1.1 mrg
2560 1.1 mrg /* Move complex constraints to the GRAPH nodes they belong to. */
2561 1.1 mrg
2562 1.1 mrg static void
2563 1.1 mrg move_complex_constraints (constraint_graph_t graph)
2564 1.1 mrg {
2565 1.1 mrg int i;
2566 1.1 mrg constraint_t c;
2567 1.1 mrg
2568 1.1 mrg FOR_EACH_VEC_ELT (constraints, i, c)
2569 1.1 mrg {
2570 1.1 mrg if (c)
2571 1.1 mrg {
2572 1.1 mrg struct constraint_expr lhs = c->lhs;
2573 1.1 mrg struct constraint_expr rhs = c->rhs;
2574 1.1 mrg
2575 1.1 mrg if (lhs.type == DEREF)
2576 1.1 mrg {
2577 1.1 mrg insert_into_complex (graph, lhs.var, c);
2578 1.1 mrg }
2579 1.1 mrg else if (rhs.type == DEREF)
2580 1.1 mrg {
2581 1.1 mrg if (!(get_varinfo (lhs.var)->is_special_var))
2582 1.1 mrg insert_into_complex (graph, rhs.var, c);
2583 1.1 mrg }
2584 1.1 mrg else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2585 1.1 mrg && (lhs.offset != 0 || rhs.offset != 0))
2586 1.1 mrg {
2587 1.1 mrg insert_into_complex (graph, rhs.var, c);
2588 1.1 mrg }
2589 1.1 mrg }
2590 1.1 mrg }
2591 1.1 mrg }
2592 1.1 mrg
2593 1.1 mrg
2594 1.1 mrg /* Optimize and rewrite complex constraints while performing
2595 1.1 mrg collapsing of equivalent nodes. SI is the SCC_INFO that is the
2596 1.1 mrg result of perform_variable_substitution. */
2597 1.1 mrg
static void
rewrite_constraints (constraint_graph_t graph,
		     class scc_info *si)
{
  int i;
  constraint_t c;

  /* At this point unification has finished, so every node should be
     its own representative.  */
  if (flag_checking)
    {
      for (unsigned int j = 0; j < graph->size; j++)
	gcc_assert (find (j) == j);
    }

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (lhs.var);
      unsigned int rhsvar = find (rhs.var);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      /* Map both sides through the condensed (SCC) graph to fetch
	 their pointer-equivalence labels.  */
      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  A zero pointer label marks a non-pointer.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  /* Drop the constraint; later passes skip NULL entries.  */
	  constraints[i] = NULL;
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable, "
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	      fprintf (dump_file, "\n");
	    }
	  constraints[i] = NULL;
	  continue;
	}

      /* Collapse each side onto an equivalent node, if one exists,
	 and rewrite the constraint in place.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;
    }
}
2663 1.1 mrg
2664 1.1 mrg /* Eliminate indirect cycles involving NODE. Return true if NODE was
2665 1.1 mrg part of an SCC, false otherwise. */
2666 1.1 mrg
static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      auto_vec<unsigned> queue;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it.  */

      /* Phase 1: union-find-merge every representative in NODE's
	 solution into TO, remembering which ones actually merged.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		queue.safe_push (i);
	    }
	}

      /* Phase 2: now that iteration over the solution bitmap is done,
	 it is safe to let unify_nodes merge the solution sets.  */
      for (queuepos = 0;
	   queue.iterate (queuepos, &i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      return true;
    }
  return false;
}
2702 1.1 mrg
2703 1.1 mrg /* Solve the constraint graph GRAPH using our worklist solver.
2704 1.1 mrg This is based on the PW* family of solvers from the "Efficient Field
2705 1.1 mrg Sensitive Pointer Analysis for C" paper.
2706 1.1 mrg It works by iterating over all the graph nodes, processing the complex
2707 1.1 mrg constraints and propagating the copy constraints, until everything stops
2708 1.1 mrg changed. This corresponds to steps 6-8 in the solving list given above. */
2709 1.1 mrg
static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  A node only
     needs processing if it has a non-empty solution and either
     successors or complex constraints to feed it into.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  /* Iterate until a fixed point: no node's solution grew.  */
  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in (reverse-popped) topological order to
	 propagate in as few iterations as possible.  */
      auto_vec<unsigned> topo_order = compute_topo_order (graph);
      while (topo_order.length () != 0)
	{
	  i = topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
		 is in the solution just propagate that.  PTS ends up
		 holding only the delta since the last visit.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      if (bitmap_empty_p (pts))
		continue;

	      /* Remember what we have already propagated.  The old
		 solution is lazily allocated on first growth.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts, &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.  TO_REMOVE
		     defers edge removal so we never delete the bit the
		     iterator is currently standing on.  */
		  unsigned to_remove = ~0U;
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      if (to_remove != ~0U)
			{
			  bitmap_clear_bit (graph->succs[i], to_remove);
			  to_remove = ~0U;
			}
		      unsigned int to = find (j);
		      if (to != j)
			{
			  /* Update the succ graph, avoiding duplicate
			     work.  */
			  to_remove = j;
			  if (! bitmap_set_bit (graph->succs[i], to))
			    continue;
			  /* We eventually end up processing 'to' twice
			     as it is undefined whether bitmap iteration
			     iterates over bits set during iteration.
			     Play safe instead of doing tricks.  */
			}
		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      bitmap tmp = get_varinfo (to)->solution;
		      bool flag = false;

		      /* If we propagate from ESCAPED use ESCAPED as
			 placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Only re-queue the successor if its solution
			 actually grew.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		  if (to_remove != ~0U)
		    bitmap_clear_bit (graph->succs[i], to_remove);
		}
	    }
	}
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2874 1.1 mrg
2875 1.1 mrg /* Map from trees to variable infos. */
2876 1.1 mrg static hash_map<tree, varinfo_t> *vi_for_tree;
2877 1.1 mrg
2878 1.1 mrg
2879 1.1 mrg /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2880 1.1 mrg
2881 1.1 mrg static void
2882 1.1 mrg insert_vi_for_tree (tree t, varinfo_t vi)
2883 1.1 mrg {
2884 1.1 mrg gcc_assert (vi);
2885 1.1 mrg bool existed = vi_for_tree->put (t, vi);
2886 1.1 mrg gcc_assert (!existed);
2887 1.1 mrg }
2888 1.1 mrg
2889 1.1 mrg /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2890 1.1 mrg exist in the map, return NULL, otherwise, return the varinfo we found. */
2891 1.1 mrg
2892 1.1 mrg static varinfo_t
2893 1.1 mrg lookup_vi_for_tree (tree t)
2894 1.1 mrg {
2895 1.1 mrg varinfo_t *slot = vi_for_tree->get (t);
2896 1.1 mrg if (slot == NULL)
2897 1.1 mrg return NULL;
2898 1.1 mrg
2899 1.1 mrg return *slot;
2900 1.1 mrg }
2901 1.1 mrg
2902 1.1 mrg /* Return a printable name for DECL */
2903 1.1 mrg
static const char *
alias_get_name (tree decl)
{
  /* Names are only used for dumps, so skip the work (and the GC
     allocation below) entirely when not dumping.  */
  const char *res = "NULL";
  if (dump_file)
    {
      char *temp = NULL;
      if (TREE_CODE (decl) == SSA_NAME)
	{
	  /* SSA names are printed as basename_version, e.g. "i_3".  */
	  res = get_name (decl);
	  temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
	}
      else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
	       && DECL_ASSEMBLER_NAME_SET_P (decl))
	res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
      else if (DECL_P (decl))
	{
	  res = get_name (decl);
	  if (!res)
	    /* Anonymous decls get a UID-based name, e.g. "D.1234".  */
	    temp = xasprintf ("D.%u", DECL_UID (decl));
	}

      if (temp)
	{
	  /* Persist the formatted name in GC memory and release the
	     malloc'd buffer that xasprintf returned.  */
	  res = ggc_strdup (temp);
	  free (temp);
	}
    }

  return res;
}
2935 1.1 mrg
2936 1.1 mrg /* Find the variable id for tree T in the map.
2937 1.1 mrg If T doesn't exist in the map, create an entry for it and return it. */
2938 1.1 mrg
2939 1.1 mrg static varinfo_t
2940 1.1 mrg get_vi_for_tree (tree t)
2941 1.1 mrg {
2942 1.1 mrg varinfo_t *slot = vi_for_tree->get (t);
2943 1.1 mrg if (slot == NULL)
2944 1.1 mrg {
2945 1.1 mrg unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2946 1.1 mrg return get_varinfo (id);
2947 1.1 mrg }
2948 1.1 mrg
2949 1.1 mrg return *slot;
2950 1.1 mrg }
2951 1.1 mrg
2952 1.1 mrg /* Get a scalar constraint expression for a new temporary variable. */
2953 1.1 mrg
2954 1.1 mrg static struct constraint_expr
2955 1.1 mrg new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2956 1.1 mrg {
2957 1.1 mrg struct constraint_expr tmp;
2958 1.1 mrg varinfo_t vi;
2959 1.1 mrg
2960 1.1 mrg vi = new_var_info (NULL_TREE, name, add_id);
2961 1.1 mrg vi->offset = 0;
2962 1.1 mrg vi->size = -1;
2963 1.1 mrg vi->fullsize = -1;
2964 1.1 mrg vi->is_full_var = 1;
2965 1.1 mrg vi->is_reg_var = 1;
2966 1.1 mrg
2967 1.1 mrg tmp.var = vi->id;
2968 1.1 mrg tmp.type = SCALAR;
2969 1.1 mrg tmp.offset = 0;
2970 1.1 mrg
2971 1.1 mrg return tmp;
2972 1.1 mrg }
2973 1.1 mrg
/* Get a constraint expression vector from an SSA_VAR_P node.
   If ADDRESS_P is true, the caller will take the address of the result.  */
2976 1.1 mrg
static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      /* For parameters, get at the points-to set for the actual parm
	 decl.  */
      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
	{
	  get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
	  return;
	}
      /* For undefined SSA names return nothing.  */
      else if (!ssa_defined_default_def_p (t))
	{
	  /* NOTHING points to nothing, so the name contributes no
	     points-to bits.  */
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	  return;
	}
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this is decl may bind to NULL note that.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Walk the field chain starting at VI; each field gets its own
	 constraint expression.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
3057 1.1 mrg
3058 1.1 mrg /* Process constraint T, performing various simplifications and then
3059 1.1 mrg adding it to our list of overall constraints. */
3060 1.1 mrg
static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < varmap.length ());
  gcc_assert (lhs.var < varmap.length ());

  /* If we didn't get any useful constraint from the lhs we get
     &ANYTHING as fallback from get_constraint_for.  Deal with
     it here by turning it into *ANYTHING.  */
  if (lhs.type == ADDRESSOF
      && lhs.var == anything_id)
    lhs.type = DEREF;

  /* ADDRESSOF on the lhs is invalid.  */
  gcc_assert (lhs.type != ADDRESSOF);

  /* We shouldn't add constraints from things that cannot have pointers.
     It's not completely trivial to avoid in the callers, so do it here.  */
  if (rhs.type != ADDRESSOF
      && !get_varinfo (rhs.var)->may_have_pointers)
    return;

  /* Likewise adding to the solution of a non-pointer var isn't useful.  */
  if (!get_varinfo (lhs.var)->may_have_pointers)
    return;

  /* This can happen in our IR with things like n->a = *p */
  if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp and recursively process
	 both halves; the solver only handles a single DEREF per
	 constraint.  */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      struct constraint_expr tmplhs;
      tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      if (rhs.type == ADDRESSOF)
	/* Taking an address marks the whole variable (its head field)
	   as address-taken.  */
	get_varinfo (get_varinfo (rhs.var)->head)->address_taken = true;
      constraints.safe_push (t);
    }
}
3115 1.1 mrg
3116 1.1 mrg
3117 1.1 mrg /* Return the position, in bits, of FIELD_DECL from the beginning of its
3118 1.1 mrg structure. */
3119 1.1 mrg
3120 1.1 mrg static HOST_WIDE_INT
3121 1.1 mrg bitpos_of_field (const tree fdecl)
3122 1.1 mrg {
3123 1.1 mrg if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3124 1.1 mrg || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3125 1.1 mrg return -1;
3126 1.1 mrg
3127 1.1 mrg return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3128 1.1 mrg + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3129 1.1 mrg }
3130 1.1 mrg
3131 1.1 mrg
3132 1.1 mrg /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3133 1.1 mrg resulting constraint expressions in *RESULTS. */
3134 1.1 mrg
static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  The multiplication is
	     done unsigned to avoid signed overflow; the division check
	     detects whether the bit offset overflowed.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Push every other field of the variable; the original entry
	     already covers C.var itself.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (temp);
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  /* For scalars the offset is carried symbolically in the
	     constraint and resolved by the solver.  */
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3250 1.1 mrg
3251 1.1 mrg
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If ADDRESS_P is true, the caller will take the address of the result.
   If LHS_P is true then the constraint expression is assumed to be used
   as the lhs.  */
3256 1.1 mrg
3257 1.1 mrg static void
3258 1.1 mrg get_constraint_for_component_ref (tree t, vec<ce_s> *results,
3259 1.1 mrg bool address_p, bool lhs_p)
3260 1.1 mrg {
3261 1.1 mrg tree orig_t = t;
3262 1.1 mrg poly_int64 bitsize = -1;
3263 1.1 mrg poly_int64 bitmaxsize = -1;
3264 1.1 mrg poly_int64 bitpos;
3265 1.1 mrg bool reverse;
3266 1.1 mrg tree forzero;
3267 1.1 mrg
3268 1.1 mrg /* Some people like to do cute things like take the address of
3269 1.1 mrg &0->a.b */
3270 1.1 mrg forzero = t;
3271 1.1 mrg while (handled_component_p (forzero)
3272 1.1 mrg || INDIRECT_REF_P (forzero)
3273 1.1 mrg || TREE_CODE (forzero) == MEM_REF)
3274 1.1 mrg forzero = TREE_OPERAND (forzero, 0);
3275 1.1 mrg
3276 1.1 mrg if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
3277 1.1 mrg {
3278 1.1 mrg struct constraint_expr temp;
3279 1.1 mrg
3280 1.1 mrg temp.offset = 0;
3281 1.1 mrg temp.var = integer_id;
3282 1.1 mrg temp.type = SCALAR;
3283 1.1 mrg results->safe_push (temp);
3284 1.1 mrg return;
3285 1.1 mrg }
3286 1.1 mrg
3287 1.1 mrg t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);
3288 1.1 mrg
3289 1.1 mrg /* We can end up here for component references on a
3290 1.1 mrg VIEW_CONVERT_EXPR <>(&foobar) or things like a
3291 1.1 mrg BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>. So for
3292 1.1 mrg symbolic constants simply give up. */
3293 1.1 mrg if (TREE_CODE (t) == ADDR_EXPR)
3294 1.1 mrg {
3295 1.1 mrg constraint_expr result;
3296 1.1 mrg result.type = SCALAR;
3297 1.1 mrg result.var = anything_id;
3298 1.1 mrg result.offset = 0;
3299 1.1 mrg results->safe_push (result);
3300 1.1 mrg return;
3301 1.1 mrg }
3302 1.1 mrg
3303 1.1 mrg /* Avoid creating pointer-offset constraints, so handle MEM_REF
3304 1.1 mrg offsets directly. Pretend to take the address of the base,
3305 1.1 mrg we'll take care of adding the required subset of sub-fields below. */
3306 1.1 mrg if (TREE_CODE (t) == MEM_REF
3307 1.1 mrg && !integer_zerop (TREE_OPERAND (t, 0)))
3308 1.1 mrg {
3309 1.1 mrg poly_offset_int off = mem_ref_offset (t);
3310 1.1 mrg off <<= LOG2_BITS_PER_UNIT;
3311 1.1 mrg off += bitpos;
3312 1.1 mrg poly_int64 off_hwi;
3313 1.1 mrg if (off.to_shwi (&off_hwi))
3314 1.1 mrg bitpos = off_hwi;
3315 1.1 mrg else
3316 1.1 mrg {
3317 1.1 mrg bitpos = 0;
3318 1.1 mrg bitmaxsize = -1;
3319 1.1 mrg }
3320 1.1 mrg get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
3321 1.1 mrg do_deref (results);
3322 1.1 mrg }
3323 1.1 mrg else
3324 1.1 mrg get_constraint_for_1 (t, results, true, lhs_p);
3325 1.1 mrg
3326 1.1 mrg /* Strip off nothing_id. */
3327 1.1 mrg if (results->length () == 2)
3328 1.1 mrg {
3329 1.1 mrg gcc_assert ((*results)[0].var == nothing_id);
3330 1.1 mrg results->unordered_remove (0);
3331 1.1 mrg }
3332 1.1 mrg gcc_assert (results->length () == 1);
3333 1.1 mrg struct constraint_expr &result = results->last ();
3334 1.1 mrg
3335 1.1 mrg if (result.type == SCALAR
3336 1.1 mrg && get_varinfo (result.var)->is_full_var)
3337 1.1 mrg /* For single-field vars do not bother about the offset. */
3338 1.1 mrg result.offset = 0;
3339 1.1 mrg else if (result.type == SCALAR)
3340 1.1 mrg {
3341 1.1 mrg /* In languages like C, you can access one past the end of an
3342 1.1 mrg array. You aren't allowed to dereference it, so we can
3343 1.1 mrg ignore this constraint. When we handle pointer subtraction,
3344 1.1 mrg we may have to do something cute here. */
3345 1.1 mrg
3346 1.1 mrg if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
3347 1.1 mrg && maybe_ne (bitmaxsize, 0))
3348 1.1 mrg {
3349 1.1 mrg /* It's also not true that the constraint will actually start at the
3350 1.1 mrg right offset, it may start in some padding. We only care about
3351 1.1 mrg setting the constraint to the first actual field it touches, so
3352 1.1 mrg walk to find it. */
3353 1.1 mrg struct constraint_expr cexpr = result;
3354 1.1 mrg varinfo_t curr;
3355 1.1 mrg results->pop ();
3356 1.1 mrg cexpr.offset = 0;
3357 1.1 mrg for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
3358 1.1 mrg {
3359 1.1 mrg if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
3360 1.1 mrg curr->size, bitpos, bitmaxsize))
3361 1.1 mrg {
3362 1.1 mrg cexpr.var = curr->id;
3363 1.1 mrg results->safe_push (cexpr);
3364 1.1 mrg if (address_p)
3365 1.1 mrg break;
3366 1.1 mrg }
3367 1.1 mrg }
3368 1.1 mrg /* If we are going to take the address of this field then
3369 1.1 mrg to be able to compute reachability correctly add at least
3370 1.1 mrg the last field of the variable. */
3371 1.1 mrg if (address_p && results->length () == 0)
3372 1.1 mrg {
3373 1.1 mrg curr = get_varinfo (cexpr.var);
3374 1.1 mrg while (curr->next != 0)
3375 1.1 mrg curr = vi_next (curr);
3376 1.1 mrg cexpr.var = curr->id;
3377 1.1 mrg results->safe_push (cexpr);
3378 1.1 mrg }
3379 1.1 mrg else if (results->length () == 0)
3380 1.1 mrg /* Assert that we found *some* field there. The user couldn't be
3381 1.1 mrg accessing *only* padding. */
3382 1.1 mrg /* Still the user could access one past the end of an array
3383 1.1 mrg embedded in a struct resulting in accessing *only* padding. */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
3386 1.1 mrg {
3387 1.1 mrg cexpr.type = SCALAR;
3388 1.1 mrg cexpr.var = anything_id;
3389 1.1 mrg cexpr.offset = 0;
3390 1.1 mrg results->safe_push (cexpr);
3391 1.1 mrg }
3392 1.1 mrg }
3393 1.1 mrg else if (known_eq (bitmaxsize, 0))
3394 1.1 mrg {
3395 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS))
3396 1.1 mrg fprintf (dump_file, "Access to zero-sized part of variable, "
3397 1.1 mrg "ignoring\n");
3398 1.1 mrg }
3399 1.1 mrg else
3400 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS))
3401 1.1 mrg fprintf (dump_file, "Access to past the end of variable, ignoring\n");
3402 1.1 mrg }
3403 1.1 mrg else if (result.type == DEREF)
3404 1.1 mrg {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
3408 1.1 mrg HOST_WIDE_INT const_bitpos;
3409 1.1 mrg if (!bitpos.is_constant (&const_bitpos)
3410 1.1 mrg || const_bitpos == -1
3411 1.1 mrg || maybe_ne (bitsize, bitmaxsize)
3412 1.1 mrg || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
3413 1.1 mrg || result.offset == UNKNOWN_OFFSET)
3414 1.1 mrg result.offset = UNKNOWN_OFFSET;
3415 1.1 mrg else
3416 1.1 mrg result.offset += const_bitpos;
3417 1.1 mrg }
3418 1.1 mrg else if (result.type == ADDRESSOF)
3419 1.1 mrg {
3420 1.1 mrg /* We can end up here for component references on constants like
3421 1.1 mrg VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i]. */
3422 1.1 mrg result.type = SCALAR;
3423 1.1 mrg result.var = anything_id;
3424 1.1 mrg result.offset = 0;
3425 1.1 mrg }
3426 1.1 mrg else
3427 1.1 mrg gcc_unreachable ();
3428 1.1 mrg }
3429 1.1 mrg
3430 1.1 mrg
3431 1.1 mrg /* Dereference the constraint expression CONS, and return the result.
3432 1.1 mrg DEREF (ADDRESSOF) = SCALAR
3433 1.1 mrg DEREF (SCALAR) = DEREF
3434 1.1 mrg DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3435 1.1 mrg This is needed so that we can handle dereferencing DEREF constraints. */
3436 1.1 mrg
3437 1.1 mrg static void
3438 1.1 mrg do_deref (vec<ce_s> *constraints)
3439 1.1 mrg {
3440 1.1 mrg struct constraint_expr *c;
3441 1.1 mrg unsigned int i = 0;
3442 1.1 mrg
3443 1.1 mrg FOR_EACH_VEC_ELT (*constraints, i, c)
3444 1.1 mrg {
3445 1.1 mrg if (c->type == SCALAR)
3446 1.1 mrg c->type = DEREF;
3447 1.1 mrg else if (c->type == ADDRESSOF)
3448 1.1 mrg c->type = SCALAR;
3449 1.1 mrg else if (c->type == DEREF)
3450 1.1 mrg {
3451 1.1 mrg struct constraint_expr tmplhs;
3452 1.1 mrg tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3453 1.1 mrg process_constraint (new_constraint (tmplhs, *c));
3454 1.1 mrg c->var = tmplhs.var;
3455 1.1 mrg }
3456 1.1 mrg else
3457 1.1 mrg gcc_unreachable ();
3458 1.1 mrg }
3459 1.1 mrg }
3460 1.1 mrg
3461 1.1 mrg /* Given a tree T, return the constraint expression for taking the
3462 1.1 mrg address of it. */
3463 1.1 mrg
3464 1.1 mrg static void
3465 1.1 mrg get_constraint_for_address_of (tree t, vec<ce_s> *results)
3466 1.1 mrg {
3467 1.1 mrg struct constraint_expr *c;
3468 1.1 mrg unsigned int i;
3469 1.1 mrg
3470 1.1 mrg get_constraint_for_1 (t, results, true, true);
3471 1.1 mrg
3472 1.1 mrg FOR_EACH_VEC_ELT (*results, i, c)
3473 1.1 mrg {
3474 1.1 mrg if (c->type == DEREF)
3475 1.1 mrg c->type = SCALAR;
3476 1.1 mrg else
3477 1.1 mrg c->type = ADDRESSOF;
3478 1.1 mrg }
3479 1.1 mrg }
3480 1.1 mrg
/* Given a tree T, compute into *RESULTS the constraint expressions
   for it.  ADDRESS_P is true when the caller will take the address of
   the result (it is threaded to get_constraint_for_ssa_var and used to
   skip expanding subvariables in the MEM_REF case).  LHS_P is true
   when T is used on the left-hand side of a constraint — presumably
   distinguishing store from load handling in the component-ref path;
   confirm against get_constraint_for_component_ref.  */

static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class first, then on the code itself.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  /* Push every subvariable whose start lies within
		     the size of the accessed type.  */
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-convert does not change the pointed-to object;
	       just recurse on the operand.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Collect the union of the constraints for all elements.  */
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3668 1.1 mrg
3669 1.1 mrg /* Given a gimple tree T, return the constraint expression vector for it. */
3670 1.1 mrg
3671 1.1 mrg static void
3672 1.1 mrg get_constraint_for (tree t, vec<ce_s> *results)
3673 1.1 mrg {
3674 1.1 mrg gcc_assert (results->length () == 0);
3675 1.1 mrg
3676 1.1 mrg get_constraint_for_1 (t, results, false, true);
3677 1.1 mrg }
3678 1.1 mrg
3679 1.1 mrg /* Given a gimple tree T, return the constraint expression vector for it
3680 1.1 mrg to be used as the rhs of a constraint. */
3681 1.1 mrg
3682 1.1 mrg static void
3683 1.1 mrg get_constraint_for_rhs (tree t, vec<ce_s> *results)
3684 1.1 mrg {
3685 1.1 mrg gcc_assert (results->length () == 0);
3686 1.1 mrg
3687 1.1 mrg get_constraint_for_1 (t, results, false, false);
3688 1.1 mrg }
3689 1.1 mrg
3690 1.1 mrg
3691 1.1 mrg /* Efficiently generates constraints from all entries in *RHSC to all
3692 1.1 mrg entries in *LHSC. */
3693 1.1 mrg
3694 1.1 mrg static void
3695 1.1 mrg process_all_all_constraints (const vec<ce_s> &lhsc,
3696 1.1 mrg const vec<ce_s> &rhsc)
3697 1.1 mrg {
3698 1.1 mrg struct constraint_expr *lhsp, *rhsp;
3699 1.1 mrg unsigned i, j;
3700 1.1 mrg
3701 1.1 mrg if (lhsc.length () <= 1 || rhsc.length () <= 1)
3702 1.1 mrg {
3703 1.1 mrg FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3704 1.1 mrg FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3705 1.1 mrg process_constraint (new_constraint (*lhsp, *rhsp));
3706 1.1 mrg }
3707 1.1 mrg else
3708 1.1 mrg {
3709 1.1 mrg struct constraint_expr tmp;
3710 1.1 mrg tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3711 1.1 mrg FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3712 1.1 mrg process_constraint (new_constraint (tmp, *rhsp));
3713 1.1 mrg FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3714 1.1 mrg process_constraint (new_constraint (*lhsp, tmp));
3715 1.1 mrg }
3716 1.1 mrg }
3717 1.1 mrg
/* Handle aggregate copies LHSOP = RHSOP by expanding into copies of
   the respective fields of the structures.  When either side is an
   indirect access (or lhs is &ANYTHING) fall back to an all-to-all
   copy with unknown offsets; otherwise pair up overlapping sub-fields
   of both sides.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      /* Indirect accesses: we cannot match fields, so make each
	 constraint cover all offsets and connect everything.  */
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      /* If either access has a non-constant base offset or size we
	 cannot pair fields precisely; be conservative.  */
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk both field lists in lockstep; j indexes lhs fields and
	 k indexes rhs fields.  Emit a copy for each overlapping pair,
	 advancing whichever side's current field ends first.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance the rhs cursor when its current field ends before
	     the lhs field (in the rhs access's frame of reference),
	     otherwise advance the lhs cursor.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3792 1.1 mrg
3793 1.1 mrg /* Create constraints ID = { rhsc }. */
3794 1.1 mrg
3795 1.1 mrg static void
3796 1.1 mrg make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
3797 1.1 mrg {
3798 1.1 mrg struct constraint_expr *c;
3799 1.1 mrg struct constraint_expr includes;
3800 1.1 mrg unsigned int j;
3801 1.1 mrg
3802 1.1 mrg includes.var = id;
3803 1.1 mrg includes.offset = 0;
3804 1.1 mrg includes.type = SCALAR;
3805 1.1 mrg
3806 1.1 mrg FOR_EACH_VEC_ELT (rhsc, j, c)
3807 1.1 mrg process_constraint (new_constraint (includes, *c));
3808 1.1 mrg }
3809 1.1 mrg
3810 1.1 mrg /* Create a constraint ID = OP. */
3811 1.1 mrg
3812 1.1 mrg static void
3813 1.1 mrg make_constraint_to (unsigned id, tree op)
3814 1.1 mrg {
3815 1.1 mrg auto_vec<ce_s> rhsc;
3816 1.1 mrg get_constraint_for_rhs (op, &rhsc);
3817 1.1 mrg make_constraints_to (id, rhsc);
3818 1.1 mrg }
3819 1.1 mrg
3820 1.1 mrg /* Create a constraint ID = &FROM. */
3821 1.1 mrg
3822 1.1 mrg static void
3823 1.1 mrg make_constraint_from (varinfo_t vi, int from)
3824 1.1 mrg {
3825 1.1 mrg struct constraint_expr lhs, rhs;
3826 1.1 mrg
3827 1.1 mrg lhs.var = vi->id;
3828 1.1 mrg lhs.offset = 0;
3829 1.1 mrg lhs.type = SCALAR;
3830 1.1 mrg
3831 1.1 mrg rhs.var = from;
3832 1.1 mrg rhs.offset = 0;
3833 1.1 mrg rhs.type = ADDRESSOF;
3834 1.1 mrg process_constraint (new_constraint (lhs, rhs));
3835 1.1 mrg }
3836 1.1 mrg
3837 1.1 mrg /* Create a constraint ID = FROM. */
3838 1.1 mrg
3839 1.1 mrg static void
3840 1.1 mrg make_copy_constraint (varinfo_t vi, int from)
3841 1.1 mrg {
3842 1.1 mrg struct constraint_expr lhs, rhs;
3843 1.1 mrg
3844 1.1 mrg lhs.var = vi->id;
3845 1.1 mrg lhs.offset = 0;
3846 1.1 mrg lhs.type = SCALAR;
3847 1.1 mrg
3848 1.1 mrg rhs.var = from;
3849 1.1 mrg rhs.offset = 0;
3850 1.1 mrg rhs.type = SCALAR;
3851 1.1 mrg process_constraint (new_constraint (lhs, rhs));
3852 1.1 mrg }
3853 1.1 mrg
3854 1.1 mrg /* Make constraints necessary to make OP escape. */
3855 1.1 mrg
3856 1.1 mrg static void
3857 1.1 mrg make_escape_constraint (tree op)
3858 1.1 mrg {
3859 1.1 mrg make_constraint_to (escaped_id, op);
3860 1.1 mrg }
3861 1.1 mrg
3862 1.1 mrg /* Make constraint necessary to make all indirect references
3863 1.1 mrg from VI escape. */
3864 1.1 mrg
3865 1.1 mrg static void
3866 1.1 mrg make_indirect_escape_constraint (varinfo_t vi)
3867 1.1 mrg {
3868 1.1 mrg struct constraint_expr lhs, rhs;
3869 1.1 mrg /* escaped = *(VAR + UNKNOWN); */
3870 1.1 mrg lhs.type = SCALAR;
3871 1.1 mrg lhs.var = escaped_id;
3872 1.1 mrg lhs.offset = 0;
3873 1.1 mrg rhs.type = DEREF;
3874 1.1 mrg rhs.var = vi->id;
3875 1.1 mrg rhs.offset = UNKNOWN_OFFSET;
3876 1.1 mrg process_constraint (new_constraint (lhs, rhs));
3877 1.1 mrg }
3878 1.1 mrg
3879 1.1 mrg /* Add constraints to that the solution of VI is transitively closed. */
3880 1.1 mrg
3881 1.1 mrg static void
3882 1.1 mrg make_transitive_closure_constraints (varinfo_t vi)
3883 1.1 mrg {
3884 1.1 mrg struct constraint_expr lhs, rhs;
3885 1.1 mrg
3886 1.1 mrg /* VAR = *(VAR + UNKNOWN); */
3887 1.1 mrg lhs.type = SCALAR;
3888 1.1 mrg lhs.var = vi->id;
3889 1.1 mrg lhs.offset = 0;
3890 1.1 mrg rhs.type = DEREF;
3891 1.1 mrg rhs.var = vi->id;
3892 1.1 mrg rhs.offset = UNKNOWN_OFFSET;
3893 1.1 mrg process_constraint (new_constraint (lhs, rhs));
3894 1.1 mrg }
3895 1.1 mrg
3896 1.1 mrg /* Add constraints to that the solution of VI has all subvariables added. */
3897 1.1 mrg
3898 1.1 mrg static void
3899 1.1 mrg make_any_offset_constraints (varinfo_t vi)
3900 1.1 mrg {
3901 1.1 mrg struct constraint_expr lhs, rhs;
3902 1.1 mrg
3903 1.1 mrg /* VAR = VAR + UNKNOWN; */
3904 1.1 mrg lhs.type = SCALAR;
3905 1.1 mrg lhs.var = vi->id;
3906 1.1 mrg lhs.offset = 0;
3907 1.1 mrg rhs.type = SCALAR;
3908 1.1 mrg rhs.var = vi->id;
3909 1.1 mrg rhs.offset = UNKNOWN_OFFSET;
3910 1.1 mrg process_constraint (new_constraint (lhs, rhs));
3911 1.1 mrg }
3912 1.1 mrg
/* Temporary storage for fake var decls built by build_fake_var_decl;
   kept out of GC memory and released as a whole.  */
struct obstack fake_var_decl_obstack;
3915 1.1 mrg
/* Build a fake VAR_DECL of TYPE acting as referrer to a DECL_UID.  */

static tree
build_fake_var_decl (tree type)
{
  /* Allocate raw storage on the dedicated obstack rather than via the
     normal tree allocators; these decls never enter the IL.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* PT UID of -1 — presumably a sentinel meaning "no separate
     points-to UID assigned"; confirm against DECL_PT_UID users.  */
  SET_DECL_PT_UID (decl, -1);
  layout_decl (decl, 0);
  return decl;
}
3930 1.1 mrg
3931 1.1 mrg /* Create a new artificial heap variable with NAME.
3932 1.1 mrg Return the created variable. */
3933 1.1 mrg
3934 1.1 mrg static varinfo_t
3935 1.1 mrg make_heapvar (const char *name, bool add_id)
3936 1.1 mrg {
3937 1.1 mrg varinfo_t vi;
3938 1.1 mrg tree heapvar;
3939 1.1 mrg
3940 1.1 mrg heapvar = build_fake_var_decl (ptr_type_node);
3941 1.1 mrg DECL_EXTERNAL (heapvar) = 1;
3942 1.1 mrg
3943 1.1 mrg vi = new_var_info (heapvar, name, add_id);
3944 1.1 mrg vi->is_heap_var = true;
3945 1.1 mrg vi->is_unknown_size_var = true;
3946 1.1 mrg vi->offset = 0;
3947 1.1 mrg vi->fullsize = ~0;
3948 1.1 mrg vi->size = ~0;
3949 1.1 mrg vi->is_full_var = true;
3950 1.1 mrg insert_vi_for_tree (heapvar, vi);
3951 1.1 mrg
3952 1.1 mrg return vi;
3953 1.1 mrg }
3954 1.1 mrg
3955 1.1 mrg /* Create a new artificial heap variable with NAME and make a
3956 1.1 mrg constraint from it to LHS. Set flags according to a tag used
3957 1.1 mrg for tracking restrict pointers. */
3958 1.1 mrg
3959 1.1 mrg static varinfo_t
3960 1.1 mrg make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3961 1.1 mrg {
3962 1.1 mrg varinfo_t vi = make_heapvar (name, add_id);
3963 1.1 mrg vi->is_restrict_var = 1;
3964 1.1 mrg vi->is_global_var = 1;
3965 1.1 mrg vi->may_have_pointers = 1;
3966 1.1 mrg make_constraint_from (lhs, vi->id);
3967 1.1 mrg return vi;
3968 1.1 mrg }
3969 1.1 mrg
3970 1.1 mrg /* Create a new artificial heap variable with NAME and make a
3971 1.1 mrg constraint from it to LHS. Set flags according to a tag used
3972 1.1 mrg for tracking restrict pointers and make the artificial heap
3973 1.1 mrg point to global memory. */
3974 1.1 mrg
3975 1.1 mrg static varinfo_t
3976 1.1 mrg make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3977 1.1 mrg bool add_id)
3978 1.1 mrg {
3979 1.1 mrg varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3980 1.1 mrg make_copy_constraint (vi, nonlocal_id);
3981 1.1 mrg return vi;
3982 1.1 mrg }
3983 1.1 mrg
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  These constants are the
   sub-variable offsets of those parts.  */

enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3992 1.1 mrg
3993 1.1 mrg /* Get a constraint for the requested part of a function designator FI
3994 1.1 mrg when operating in IPA mode. */
3995 1.1 mrg
3996 1.1 mrg static struct constraint_expr
3997 1.1 mrg get_function_part_constraint (varinfo_t fi, unsigned part)
3998 1.1 mrg {
3999 1.1 mrg struct constraint_expr c;
4000 1.1 mrg
4001 1.1 mrg gcc_assert (in_ipa_mode);
4002 1.1 mrg
4003 1.1 mrg if (fi->id == anything_id)
4004 1.1 mrg {
4005 1.1 mrg /* ??? We probably should have a ANYFN special variable. */
4006 1.1 mrg c.var = anything_id;
4007 1.1 mrg c.offset = 0;
4008 1.1 mrg c.type = SCALAR;
4009 1.1 mrg }
4010 1.1 mrg else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
4011 1.1 mrg {
4012 1.1 mrg varinfo_t ai = first_vi_for_offset (fi, part);
4013 1.1 mrg if (ai)
4014 1.1 mrg c.var = ai->id;
4015 1.1 mrg else
4016 1.1 mrg c.var = anything_id;
4017 1.1 mrg c.offset = 0;
4018 1.1 mrg c.type = SCALAR;
4019 1.1 mrg }
4020 1.1 mrg else
4021 1.1 mrg {
4022 1.1 mrg c.var = fi->id;
4023 1.1 mrg c.offset = part;
4024 1.1 mrg c.type = DEREF;
4025 1.1 mrg }
4026 1.1 mrg
4027 1.1 mrg return c;
4028 1.1 mrg }
4029 1.1 mrg
4030 1.1 mrg /* Produce constraints for argument ARG of call STMT with eaf flags
4031 1.1 mrg FLAGS. RESULTS is array holding constraints for return value.
4032 1.1 mrg CALLESCAPE_ID is variable where call loocal escapes are added.
4033 1.1 mrg WRITES_GLOVEL_MEMORY is true if callee may write global memory. */
4034 1.1 mrg
4035 1.1 mrg static void
4036 1.1 mrg handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
4037 1.1 mrg int callescape_id, bool writes_global_memory)
4038 1.1 mrg {
4039 1.1 mrg int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ
4040 1.1 mrg | EAF_NO_INDIRECT_ESCAPE;
4041 1.1 mrg int relevant_flags = relevant_indirect_flags
4042 1.1 mrg | EAF_NO_DIRECT_CLOBBER
4043 1.1 mrg | EAF_NO_DIRECT_READ
4044 1.1 mrg | EAF_NO_DIRECT_ESCAPE;
4045 1.1 mrg if (gimple_call_lhs (stmt))
4046 1.1 mrg {
4047 1.1 mrg relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY;
4048 1.1 mrg relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY;
4049 1.1 mrg
4050 1.1 mrg /* If value is never read from it can not be returned indirectly
4051 1.1 mrg (except through the escape solution).
4052 1.1 mrg For all flags we get these implications right except for
4053 1.1 mrg not_returned because we miss return functions in ipa-prop. */
4054 1.1 mrg
4055 1.1 mrg if (flags & EAF_NO_DIRECT_READ)
4056 1.1 mrg flags |= EAF_NOT_RETURNED_INDIRECTLY;
4057 1.1 mrg }
4058 1.1 mrg
4059 1.1 mrg /* If the argument is not used we can ignore it.
4060 1.1 mrg Similarly argument is invisile for us if it not clobbered, does not
4061 1.1 mrg escape, is not read and can not be returned. */
4062 1.1 mrg if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags))
4063 1.1 mrg return;
4064 1.1 mrg
4065 1.1 mrg /* Produce varinfo for direct accesses to ARG. */
4066 1.1 mrg varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
4067 1.1 mrg tem->is_reg_var = true;
4068 1.1 mrg make_constraint_to (tem->id, arg);
4069 1.1 mrg make_any_offset_constraints (tem);
4070 1.1 mrg
4071 1.1 mrg bool callarg_transitive = false;
4072 1.1 mrg
4073 1.1 mrg /* As an compile time optimization if we make no difference between
4074 1.1 mrg direct and indirect accesses make arg transitively closed.
4075 1.1 mrg This avoids the need to build indir arg and do everything twice. */
4076 1.1 mrg if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0)
4077 1.1 mrg == ((flags & EAF_NO_DIRECT_CLOBBER) != 0)
4078 1.1 mrg && (((flags & EAF_NO_INDIRECT_READ) != 0)
4079 1.1 mrg == ((flags & EAF_NO_DIRECT_READ) != 0))
4080 1.1 mrg && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0)
4081 1.1 mrg == ((flags & EAF_NO_DIRECT_ESCAPE) != 0))
4082 1.1 mrg && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0)
4083 1.1 mrg == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0)))
4084 1.1 mrg {
4085 1.1 mrg make_transitive_closure_constraints (tem);
4086 1.1 mrg callarg_transitive = true;
4087 1.1 mrg gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4088 1.1 mrg }
4089 1.1 mrg
4090 1.1 mrg /* If necessary, produce varinfo for indirect accesses to ARG. */
4091 1.1 mrg varinfo_t indir_tem = NULL;
4092 1.1 mrg if (!callarg_transitive
4093 1.1 mrg && (flags & relevant_indirect_flags) != relevant_indirect_flags)
4094 1.1 mrg {
4095 1.1 mrg struct constraint_expr lhs, rhs;
4096 1.1 mrg indir_tem = new_var_info (NULL_TREE, "indircallarg", true);
4097 1.1 mrg indir_tem->is_reg_var = true;
4098 1.1 mrg
4099 1.1 mrg /* indir_term = *tem. */
4100 1.1 mrg lhs.type = SCALAR;
4101 1.1 mrg lhs.var = indir_tem->id;
4102 1.1 mrg lhs.offset = 0;
4103 1.1 mrg
4104 1.1 mrg rhs.type = DEREF;
4105 1.1 mrg rhs.var = tem->id;
4106 1.1 mrg rhs.offset = UNKNOWN_OFFSET;
4107 1.1 mrg process_constraint (new_constraint (lhs, rhs));
4108 1.1 mrg
4109 1.1 mrg make_any_offset_constraints (indir_tem);
4110 1.1 mrg
4111 1.1 mrg /* If we do not read indirectly there is no need for transitive closure.
4112 1.1 mrg We know there is only one level of indirection. */
4113 1.1 mrg if (!(flags & EAF_NO_INDIRECT_READ))
4114 1.1 mrg make_transitive_closure_constraints (indir_tem);
4115 1.1 mrg gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4116 1.1 mrg }
4117 1.1 mrg
4118 1.1 mrg if (gimple_call_lhs (stmt))
4119 1.1 mrg {
4120 1.1 mrg if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4121 1.1 mrg {
4122 1.1 mrg struct constraint_expr cexpr;
4123 1.1 mrg cexpr.var = tem->id;
4124 1.1 mrg cexpr.type = SCALAR;
4125 1.1 mrg cexpr.offset = 0;
4126 1.1 mrg results->safe_push (cexpr);
4127 1.1 mrg }
4128 1.1 mrg if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY))
4129 1.1 mrg {
4130 1.1 mrg struct constraint_expr cexpr;
4131 1.1 mrg cexpr.var = indir_tem->id;
4132 1.1 mrg cexpr.type = SCALAR;
4133 1.1 mrg cexpr.offset = 0;
4134 1.1 mrg results->safe_push (cexpr);
4135 1.1 mrg }
4136 1.1 mrg }
4137 1.1 mrg
4138 1.1 mrg if (!(flags & EAF_NO_DIRECT_READ))
4139 1.1 mrg {
4140 1.1 mrg varinfo_t uses = get_call_use_vi (stmt);
4141 1.1 mrg make_copy_constraint (uses, tem->id);
4142 1.1 mrg if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ))
4143 1.1 mrg make_copy_constraint (uses, indir_tem->id);
4144 1.1 mrg }
4145 1.1 mrg else
4146 1.1 mrg /* To read indirectly we need to read directly. */
4147 1.1 mrg gcc_checking_assert (flags & EAF_NO_INDIRECT_READ);
4148 1.1 mrg
4149 1.1 mrg if (!(flags & EAF_NO_DIRECT_CLOBBER))
4150 1.1 mrg {
4151 1.1 mrg struct constraint_expr lhs, rhs;
4152 1.1 mrg
4153 1.1 mrg /* *arg = callescape. */
4154 1.1 mrg lhs.type = DEREF;
4155 1.1 mrg lhs.var = tem->id;
4156 1.1 mrg lhs.offset = 0;
4157 1.1 mrg
4158 1.1 mrg rhs.type = SCALAR;
4159 1.1 mrg rhs.var = callescape_id;
4160 1.1 mrg rhs.offset = 0;
4161 1.1 mrg process_constraint (new_constraint (lhs, rhs));
4162 1.1 mrg
4163 1.1 mrg /* callclobbered = arg. */
4164 1.1 mrg make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
4165 1.1 mrg }
4166 1.1 mrg if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER))
4167 1.1 mrg {
4168 1.1 mrg struct constraint_expr lhs, rhs;
4169 1.1 mrg
4170 1.1 mrg /* *indir_arg = callescape. */
4171 1.1 mrg lhs.type = DEREF;
4172 1.1 mrg lhs.var = indir_tem->id;
4173 1.1 mrg lhs.offset = 0;
4174 1.1 mrg
4175 1.1 mrg rhs.type = SCALAR;
4176 1.1 mrg rhs.var = callescape_id;
4177 1.1 mrg rhs.offset = 0;
4178 1.1 mrg process_constraint (new_constraint (lhs, rhs));
4179 1.1 mrg
4180 1.1 mrg /* callclobbered = indir_arg. */
4181 1.1 mrg make_copy_constraint (get_call_clobber_vi (stmt), indir_tem->id);
4182 1.1 mrg }
4183 1.1 mrg
4184 1.1 mrg if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE)))
4185 1.1 mrg {
4186 1.1 mrg struct constraint_expr lhs, rhs;
4187 1.1 mrg
4188 1.1 mrg /* callescape = arg; */
4189 1.1 mrg lhs.var = callescape_id;
4190 1.1 mrg lhs.offset = 0;
4191 1.1 mrg lhs.type = SCALAR;
4192 1.1 mrg
4193 1.1 mrg rhs.var = tem->id;
4194 1.1 mrg rhs.offset = 0;
4195 1.1 mrg rhs.type = SCALAR;
4196 1.1 mrg process_constraint (new_constraint (lhs, rhs));
4197 1.1 mrg
4198 1.1 mrg if (writes_global_memory)
4199 1.1 mrg make_escape_constraint (arg);
4200 1.1 mrg }
4201 1.1 mrg else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE))
4202 1.1 mrg {
4203 1.1 mrg struct constraint_expr lhs, rhs;
4204 1.1 mrg
4205 1.1 mrg /* callescape = *(indir_arg + UNKNOWN); */
4206 1.1 mrg lhs.var = callescape_id;
4207 1.1 mrg lhs.offset = 0;
4208 1.1 mrg lhs.type = SCALAR;
4209 1.1 mrg
4210 1.1 mrg rhs.var = indir_tem->id;
4211 1.1 mrg rhs.offset = 0;
4212 1.1 mrg rhs.type = SCALAR;
4213 1.1 mrg process_constraint (new_constraint (lhs, rhs));
4214 1.1 mrg
4215 1.1 mrg if (writes_global_memory)
4216 1.1 mrg make_indirect_escape_constraint (tem);
4217 1.1 mrg }
4218 1.1 mrg }
4219 1.1 mrg
/* Determine global memory access of call STMT and update
   WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY.

   Each of the three pointers may be NULL, meaning the caller is not
   interested in that property.  On entry a true value means the
   property is unknown (i.e. the access may happen); this function only
   ever refines values from true to false, first using the callee's
   modref summary and then its fnspec attribute.  */

static void
determine_global_memory_access (gcall *stmt,
				bool *writes_global_memory,
				bool *reads_global_memory,
				bool *uses_global_memory)
{
  tree callee;
  cgraph_node *node;
  modref_summary *summary;

  /* We need to determine reads to set uses: a caller asking about uses
     must also pass a READS_GLOBAL_MEMORY slot.  */
  gcc_assert (!uses_global_memory || reads_global_memory);

  if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
      && (node = cgraph_node::get (callee)) != NULL
      && (summary = get_modref_function_summary (node)))
    {
      if (writes_global_memory && *writes_global_memory)
	*writes_global_memory = summary->global_memory_written;
      if (reads_global_memory && *reads_global_memory)
	*reads_global_memory = summary->global_memory_read;
      /* USES may only be cleared when the summary shows no global reads,
	 the callee makes no interposable calls, and the decl binds to the
	 current definition (so the summary cannot be invalidated by
	 symbol interposition).  */
      if (reads_global_memory && uses_global_memory
	  && !summary->calls_interposable
	  && !*reads_global_memory && node->binds_to_current_def_p ())
	*uses_global_memory = false;
    }
  /* If anything is still unknown, consult the fnspec attribute as a
     second source of information.  */
  if ((writes_global_memory && *writes_global_memory)
      || (uses_global_memory && *uses_global_memory)
      || (reads_global_memory && *reads_global_memory))
    {
      attr_fnspec fnspec = gimple_call_fnspec (stmt);
      if (fnspec.known_p ())
	{
	  if (writes_global_memory
	      && !fnspec.global_memory_written_p ())
	    *writes_global_memory = false;
	  /* No reads implies no uses either.  */
	  if (reads_global_memory && !fnspec.global_memory_read_p ())
	    {
	      *reads_global_memory = false;
	      if (uses_global_memory)
		*uses_global_memory = false;
	    }
	}
    }
}
4268 1.1 mrg
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS and collect return value constraint to RESULTS to be used later in
   handle_lhs_call.

   IMPLICIT_EAF_FLAGS are added to each function argument.  If
   WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to
   global memory.  Similar for READS_GLOBAL_MEMORY.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results,
		 int implicit_eaf_flags,
		 bool writes_global_memory,
		 bool reads_global_memory)
{
  /* Refine the conservative read/write assumptions using the callee's
     modref summary and fnspec, if available.  */
  determine_global_memory_access (stmt, &writes_global_memory,
				  &reads_global_memory,
				  NULL);

  /* CALLESCAPE is an artificial variable collecting everything that
     escapes into (and may be returned/used by) this call.  */
  varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);

  /* If function can use global memory, add it to callescape
     and to possible return values.  If not we can still use/return addresses
     of global symbols.  */
  struct constraint_expr lhs, rhs;

  lhs.type = SCALAR;
  lhs.var = callescape->id;
  lhs.offset = 0;

  /* SCALAR pulls in the whole NONLOCAL solution; ADDRESSOF only the
     addresses of global symbols themselves.  */
  rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));
  results->safe_push (rhs);

  /* Whatever escapes into the call is also potentially used by it.  */
  varinfo_t uses = get_call_use_vi (stmt);
  make_copy_constraint (uses, callescape->id);

  /* Process each argument according to its (implicit plus per-argument)
     EAF flags.  */
  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);
      handle_call_arg (stmt, arg, results,
		       flags | implicit_eaf_flags,
		       callescape->id, writes_global_memory);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    handle_call_arg (stmt, gimple_call_chain (stmt), results,
		     implicit_eaf_flags
		     | gimple_call_static_chain_flags (stmt),
		     callescape->id, writes_global_memory);

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      int flags = gimple_call_retslot_flags (stmt);
      const int relevant_flags = EAF_NO_DIRECT_ESCAPE
				 | EAF_NOT_RETURNED_DIRECTLY;

      /* Only bother when the slot is used and at least one relevant
	 property is not ruled out.  */
      if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
	{
	  auto_vec<ce_s> tmpc;

	  get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);

	  if (!(flags & EAF_NO_DIRECT_ESCAPE))
	    {
	      make_constraints_to (callescape->id, tmpc);
	      if (writes_global_memory)
		make_constraints_to (escaped_id, tmpc);
	    }
	  if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
	    {
	      struct constraint_expr *c;
	      unsigned i;
	      FOR_EACH_VEC_ELT (tmpc, i, c)
		results->safe_push (*c);
	    }
	}
    }
}
4355 1.1 mrg
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.

   STMT is the call, LHS the value being assigned, FLAGS the ERF_*
   return flags of the call, RHSC the return-value constraints collected
   by handle_rhs_call, and FNDECL the callee decl or NULL_TREE.  When an
   ERF_RETURNS_ARG or ERF_NOALIAS branch is taken, RHSC is overridden
   locally (created and released here); otherwise the passed-in RHSC is
   used as-is.  */

static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* A noalias return points to fresh heap storage.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP", true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);
}
4419 1.1 mrg
4420 1.1 mrg
4421 1.1 mrg /* Return the varinfo for the callee of CALL. */
4422 1.1 mrg
4423 1.1 mrg static varinfo_t
4424 1.1 mrg get_fi_for_callee (gcall *call)
4425 1.1 mrg {
4426 1.1 mrg tree decl, fn = gimple_call_fn (call);
4427 1.1 mrg
4428 1.1 mrg if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4429 1.1 mrg fn = OBJ_TYPE_REF_EXPR (fn);
4430 1.1 mrg
4431 1.1 mrg /* If we can directly resolve the function being called, do so.
4432 1.1 mrg Otherwise, it must be some sort of indirect expression that
4433 1.1 mrg we should still be able to handle. */
4434 1.1 mrg decl = gimple_call_addr_fndecl (fn);
4435 1.1 mrg if (decl)
4436 1.1 mrg return get_vi_for_tree (decl);
4437 1.1 mrg
4438 1.1 mrg /* If the function is anything other than a SSA name pointer we have no
4439 1.1 mrg clue and should be getting ANYFN (well, ANYTHING for now). */
4440 1.1 mrg if (!fn || TREE_CODE (fn) != SSA_NAME)
4441 1.1 mrg return get_varinfo (anything_id);
4442 1.1 mrg
4443 1.1 mrg if (SSA_NAME_IS_DEFAULT_DEF (fn)
4444 1.1 mrg && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4445 1.1 mrg || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4446 1.1 mrg fn = SSA_NAME_VAR (fn);
4447 1.1 mrg
4448 1.1 mrg return get_vi_for_tree (fn);
4449 1.1 mrg }
4450 1.1 mrg
4451 1.1 mrg /* Create constraints for assigning call argument ARG to the incoming parameter
4452 1.1 mrg INDEX of function FI. */
4453 1.1 mrg
4454 1.1 mrg static void
4455 1.1 mrg find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4456 1.1 mrg {
4457 1.1 mrg struct constraint_expr lhs;
4458 1.1 mrg lhs = get_function_part_constraint (fi, fi_parm_base + index);
4459 1.1 mrg
4460 1.1 mrg auto_vec<ce_s, 2> rhsc;
4461 1.1 mrg get_constraint_for_rhs (arg, &rhsc);
4462 1.1 mrg
4463 1.1 mrg unsigned j;
4464 1.1 mrg struct constraint_expr *rhsp;
4465 1.1 mrg FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4466 1.1 mrg process_constraint (new_constraint (lhs, *rhsp));
4467 1.1 mrg }
4468 1.1 mrg
4469 1.1 mrg /* Return true if FNDECL may be part of another lto partition. */
4470 1.1 mrg
4471 1.1 mrg static bool
4472 1.1 mrg fndecl_maybe_in_other_partition (tree fndecl)
4473 1.1 mrg {
4474 1.1 mrg cgraph_node *fn_node = cgraph_node::get (fndecl);
4475 1.1 mrg if (fn_node == NULL)
4476 1.1 mrg return true;
4477 1.1 mrg
4478 1.1 mrg return fn_node->in_other_partition;
4479 1.1 mrg }
4480 1.1 mrg
/* Create constraints for the builtin call T.  Return true if the call
   was handled, otherwise false (in which case the generic call handling
   in find_func_aliases_for_call takes over).  FN is the function T
   appears in.  */

static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ??? All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* bcopy has (src, dest) argument order; all others (dest, src).  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The stp*/mempcpy variants return dest plus an offset, so
		 model the result with an unknown offset from dest.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	      rhsc.truncate (0);
	    }
	  /* *dest = *src, with unknown offsets on both sides since the
	     copied region may start anywhere within the objects.  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  /* memset returns its first argument.  */
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* A zero memset makes the destination point to nothing (when
	     NULL dereference is undefined); any other fill value makes it
	     point to arbitrary integer-forged pointers.  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, ac));
	  return true;
	}
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Nothing interesting happens.  */
	return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	{
	  tree ptr = gimple_call_lhs (t);
	  if (ptr == NULL_TREE)
	    return true;
	  get_constraint_for (ptr, &lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* Alloca storage is never global.  To exempt it from escaped
	     handling make it a non-heap var.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  vi->is_heap_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign stores the allocated pointer through its
	     first argument rather than returning it.  */
	  tree ptrptr = gimple_call_arg (t, 0);
	  get_constraint_for (ptrptr, &lhsc);
	  do_deref (&lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result aliases the first argument.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (t))
	  {
	    /* These behave like an allocation whose result's pointed-to
	       memory receives what the source argument's pointed-to
	       memory points to.  */
	    auto_vec<ce_s> rhsc;
	    handle_lhs_call (t, gimple_call_lhs (t),
			     gimple_call_return_flags (t) | ERF_NOALIAS,
			     rhsc, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (0);
	    rhsc.truncate (0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well.  But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (gimple_call_lhs (t), &lhsc);
		get_constraint_for (gimple_call_arg (t, 0), &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (t))
	  {
	    tree src = gimple_call_arg (t, 0);
	    get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	    /* Also account for the possible NULL return.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (nul);
	    get_constraint_for (gimple_call_lhs (t), &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively).  */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  /* Record both argument strings as used by the call.  */
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  make_constraint_to (uses->id, gimple_call_arg (t, 1));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments.  */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  /* The frame becomes the nested function's static
		     chain.  */
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      /* The result is the function stored in the trampoline by
		 BUILT_IN_INIT_TRAMPOLINE above.  */
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* Transactional stores behave like *addr = src.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* Transactional loads behave like dest = *addr.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (fn->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (fn->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      tree arg = gimple_call_arg (t, argpos);

	      /* The data argument is passed as the outlined function's
		 first parameter.  */
	      varinfo_t fi = get_vi_for_tree (fndecl);
	      find_func_aliases_for_call_arg (fi, 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling.  */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise... */
      default:
	/* Fallthru to general call handling.  */;
      }

  /* Not a builtin we handle specially; let the generic call handling
     take care of it.  */
  return false;
}
4941 1.1 mrg
/* Create constraints for the call T.  FN is the function T appears in.
   Builtins with known semantics are handled specially; otherwise either
   the generic (non-IPA or unknown-callee) path or the precise IPA path
   is taken.  */

static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  varinfo_t fi;

  if (fndecl != NULL_TREE
      && fndecl_built_in_p (fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  /* IFN_DEFERRED_INIT initializes a variable with an arbitrary pattern;
     it has no points-to effect.  */
  if (gimple_call_internal_p (t, IFN_DEFERRED_INIT))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fi->decl && fndecl && !fi->is_fn_info))
    {
      /* Generic handling: summarize effects via EAF flags.  */
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, false, true);
      /* If the call is to a replaceable operator delete and results
	 from a delete expression as opposed to a direct call to
	 such operator, then the effects for PTA (in particular
	 the escaping of the pointer) can be ignored.  */
      else if (fndecl
	       && DECL_IS_OPERATOR_DELETE_P (fndecl)
	       && gimple_call_from_new_or_delete (t))
	;
      else
	handle_rhs_call (t, &rhsc, 0, true, true);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t),
			 gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA mode with a known callee: wire arguments, result and static
	 chain directly to the callee's function-info parts.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  tree arg = gimple_call_arg (t, j);
	  find_func_aliases_for_call_arg (fi, j, arg);
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  /* An aggregate return is passed by reference, so dereference
	     the result part to get at the returned value.  */
	  if (aggr_p)
	    {
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (rhs);
	      do_deref (&tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));

	  /* If we pass the result decl by reference, honor that.  */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (lhsop, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);
	    }
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
5054 1.1 mrg
5055 1.1 mrg /* Walk statement T setting up aliasing constraints according to the
5056 1.1 mrg references found in T. This function is the main part of the
5057 1.1 mrg constraint builder. AI points to auxiliary alias information used
5058 1.1 mrg when building alias sets and computing alias grouping heuristics. */
5059 1.1 mrg
5060 1.1 mrg static void
5061 1.1 mrg find_func_aliases (struct function *fn, gimple *origt)
5062 1.1 mrg {
5063 1.1 mrg gimple *t = origt;
5064 1.1 mrg auto_vec<ce_s, 16> lhsc;
5065 1.1 mrg auto_vec<ce_s, 16> rhsc;
5066 1.1 mrg varinfo_t fi;
5067 1.1 mrg
5068 1.1 mrg /* Now build constraints expressions. */
5069 1.1 mrg if (gimple_code (t) == GIMPLE_PHI)
5070 1.1 mrg {
5071 1.1 mrg /* For a phi node, assign all the arguments to
5072 1.1 mrg the result. */
5073 1.1 mrg get_constraint_for (gimple_phi_result (t), &lhsc);
5074 1.1 mrg for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
5075 1.1 mrg {
5076 1.1 mrg get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
5077 1.1 mrg process_all_all_constraints (lhsc, rhsc);
5078 1.1 mrg rhsc.truncate (0);
5079 1.1 mrg }
5080 1.1 mrg }
5081 1.1 mrg /* In IPA mode, we need to generate constraints to pass call
5082 1.1 mrg arguments through their calls. There are two cases,
5083 1.1 mrg either a GIMPLE_CALL returning a value, or just a plain
5084 1.1 mrg GIMPLE_CALL when we are not.
5085 1.1 mrg
5086 1.1 mrg In non-ipa mode, we need to generate constraints for each
5087 1.1 mrg pointer passed by address. */
5088 1.1 mrg else if (is_gimple_call (t))
5089 1.1 mrg find_func_aliases_for_call (fn, as_a <gcall *> (t));
5090 1.1 mrg
5091 1.1 mrg /* Otherwise, just a regular assignment statement. Only care about
5092 1.1 mrg operations with pointer result, others are dealt with as escape
5093 1.1 mrg points if they have pointer operands. */
5094 1.1 mrg else if (is_gimple_assign (t))
5095 1.1 mrg {
5096 1.1 mrg /* Otherwise, just a regular assignment statement. */
5097 1.1 mrg tree lhsop = gimple_assign_lhs (t);
5098 1.1 mrg tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
5099 1.1 mrg
5100 1.1 mrg if (rhsop && TREE_CLOBBER_P (rhsop))
5101 1.1 mrg /* Ignore clobbers, they don't actually store anything into
5102 1.1 mrg the LHS. */
5103 1.1 mrg ;
5104 1.1 mrg else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
5105 1.1 mrg do_structure_copy (lhsop, rhsop);
5106 1.1 mrg else
5107 1.1 mrg {
5108 1.1 mrg enum tree_code code = gimple_assign_rhs_code (t);
5109 1.1 mrg
5110 1.1 mrg get_constraint_for (lhsop, &lhsc);
5111 1.1 mrg
5112 1.1 mrg if (code == POINTER_PLUS_EXPR)
5113 1.1 mrg get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5114 1.1 mrg gimple_assign_rhs2 (t), &rhsc);
5115 1.1 mrg else if (code == POINTER_DIFF_EXPR)
5116 1.1 mrg /* The result is not a pointer (part). */
5117 1.1 mrg ;
5118 1.1 mrg else if (code == BIT_AND_EXPR
5119 1.1 mrg && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
5120 1.1 mrg {
5121 1.1 mrg /* Aligning a pointer via a BIT_AND_EXPR is offsetting
5122 1.1 mrg the pointer. Handle it by offsetting it by UNKNOWN. */
5123 1.1 mrg get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5124 1.1 mrg NULL_TREE, &rhsc);
5125 1.1 mrg }
5126 1.1 mrg else if (code == TRUNC_DIV_EXPR
5127 1.1 mrg || code == CEIL_DIV_EXPR
5128 1.1 mrg || code == FLOOR_DIV_EXPR
5129 1.1 mrg || code == ROUND_DIV_EXPR
5130 1.1 mrg || code == EXACT_DIV_EXPR
5131 1.1 mrg || code == TRUNC_MOD_EXPR
5132 1.1 mrg || code == CEIL_MOD_EXPR
5133 1.1 mrg || code == FLOOR_MOD_EXPR
5134 1.1 mrg || code == ROUND_MOD_EXPR)
5135 1.1 mrg /* Division and modulo transfer the pointer from the LHS. */
5136 1.1 mrg get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5137 1.1 mrg NULL_TREE, &rhsc);
5138 1.1 mrg else if (CONVERT_EXPR_CODE_P (code)
5139 1.1 mrg || gimple_assign_single_p (t))
5140 1.1 mrg /* See through conversions, single RHS are handled by
5141 1.1 mrg get_constraint_for_rhs. */
5142 1.1 mrg get_constraint_for_rhs (rhsop, &rhsc);
5143 1.1 mrg else if (code == COND_EXPR)
5144 1.1 mrg {
5145 1.1 mrg /* The result is a merge of both COND_EXPR arms. */
5146 1.1 mrg auto_vec<ce_s, 2> tmp;
5147 1.1 mrg struct constraint_expr *rhsp;
5148 1.1 mrg unsigned i;
5149 1.1 mrg get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
5150 1.1 mrg get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
5151 1.1 mrg FOR_EACH_VEC_ELT (tmp, i, rhsp)
5152 1.1 mrg rhsc.safe_push (*rhsp);
5153 1.1 mrg }
5154 1.1 mrg else if (truth_value_p (code))
5155 1.1 mrg /* Truth value results are not pointer (parts). Or at least
5156 1.1 mrg very unreasonable obfuscation of a part. */
5157 1.1 mrg ;
5158 1.1 mrg else
5159 1.1 mrg {
5160 1.1 mrg /* All other operations are possibly offsetting merges. */
5161 1.1 mrg auto_vec<ce_s, 4> tmp;
5162 1.1 mrg struct constraint_expr *rhsp;
5163 1.1 mrg unsigned i, j;
5164 1.1 mrg get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
5165 1.1 mrg NULL_TREE, &rhsc);
5166 1.1 mrg for (i = 2; i < gimple_num_ops (t); ++i)
5167 1.1 mrg {
5168 1.1 mrg get_constraint_for_ptr_offset (gimple_op (t, i),
5169 1.1 mrg NULL_TREE, &tmp);
5170 1.1 mrg FOR_EACH_VEC_ELT (tmp, j, rhsp)
5171 1.1 mrg rhsc.safe_push (*rhsp);
5172 1.1 mrg tmp.truncate (0);
5173 1.1 mrg }
5174 1.1 mrg }
5175 1.1 mrg process_all_all_constraints (lhsc, rhsc);
5176 1.1 mrg }
5177 1.1 mrg /* If there is a store to a global variable the rhs escapes. */
5178 1.1 mrg if ((lhsop = get_base_address (lhsop)) != NULL_TREE
5179 1.1 mrg && DECL_P (lhsop))
5180 1.1 mrg {
5181 1.1 mrg varinfo_t vi = get_vi_for_tree (lhsop);
5182 1.1 mrg if ((! in_ipa_mode && vi->is_global_var)
5183 1.1 mrg || vi->is_ipa_escape_point)
5184 1.1 mrg make_escape_constraint (rhsop);
5185 1.1 mrg }
5186 1.1 mrg }
5187 1.1 mrg /* Handle escapes through return. */
5188 1.1 mrg else if (gimple_code (t) == GIMPLE_RETURN
5189 1.1 mrg && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
5190 1.1 mrg {
5191 1.1 mrg greturn *return_stmt = as_a <greturn *> (t);
5192 1.1 mrg fi = NULL;
5193 1.1 mrg if (!in_ipa_mode
5194 1.1 mrg && SSA_VAR_P (gimple_return_retval (return_stmt)))
5195 1.1 mrg {
5196 1.1 mrg /* We handle simple returns by post-processing the solutions. */
5197 1.1 mrg ;
5198 1.1 mrg }
5199 1.1 mrg if (!(fi = get_vi_for_tree (fn->decl)))
5200 1.1 mrg make_escape_constraint (gimple_return_retval (return_stmt));
5201 1.1 mrg else if (in_ipa_mode)
5202 1.1 mrg {
5203 1.1 mrg struct constraint_expr lhs ;
5204 1.1 mrg struct constraint_expr *rhsp;
5205 1.1 mrg unsigned i;
5206 1.1 mrg
5207 1.1 mrg lhs = get_function_part_constraint (fi, fi_result);
5208 1.1 mrg get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
5209 1.1 mrg FOR_EACH_VEC_ELT (rhsc, i, rhsp)
5210 1.1 mrg process_constraint (new_constraint (lhs, *rhsp));
5211 1.1 mrg }
5212 1.1 mrg }
5213 1.1 mrg /* Handle asms conservatively by adding escape constraints to everything. */
5214 1.1 mrg else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
5215 1.1 mrg {
5216 1.1 mrg unsigned i, noutputs;
5217 1.1 mrg const char **oconstraints;
5218 1.1 mrg const char *constraint;
5219 1.1 mrg bool allows_mem, allows_reg, is_inout;
5220 1.1 mrg
5221 1.1 mrg noutputs = gimple_asm_noutputs (asm_stmt);
5222 1.1 mrg oconstraints = XALLOCAVEC (const char *, noutputs);
5223 1.1 mrg
5224 1.1 mrg for (i = 0; i < noutputs; ++i)
5225 1.1 mrg {
5226 1.1 mrg tree link = gimple_asm_output_op (asm_stmt, i);
5227 1.1 mrg tree op = TREE_VALUE (link);
5228 1.1 mrg
5229 1.1 mrg constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5230 1.1 mrg oconstraints[i] = constraint;
5231 1.1 mrg parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5232 1.1 mrg &allows_reg, &is_inout);
5233 1.1 mrg
5234 1.1 mrg /* A memory constraint makes the address of the operand escape. */
5235 1.1 mrg if (!allows_reg && allows_mem)
5236 1.1 mrg make_escape_constraint (build_fold_addr_expr (op));
5237 1.1 mrg
5238 1.1 mrg /* The asm may read global memory, so outputs may point to
5239 1.1 mrg any global memory. */
5240 1.1 mrg if (op)
5241 1.1 mrg {
5242 1.1 mrg auto_vec<ce_s, 2> lhsc;
5243 1.1 mrg struct constraint_expr rhsc, *lhsp;
5244 1.1 mrg unsigned j;
5245 1.1 mrg get_constraint_for (op, &lhsc);
5246 1.1 mrg rhsc.var = nonlocal_id;
5247 1.1 mrg rhsc.offset = 0;
5248 1.1 mrg rhsc.type = SCALAR;
5249 1.1 mrg FOR_EACH_VEC_ELT (lhsc, j, lhsp)
5250 1.1 mrg process_constraint (new_constraint (*lhsp, rhsc));
5251 1.1 mrg }
5252 1.1 mrg }
5253 1.1 mrg for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
5254 1.1 mrg {
5255 1.1 mrg tree link = gimple_asm_input_op (asm_stmt, i);
5256 1.1 mrg tree op = TREE_VALUE (link);
5257 1.1 mrg
5258 1.1 mrg constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5259 1.1 mrg
5260 1.1 mrg parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
5261 1.1 mrg &allows_mem, &allows_reg);
5262 1.1 mrg
5263 1.1 mrg /* A memory constraint makes the address of the operand escape. */
5264 1.1 mrg if (!allows_reg && allows_mem)
5265 1.1 mrg make_escape_constraint (build_fold_addr_expr (op));
5266 1.1 mrg /* Strictly we'd only need the constraint to ESCAPED if
5267 1.1 mrg the asm clobbers memory, otherwise using something
5268 1.1 mrg along the lines of per-call clobbers/uses would be enough. */
5269 1.1 mrg else if (op)
5270 1.1 mrg make_escape_constraint (op);
5271 1.1 mrg }
5272 1.1 mrg }
5273 1.1 mrg }
5274 1.1 mrg
5275 1.1 mrg
5276 1.1 mrg /* Create a constraint adding to the clobber set of FI the memory
5277 1.1 mrg pointed to by PTR. */
5278 1.1 mrg
5279 1.1 mrg static void
5280 1.1 mrg process_ipa_clobber (varinfo_t fi, tree ptr)
5281 1.1 mrg {
5282 1.1 mrg vec<ce_s> ptrc = vNULL;
5283 1.1 mrg struct constraint_expr *c, lhs;
5284 1.1 mrg unsigned i;
5285 1.1 mrg get_constraint_for_rhs (ptr, &ptrc);
5286 1.1 mrg lhs = get_function_part_constraint (fi, fi_clobbers);
5287 1.1 mrg FOR_EACH_VEC_ELT (ptrc, i, c)
5288 1.1 mrg process_constraint (new_constraint (lhs, *c));
5289 1.1 mrg ptrc.release ();
5290 1.1 mrg }
5291 1.1 mrg
5292 1.1 mrg /* Walk statement T setting up clobber and use constraints according to the
5293 1.1 mrg references found in T. This function is a main part of the
5294 1.1 mrg IPA constraint builder. */
5295 1.1 mrg
static void
find_func_clobbers (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (fn->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip handled components to get at the base of the store.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Only stores that may hit non-local memory matter: a decl that
	 is not an automatic variable of FN, or an indirect store not
	 known to target such an automatic variable.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, fn->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
	{
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  /* Everything LHS may refer to goes into FI's clobber set.  */
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.truncate (0);
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t)
		  : gimple_return_retval (as_a <greturn *> (t)));
      tree tem = rhs;
      /* Strip handled components to get at the base of the read.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* As for stores above, only possibly non-local reads matter.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, fn->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
		        (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.truncate (0);
	}
    }

  if (gcall *call_stmt = dyn_cast <gcall *> (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* BCOPY takes (src, dest); all the others take (dest, src).  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      /* The destination is clobbered ...  */
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      /* ... and the source is used.  */
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_POSIX_MEMALIGN:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  case BUILT_IN_GOMP_PARALLEL:
	  case BUILT_IN_GOACC_PARALLEL:
	    {
	      unsigned int fnpos, argpos;
	      unsigned int implicit_use_args[2];
	      unsigned int num_implicit_use_args = 0;
	      switch (DECL_FUNCTION_CODE (decl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
		     sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  implicit_use_args[num_implicit_use_args++] = 4;
		  implicit_use_args[num_implicit_use_args++] = 5;
		  break;
		default:
		  gcc_unreachable ();
		}

	      /* The outlined function is known statically here.  */
	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      varinfo_t cfi = get_vi_for_tree (fndecl);

	      tree arg = gimple_call_arg (t, argpos);

	      /* Parameter passed by value is used.  */
	      lhs = get_function_part_constraint (fi, fi_uses);
	      struct constraint_expr *rhsp;
	      get_constraint_for (arg, &rhsc);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);

	      /* Handle parameters used by the call, but not used in cfi, as
		 implicitly used by cfi.  */
	      lhs = get_function_part_constraint (cfi, fi_uses);
	      for (unsigned i = 0; i < num_implicit_use_args; ++i)
		{
		  tree arg = gimple_call_arg (t, implicit_use_args[i]);
		  get_constraint_for (arg, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);
		}

	      /* The caller clobbers what the callee does.  */
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      rhs = get_function_part_constraint (cfi, fi_clobbers);
	      process_constraint (new_constraint (lhs, rhs));

	      /* The caller uses what the callee does.  */
	      lhs = get_function_part_constraint (fi, fi_uses);
	      rhs = get_function_part_constraint (cfi, fi_uses);
	      process_constraint (new_constraint (lhs, rhs));

	      return;
	    }
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise... */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants do not refer to memory of FN.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.truncate (0);
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (call_stmt);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee: be maximally conservative.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (cfi->decl
	  && TREE_CODE (cfi->decl) == FUNCTION_DECL
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ??? This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ??? Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }
}
5635 1.1 mrg
5636 1.1 mrg
5637 1.1 mrg /* Find the first varinfo in the same variable as START that overlaps with
5638 1.1 mrg OFFSET. Return NULL if we can't find one. */
5639 1.1 mrg
5640 1.1 mrg static varinfo_t
5641 1.1 mrg first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5642 1.1 mrg {
5643 1.1 mrg /* If the offset is outside of the variable, bail out. */
5644 1.1 mrg if (offset >= start->fullsize)
5645 1.1 mrg return NULL;
5646 1.1 mrg
5647 1.1 mrg /* If we cannot reach offset from start, lookup the first field
5648 1.1 mrg and start from there. */
5649 1.1 mrg if (start->offset > offset)
5650 1.1 mrg start = get_varinfo (start->head);
5651 1.1 mrg
5652 1.1 mrg while (start)
5653 1.1 mrg {
5654 1.1 mrg /* We may not find a variable in the field list with the actual
5655 1.1 mrg offset when we have glommed a structure to a variable.
5656 1.1 mrg In that case, however, offset should still be within the size
5657 1.1 mrg of the variable. */
5658 1.1 mrg if (offset >= start->offset
5659 1.1 mrg && (offset - start->offset) < start->size)
5660 1.1 mrg return start;
5661 1.1 mrg
5662 1.1 mrg start = vi_next (start);
5663 1.1 mrg }
5664 1.1 mrg
5665 1.1 mrg return NULL;
5666 1.1 mrg }
5667 1.1 mrg
5668 1.1 mrg /* Find the first varinfo in the same variable as START that overlaps with
5669 1.1 mrg OFFSET. If there is no such varinfo the varinfo directly preceding
5670 1.1 mrg OFFSET is returned. */
5671 1.1 mrg
5672 1.1 mrg static varinfo_t
5673 1.1 mrg first_or_preceding_vi_for_offset (varinfo_t start,
5674 1.1 mrg unsigned HOST_WIDE_INT offset)
5675 1.1 mrg {
5676 1.1 mrg /* If we cannot reach offset from start, lookup the first field
5677 1.1 mrg and start from there. */
5678 1.1 mrg if (start->offset > offset)
5679 1.1 mrg start = get_varinfo (start->head);
5680 1.1 mrg
5681 1.1 mrg /* We may not find a variable in the field list with the actual
5682 1.1 mrg offset when we have glommed a structure to a variable.
5683 1.1 mrg In that case, however, offset should still be within the size
5684 1.1 mrg of the variable.
5685 1.1 mrg If we got beyond the offset we look for return the field
5686 1.1 mrg directly preceding offset which may be the last field. */
5687 1.1 mrg while (start->next
5688 1.1 mrg && offset >= start->offset
5689 1.1 mrg && !((offset - start->offset) < start->size))
5690 1.1 mrg start = vi_next (start);
5691 1.1 mrg
5692 1.1 mrg return start;
5693 1.1 mrg }
5694 1.1 mrg
5695 1.1 mrg
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* Nonzero if the field's size is not known (SIZE is then unusable).  */
  unsigned has_unknown_size : 1;

  /* Nonzero if the field's type is known to contain pointers
     (cf. field_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* Nonzero if the field may contain pointers.  */
  unsigned may_have_pointers : 1;

  /* Nonzero if the field contains only restrict-qualified pointers.  */
  unsigned only_restrict_pointers : 1;

  /* Pointed-to type of the restrict pointer(s); presumably only
     meaningful when only_restrict_pointers is set — confirm at the
     points this struct is filled in.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5720 1.1 mrg
5721 1.1 mrg
5722 1.1 mrg /* qsort comparison function for two fieldoff's PA and PB */
5723 1.1 mrg
5724 1.1 mrg static int
5725 1.1 mrg fieldoff_compare (const void *pa, const void *pb)
5726 1.1 mrg {
5727 1.1 mrg const fieldoff_s *foa = (const fieldoff_s *)pa;
5728 1.1 mrg const fieldoff_s *fob = (const fieldoff_s *)pb;
5729 1.1 mrg unsigned HOST_WIDE_INT foasize, fobsize;
5730 1.1 mrg
5731 1.1 mrg if (foa->offset < fob->offset)
5732 1.1 mrg return -1;
5733 1.1 mrg else if (foa->offset > fob->offset)
5734 1.1 mrg return 1;
5735 1.1 mrg
5736 1.1 mrg foasize = foa->size;
5737 1.1 mrg fobsize = fob->size;
5738 1.1 mrg if (foasize < fobsize)
5739 1.1 mrg return -1;
5740 1.1 mrg else if (foasize > fobsize)
5741 1.1 mrg return 1;
5742 1.1 mrg return 0;
5743 1.1 mrg }
5744 1.1 mrg
/* Sort FIELDSTACK by field offset, breaking ties by field size
   (the order fieldoff_compare implements).  */
static void
sort_fieldstack (vec<fieldoff_s> &fieldstack)
{
  fieldstack.qsort (fieldoff_compare);
}
5751 1.1 mrg
/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars; only
     RECORD_TYPEs qualify (unions have overlapping members).  */
  return TREE_CODE (t) == RECORD_TYPE;
}
5760 1.1 mrg
5761 1.1 mrg /* Return true if V is a tree that we can have subvars for.
5762 1.1 mrg Normally, this is any aggregate type. Also complex
5763 1.1 mrg types which are not gimple registers can have subvars. */
5764 1.1 mrg
5765 1.1 mrg static inline bool
5766 1.1 mrg var_can_have_subvars (const_tree v)
5767 1.1 mrg {
5768 1.1 mrg /* Volatile variables should never have subvars. */
5769 1.1 mrg if (TREE_THIS_VOLATILE (v))
5770 1.1 mrg return false;
5771 1.1 mrg
5772 1.1 mrg /* Non decls or memory tags can never have subvars. */
5773 1.1 mrg if (!DECL_P (v))
5774 1.1 mrg return false;
5775 1.1 mrg
5776 1.1 mrg return type_can_have_subvars (TREE_TYPE (v));
5777 1.1 mrg }
5778 1.1 mrg
5779 1.1 mrg /* Return true if T is a type that does contain pointers. */
5780 1.1 mrg
5781 1.1 mrg static bool
5782 1.1 mrg type_must_have_pointers (tree type)
5783 1.1 mrg {
5784 1.1 mrg if (POINTER_TYPE_P (type))
5785 1.1 mrg return true;
5786 1.1 mrg
5787 1.1 mrg if (TREE_CODE (type) == ARRAY_TYPE)
5788 1.1 mrg return type_must_have_pointers (TREE_TYPE (type));
5789 1.1 mrg
5790 1.1 mrg /* A function or method can have pointers as arguments, so track
5791 1.1 mrg those separately. */
5792 1.1 mrg if (TREE_CODE (type) == FUNCTION_TYPE
5793 1.1 mrg || TREE_CODE (type) == METHOD_TYPE)
5794 1.1 mrg return true;
5795 1.1 mrg
5796 1.1 mrg return false;
5797 1.1 mrg }
5798 1.1 mrg
/* Return true if the type of field T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5804 1.1 mrg
5805 1.1 mrg /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
5806 1.1 mrg the fields of TYPE onto fieldstack, recording their offsets along
5807 1.1 mrg the way.
5808 1.1 mrg
5809 1.1 mrg OFFSET is used to keep track of the offset in this entire
5810 1.1 mrg structure, rather than just the immediately containing structure.
5811 1.1 mrg Returns false if the caller is supposed to handle the field we
5812 1.1 mrg recursed for. */
5813 1.1 mrg
5814 1.1 mrg static bool
5815 1.1 mrg push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
5816 1.1 mrg HOST_WIDE_INT offset)
5817 1.1 mrg {
5818 1.1 mrg tree field;
5819 1.1 mrg bool empty_p = true;
5820 1.1 mrg
5821 1.1 mrg if (TREE_CODE (type) != RECORD_TYPE)
5822 1.1 mrg return false;
5823 1.1 mrg
5824 1.1 mrg /* If the vector of fields is growing too big, bail out early.
5825 1.1 mrg Callers check for vec::length <= param_max_fields_for_field_sensitive, make
5826 1.1 mrg sure this fails. */
5827 1.1 mrg if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
5828 1.1 mrg return false;
5829 1.1 mrg
5830 1.1 mrg for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5831 1.1 mrg if (TREE_CODE (field) == FIELD_DECL)
5832 1.1 mrg {
5833 1.1 mrg bool push = false;
5834 1.1 mrg HOST_WIDE_INT foff = bitpos_of_field (field);
5835 1.1 mrg tree field_type = TREE_TYPE (field);
5836 1.1 mrg
5837 1.1 mrg if (!var_can_have_subvars (field)
5838 1.1 mrg || TREE_CODE (field_type) == QUAL_UNION_TYPE
5839 1.1 mrg || TREE_CODE (field_type) == UNION_TYPE)
5840 1.1 mrg push = true;
5841 1.1 mrg else if (!push_fields_onto_fieldstack
5842 1.1 mrg (field_type, fieldstack, offset + foff)
5843 1.1 mrg && (DECL_SIZE (field)
5844 1.1 mrg && !integer_zerop (DECL_SIZE (field))))
5845 1.1 mrg /* Empty structures may have actual size, like in C++. So
5846 1.1 mrg see if we didn't push any subfields and the size is
5847 1.1 mrg nonzero, push the field onto the stack. */
5848 1.1 mrg push = true;
5849 1.1 mrg
5850 1.1 mrg if (push)
5851 1.1 mrg {
5852 1.1 mrg fieldoff_s *pair = NULL;
5853 1.1 mrg bool has_unknown_size = false;
5854 1.1 mrg bool must_have_pointers_p;
5855 1.1 mrg
5856 1.1 mrg if (!fieldstack->is_empty ())
5857 1.1 mrg pair = &fieldstack->last ();
5858 1.1 mrg
5859 1.1 mrg /* If there isn't anything at offset zero, create sth. */
5860 1.1 mrg if (!pair
5861 1.1 mrg && offset + foff != 0)
5862 1.1 mrg {
5863 1.1 mrg fieldoff_s e
5864 1.1 mrg = {0, offset + foff, false, false, true, false, NULL_TREE};
5865 1.1 mrg pair = fieldstack->safe_push (e);
5866 1.1 mrg }
5867 1.1 mrg
5868 1.1 mrg if (!DECL_SIZE (field)
5869 1.1 mrg || !tree_fits_uhwi_p (DECL_SIZE (field)))
5870 1.1 mrg has_unknown_size = true;
5871 1.1 mrg
5872 1.1 mrg /* If adjacent fields do not contain pointers merge them. */
5873 1.1 mrg must_have_pointers_p = field_must_have_pointers (field);
5874 1.1 mrg if (pair
5875 1.1 mrg && !has_unknown_size
5876 1.1 mrg && !must_have_pointers_p
5877 1.1 mrg && !pair->must_have_pointers
5878 1.1 mrg && !pair->has_unknown_size
5879 1.1 mrg && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
5880 1.1 mrg {
5881 1.1 mrg pair->size += tree_to_uhwi (DECL_SIZE (field));
5882 1.1 mrg }
5883 1.1 mrg else
5884 1.1 mrg {
5885 1.1 mrg fieldoff_s e;
5886 1.1 mrg e.offset = offset + foff;
5887 1.1 mrg e.has_unknown_size = has_unknown_size;
5888 1.1 mrg if (!has_unknown_size)
5889 1.1 mrg e.size = tree_to_uhwi (DECL_SIZE (field));
5890 1.1 mrg else
5891 1.1 mrg e.size = -1;
5892 1.1 mrg e.must_have_pointers = must_have_pointers_p;
5893 1.1 mrg e.may_have_pointers = true;
5894 1.1 mrg e.only_restrict_pointers
5895 1.1 mrg = (!has_unknown_size
5896 1.1 mrg && POINTER_TYPE_P (field_type)
5897 1.1 mrg && TYPE_RESTRICT (field_type));
5898 1.1 mrg if (e.only_restrict_pointers)
5899 1.1 mrg e.restrict_pointed_type = TREE_TYPE (field_type);
5900 1.1 mrg fieldstack->safe_push (e);
5901 1.1 mrg }
5902 1.1 mrg }
5903 1.1 mrg
5904 1.1 mrg empty_p = false;
5905 1.1 mrg }
5906 1.1 mrg
5907 1.1 mrg return !empty_p;
5908 1.1 mrg }
5909 1.1 mrg
5910 1.1 mrg /* Count the number of arguments DECL has, and set IS_VARARGS to true
5911 1.1 mrg if it is a varargs function. */
5912 1.1 mrg
5913 1.1 mrg static unsigned int
5914 1.1 mrg count_num_arguments (tree decl, bool *is_varargs)
5915 1.1 mrg {
5916 1.1 mrg unsigned int num = 0;
5917 1.1 mrg tree t;
5918 1.1 mrg
5919 1.1 mrg /* Capture named arguments for K&R functions. They do not
5920 1.1 mrg have a prototype and thus no TYPE_ARG_TYPES. */
5921 1.1 mrg for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5922 1.1 mrg ++num;
5923 1.1 mrg
5924 1.1 mrg /* Check if the function has variadic arguments. */
5925 1.1 mrg for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5926 1.1 mrg if (TREE_VALUE (t) == void_type_node)
5927 1.1 mrg break;
5928 1.1 mrg if (!t)
5929 1.1 mrg *is_varargs = true;
5930 1.1 mrg
5931 1.1 mrg return num;
5932 1.1 mrg }
5933 1.1 mrg
5934 1.1 mrg /* Creation function node for DECL, using NAME, and return the index
5935 1.1 mrg of the variable we've created for the function. If NONLOCAL_p, create
5936 1.1 mrg initial constraints. */
5937 1.1 mrg
5938 1.1 mrg static varinfo_t
5939 1.1 mrg create_function_info_for (tree decl, const char *name, bool add_id,
5940 1.1 mrg bool nonlocal_p)
5941 1.1 mrg {
5942 1.1 mrg struct function *fn = DECL_STRUCT_FUNCTION (decl);
5943 1.1 mrg varinfo_t vi, prev_vi;
5944 1.1 mrg tree arg;
5945 1.1 mrg unsigned int i;
5946 1.1 mrg bool is_varargs = false;
5947 1.1 mrg unsigned int num_args = count_num_arguments (decl, &is_varargs);
5948 1.1 mrg
5949 1.1 mrg /* Create the variable info. */
5950 1.1 mrg
5951 1.1 mrg vi = new_var_info (decl, name, add_id);
5952 1.1 mrg vi->offset = 0;
5953 1.1 mrg vi->size = 1;
5954 1.1 mrg vi->fullsize = fi_parm_base + num_args;
5955 1.1 mrg vi->is_fn_info = 1;
5956 1.1 mrg vi->may_have_pointers = false;
5957 1.1 mrg if (is_varargs)
5958 1.1 mrg vi->fullsize = ~0;
5959 1.1 mrg insert_vi_for_tree (vi->decl, vi);
5960 1.1 mrg
5961 1.1 mrg prev_vi = vi;
5962 1.1 mrg
5963 1.1 mrg /* Create a variable for things the function clobbers and one for
5964 1.1 mrg things the function uses. */
5965 1.1 mrg {
5966 1.1 mrg varinfo_t clobbervi, usevi;
5967 1.1 mrg const char *newname;
5968 1.1 mrg char *tempname;
5969 1.1 mrg
5970 1.1 mrg tempname = xasprintf ("%s.clobber", name);
5971 1.1 mrg newname = ggc_strdup (tempname);
5972 1.1 mrg free (tempname);
5973 1.1 mrg
5974 1.1 mrg clobbervi = new_var_info (NULL, newname, false);
5975 1.1 mrg clobbervi->offset = fi_clobbers;
5976 1.1 mrg clobbervi->size = 1;
5977 1.1 mrg clobbervi->fullsize = vi->fullsize;
5978 1.1 mrg clobbervi->is_full_var = true;
5979 1.1 mrg clobbervi->is_global_var = false;
5980 1.1 mrg clobbervi->is_reg_var = true;
5981 1.1 mrg
5982 1.1 mrg gcc_assert (prev_vi->offset < clobbervi->offset);
5983 1.1 mrg prev_vi->next = clobbervi->id;
5984 1.1 mrg prev_vi = clobbervi;
5985 1.1 mrg
5986 1.1 mrg tempname = xasprintf ("%s.use", name);
5987 1.1 mrg newname = ggc_strdup (tempname);
5988 1.1 mrg free (tempname);
5989 1.1 mrg
5990 1.1 mrg usevi = new_var_info (NULL, newname, false);
5991 1.1 mrg usevi->offset = fi_uses;
5992 1.1 mrg usevi->size = 1;
5993 1.1 mrg usevi->fullsize = vi->fullsize;
5994 1.1 mrg usevi->is_full_var = true;
5995 1.1 mrg usevi->is_global_var = false;
5996 1.1 mrg usevi->is_reg_var = true;
5997 1.1 mrg
5998 1.1 mrg gcc_assert (prev_vi->offset < usevi->offset);
5999 1.1 mrg prev_vi->next = usevi->id;
6000 1.1 mrg prev_vi = usevi;
6001 1.1 mrg }
6002 1.1 mrg
6003 1.1 mrg /* And one for the static chain. */
6004 1.1 mrg if (fn->static_chain_decl != NULL_TREE)
6005 1.1 mrg {
6006 1.1 mrg varinfo_t chainvi;
6007 1.1 mrg const char *newname;
6008 1.1 mrg char *tempname;
6009 1.1 mrg
6010 1.1 mrg tempname = xasprintf ("%s.chain", name);
6011 1.1 mrg newname = ggc_strdup (tempname);
6012 1.1 mrg free (tempname);
6013 1.1 mrg
6014 1.1 mrg chainvi = new_var_info (fn->static_chain_decl, newname, false);
6015 1.1 mrg chainvi->offset = fi_static_chain;
6016 1.1 mrg chainvi->size = 1;
6017 1.1 mrg chainvi->fullsize = vi->fullsize;
6018 1.1 mrg chainvi->is_full_var = true;
6019 1.1 mrg chainvi->is_global_var = false;
6020 1.1 mrg
6021 1.1 mrg insert_vi_for_tree (fn->static_chain_decl, chainvi);
6022 1.1 mrg
6023 1.1 mrg if (nonlocal_p
6024 1.1 mrg && chainvi->may_have_pointers)
6025 1.1 mrg make_constraint_from (chainvi, nonlocal_id);
6026 1.1 mrg
6027 1.1 mrg gcc_assert (prev_vi->offset < chainvi->offset);
6028 1.1 mrg prev_vi->next = chainvi->id;
6029 1.1 mrg prev_vi = chainvi;
6030 1.1 mrg }
6031 1.1 mrg
6032 1.1 mrg /* Create a variable for the return var. */
6033 1.1 mrg if (DECL_RESULT (decl) != NULL
6034 1.1 mrg || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
6035 1.1 mrg {
6036 1.1 mrg varinfo_t resultvi;
6037 1.1 mrg const char *newname;
6038 1.1 mrg char *tempname;
6039 1.1 mrg tree resultdecl = decl;
6040 1.1 mrg
6041 1.1 mrg if (DECL_RESULT (decl))
6042 1.1 mrg resultdecl = DECL_RESULT (decl);
6043 1.1 mrg
6044 1.1 mrg tempname = xasprintf ("%s.result", name);
6045 1.1 mrg newname = ggc_strdup (tempname);
6046 1.1 mrg free (tempname);
6047 1.1 mrg
6048 1.1 mrg resultvi = new_var_info (resultdecl, newname, false);
6049 1.1 mrg resultvi->offset = fi_result;
6050 1.1 mrg resultvi->size = 1;
6051 1.1 mrg resultvi->fullsize = vi->fullsize;
6052 1.1 mrg resultvi->is_full_var = true;
6053 1.1 mrg if (DECL_RESULT (decl))
6054 1.1 mrg resultvi->may_have_pointers = true;
6055 1.1 mrg
6056 1.1 mrg if (DECL_RESULT (decl))
6057 1.1 mrg insert_vi_for_tree (DECL_RESULT (decl), resultvi);
6058 1.1 mrg
6059 1.1 mrg if (nonlocal_p
6060 1.1 mrg && DECL_RESULT (decl)
6061 1.1 mrg && DECL_BY_REFERENCE (DECL_RESULT (decl)))
6062 1.1 mrg make_constraint_from (resultvi, nonlocal_id);
6063 1.1 mrg
6064 1.1 mrg gcc_assert (prev_vi->offset < resultvi->offset);
6065 1.1 mrg prev_vi->next = resultvi->id;
6066 1.1 mrg prev_vi = resultvi;
6067 1.1 mrg }
6068 1.1 mrg
6069 1.1 mrg /* We also need to make function return values escape. Nothing
6070 1.1 mrg escapes by returning from main though. */
6071 1.1 mrg if (nonlocal_p
6072 1.1 mrg && !MAIN_NAME_P (DECL_NAME (decl)))
6073 1.1 mrg {
6074 1.1 mrg varinfo_t fi, rvi;
6075 1.1 mrg fi = lookup_vi_for_tree (decl);
6076 1.1 mrg rvi = first_vi_for_offset (fi, fi_result);
6077 1.1 mrg if (rvi && rvi->offset == fi_result)
6078 1.1 mrg make_copy_constraint (get_varinfo (escaped_id), rvi->id);
6079 1.1 mrg }
6080 1.1 mrg
6081 1.1 mrg /* Set up variables for each argument. */
6082 1.1 mrg arg = DECL_ARGUMENTS (decl);
6083 1.1 mrg for (i = 0; i < num_args; i++)
6084 1.1 mrg {
6085 1.1 mrg varinfo_t argvi;
6086 1.1 mrg const char *newname;
6087 1.1 mrg char *tempname;
6088 1.1 mrg tree argdecl = decl;
6089 1.1 mrg
6090 1.1 mrg if (arg)
6091 1.1 mrg argdecl = arg;
6092 1.1 mrg
6093 1.1 mrg tempname = xasprintf ("%s.arg%d", name, i);
6094 1.1 mrg newname = ggc_strdup (tempname);
6095 1.1 mrg free (tempname);
6096 1.1 mrg
6097 1.1 mrg argvi = new_var_info (argdecl, newname, false);
6098 1.1 mrg argvi->offset = fi_parm_base + i;
6099 1.1 mrg argvi->size = 1;
6100 1.1 mrg argvi->is_full_var = true;
6101 1.1 mrg argvi->fullsize = vi->fullsize;
6102 1.1 mrg if (arg)
6103 1.1 mrg argvi->may_have_pointers = true;
6104 1.1 mrg
6105 1.1 mrg if (arg)
6106 1.1 mrg insert_vi_for_tree (arg, argvi);
6107 1.1 mrg
6108 1.1 mrg if (nonlocal_p
6109 1.1 mrg && argvi->may_have_pointers)
6110 1.1 mrg make_constraint_from (argvi, nonlocal_id);
6111 1.1 mrg
6112 1.1 mrg gcc_assert (prev_vi->offset < argvi->offset);
6113 1.1 mrg prev_vi->next = argvi->id;
6114 1.1 mrg prev_vi = argvi;
6115 1.1 mrg if (arg)
6116 1.1 mrg arg = DECL_CHAIN (arg);
6117 1.1 mrg }
6118 1.1 mrg
6119 1.1 mrg /* Add one representative for all further args. */
6120 1.1 mrg if (is_varargs)
6121 1.1 mrg {
6122 1.1 mrg varinfo_t argvi;
6123 1.1 mrg const char *newname;
6124 1.1 mrg char *tempname;
6125 1.1 mrg tree decl;
6126 1.1 mrg
6127 1.1 mrg tempname = xasprintf ("%s.varargs", name);
6128 1.1 mrg newname = ggc_strdup (tempname);
6129 1.1 mrg free (tempname);
6130 1.1 mrg
6131 1.1 mrg /* We need sth that can be pointed to for va_start. */
6132 1.1 mrg decl = build_fake_var_decl (ptr_type_node);
6133 1.1 mrg
6134 1.1 mrg argvi = new_var_info (decl, newname, false);
6135 1.1 mrg argvi->offset = fi_parm_base + num_args;
6136 1.1 mrg argvi->size = ~0;
6137 1.1 mrg argvi->is_full_var = true;
6138 1.1 mrg argvi->is_heap_var = true;
6139 1.1 mrg argvi->fullsize = vi->fullsize;
6140 1.1 mrg
6141 1.1 mrg if (nonlocal_p
6142 1.1 mrg && argvi->may_have_pointers)
6143 1.1 mrg make_constraint_from (argvi, nonlocal_id);
6144 1.1 mrg
6145 1.1 mrg gcc_assert (prev_vi->offset < argvi->offset);
6146 1.1 mrg prev_vi->next = argvi->id;
6147 1.1 mrg }
6148 1.1 mrg
6149 1.1 mrg return vi;
6150 1.1 mrg }
6151 1.1 mrg
6152 1.1 mrg
6153 1.1 mrg /* Return true if FIELDSTACK contains fields that overlap.
6154 1.1 mrg FIELDSTACK is assumed to be sorted by offset. */
6155 1.1 mrg
6156 1.1 mrg static bool
6157 1.1 mrg check_for_overlaps (const vec<fieldoff_s> &fieldstack)
6158 1.1 mrg {
6159 1.1 mrg fieldoff_s *fo = NULL;
6160 1.1 mrg unsigned int i;
6161 1.1 mrg HOST_WIDE_INT lastoffset = -1;
6162 1.1 mrg
6163 1.1 mrg FOR_EACH_VEC_ELT (fieldstack, i, fo)
6164 1.1 mrg {
6165 1.1 mrg if (fo->offset == lastoffset)
6166 1.1 mrg return true;
6167 1.1 mrg lastoffset = fo->offset;
6168 1.1 mrg }
6169 1.1 mrg return false;
6170 1.1 mrg }
6171 1.1 mrg
6172 1.1 mrg /* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
6173 1.1 mrg This will also create any varinfo structures necessary for fields
6174 1.1 mrg of DECL. DECL is a function parameter if HANDLE_PARAM is set.
6175 1.1 mrg HANDLED_STRUCT_TYPE is used to register struct types reached by following
6176 1.1 mrg restrict pointers. This is needed to prevent infinite recursion.
6177 1.1 mrg If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
6178 1.1 mrg does not advertise it. */
6179 1.1 mrg
6180 1.1 mrg static varinfo_t
6181 1.1 mrg create_variable_info_for_1 (tree decl, const char *name, bool add_id,
6182 1.1 mrg bool handle_param, bitmap handled_struct_type,
6183 1.1 mrg bool add_restrict = false)
6184 1.1 mrg {
6185 1.1 mrg varinfo_t vi, newvi;
6186 1.1 mrg tree decl_type = TREE_TYPE (decl);
6187 1.1 mrg tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
6188 1.1 mrg auto_vec<fieldoff_s> fieldstack;
6189 1.1 mrg fieldoff_s *fo;
6190 1.1 mrg unsigned int i;
6191 1.1 mrg
6192 1.1 mrg if (!declsize
6193 1.1 mrg || !tree_fits_uhwi_p (declsize))
6194 1.1 mrg {
6195 1.1 mrg vi = new_var_info (decl, name, add_id);
6196 1.1 mrg vi->offset = 0;
6197 1.1 mrg vi->size = ~0;
6198 1.1 mrg vi->fullsize = ~0;
6199 1.1 mrg vi->is_unknown_size_var = true;
6200 1.1 mrg vi->is_full_var = true;
6201 1.1 mrg vi->may_have_pointers = true;
6202 1.1 mrg return vi;
6203 1.1 mrg }
6204 1.1 mrg
6205 1.1 mrg /* Collect field information. */
6206 1.1 mrg if (use_field_sensitive
6207 1.1 mrg && var_can_have_subvars (decl)
6208 1.1 mrg /* ??? Force us to not use subfields for globals in IPA mode.
6209 1.1 mrg Else we'd have to parse arbitrary initializers. */
6210 1.1 mrg && !(in_ipa_mode
6211 1.1 mrg && is_global_var (decl)))
6212 1.1 mrg {
6213 1.1 mrg fieldoff_s *fo = NULL;
6214 1.1 mrg bool notokay = false;
6215 1.1 mrg unsigned int i;
6216 1.1 mrg
6217 1.1 mrg push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
6218 1.1 mrg
6219 1.1 mrg for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
6220 1.1 mrg if (fo->has_unknown_size
6221 1.1 mrg || fo->offset < 0)
6222 1.1 mrg {
6223 1.1 mrg notokay = true;
6224 1.1 mrg break;
6225 1.1 mrg }
6226 1.1 mrg
6227 1.1 mrg /* We can't sort them if we have a field with a variable sized type,
6228 1.1 mrg which will make notokay = true. In that case, we are going to return
6229 1.1 mrg without creating varinfos for the fields anyway, so sorting them is a
6230 1.1 mrg waste to boot. */
6231 1.1 mrg if (!notokay)
6232 1.1 mrg {
6233 1.1 mrg sort_fieldstack (fieldstack);
6234 1.1 mrg /* Due to some C++ FE issues, like PR 22488, we might end up
6235 1.1 mrg what appear to be overlapping fields even though they,
6236 1.1 mrg in reality, do not overlap. Until the C++ FE is fixed,
6237 1.1 mrg we will simply disable field-sensitivity for these cases. */
6238 1.1 mrg notokay = check_for_overlaps (fieldstack);
6239 1.1 mrg }
6240 1.1 mrg
6241 1.1 mrg if (notokay)
6242 1.1 mrg fieldstack.release ();
6243 1.1 mrg }
6244 1.1 mrg
6245 1.1 mrg /* If we didn't end up collecting sub-variables create a full
6246 1.1 mrg variable for the decl. */
6247 1.1 mrg if (fieldstack.length () == 0
6248 1.1 mrg || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
6249 1.1 mrg {
6250 1.1 mrg vi = new_var_info (decl, name, add_id);
6251 1.1 mrg vi->offset = 0;
6252 1.1 mrg vi->may_have_pointers = true;
6253 1.1 mrg vi->fullsize = tree_to_uhwi (declsize);
6254 1.1 mrg vi->size = vi->fullsize;
6255 1.1 mrg vi->is_full_var = true;
6256 1.1 mrg if (POINTER_TYPE_P (decl_type)
6257 1.1 mrg && (TYPE_RESTRICT (decl_type) || add_restrict))
6258 1.1 mrg vi->only_restrict_pointers = 1;
6259 1.1 mrg if (vi->only_restrict_pointers
6260 1.1 mrg && !type_contains_placeholder_p (TREE_TYPE (decl_type))
6261 1.1 mrg && handle_param
6262 1.1 mrg && !bitmap_bit_p (handled_struct_type,
6263 1.1 mrg TYPE_UID (TREE_TYPE (decl_type))))
6264 1.1 mrg {
6265 1.1 mrg varinfo_t rvi;
6266 1.1 mrg tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
6267 1.1 mrg DECL_EXTERNAL (heapvar) = 1;
6268 1.1 mrg if (var_can_have_subvars (heapvar))
6269 1.1 mrg bitmap_set_bit (handled_struct_type,
6270 1.1 mrg TYPE_UID (TREE_TYPE (decl_type)));
6271 1.1 mrg rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6272 1.1 mrg true, handled_struct_type);
6273 1.1 mrg if (var_can_have_subvars (heapvar))
6274 1.1 mrg bitmap_clear_bit (handled_struct_type,
6275 1.1 mrg TYPE_UID (TREE_TYPE (decl_type)));
6276 1.1 mrg rvi->is_restrict_var = 1;
6277 1.1 mrg insert_vi_for_tree (heapvar, rvi);
6278 1.1 mrg make_constraint_from (vi, rvi->id);
6279 1.1 mrg make_param_constraints (rvi);
6280 1.1 mrg }
6281 1.1 mrg fieldstack.release ();
6282 1.1 mrg return vi;
6283 1.1 mrg }
6284 1.1 mrg
6285 1.1 mrg vi = new_var_info (decl, name, add_id);
6286 1.1 mrg vi->fullsize = tree_to_uhwi (declsize);
6287 1.1 mrg if (fieldstack.length () == 1)
6288 1.1 mrg vi->is_full_var = true;
6289 1.1 mrg for (i = 0, newvi = vi;
6290 1.1 mrg fieldstack.iterate (i, &fo);
6291 1.1 mrg ++i, newvi = vi_next (newvi))
6292 1.1 mrg {
6293 1.1 mrg const char *newname = NULL;
6294 1.1 mrg char *tempname;
6295 1.1 mrg
6296 1.1 mrg if (dump_file)
6297 1.1 mrg {
6298 1.1 mrg if (fieldstack.length () != 1)
6299 1.1 mrg {
6300 1.1 mrg tempname
6301 1.1 mrg = xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
6302 1.1 mrg "+" HOST_WIDE_INT_PRINT_DEC, name,
6303 1.1 mrg fo->offset, fo->size);
6304 1.1 mrg newname = ggc_strdup (tempname);
6305 1.1 mrg free (tempname);
6306 1.1 mrg }
6307 1.1 mrg }
6308 1.1 mrg else
6309 1.1 mrg newname = "NULL";
6310 1.1 mrg
6311 1.1 mrg if (newname)
6312 1.1 mrg newvi->name = newname;
6313 1.1 mrg newvi->offset = fo->offset;
6314 1.1 mrg newvi->size = fo->size;
6315 1.1 mrg newvi->fullsize = vi->fullsize;
6316 1.1 mrg newvi->may_have_pointers = fo->may_have_pointers;
6317 1.1 mrg newvi->only_restrict_pointers = fo->only_restrict_pointers;
6318 1.1 mrg if (handle_param
6319 1.1 mrg && newvi->only_restrict_pointers
6320 1.1 mrg && !type_contains_placeholder_p (fo->restrict_pointed_type)
6321 1.1 mrg && !bitmap_bit_p (handled_struct_type,
6322 1.1 mrg TYPE_UID (fo->restrict_pointed_type)))
6323 1.1 mrg {
6324 1.1 mrg varinfo_t rvi;
6325 1.1 mrg tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
6326 1.1 mrg DECL_EXTERNAL (heapvar) = 1;
6327 1.1 mrg if (var_can_have_subvars (heapvar))
6328 1.1 mrg bitmap_set_bit (handled_struct_type,
6329 1.1 mrg TYPE_UID (fo->restrict_pointed_type));
6330 1.1 mrg rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
6331 1.1 mrg true, handled_struct_type);
6332 1.1 mrg if (var_can_have_subvars (heapvar))
6333 1.1 mrg bitmap_clear_bit (handled_struct_type,
6334 1.1 mrg TYPE_UID (fo->restrict_pointed_type));
6335 1.1 mrg rvi->is_restrict_var = 1;
6336 1.1 mrg insert_vi_for_tree (heapvar, rvi);
6337 1.1 mrg make_constraint_from (newvi, rvi->id);
6338 1.1 mrg make_param_constraints (rvi);
6339 1.1 mrg }
6340 1.1 mrg if (i + 1 < fieldstack.length ())
6341 1.1 mrg {
6342 1.1 mrg varinfo_t tem = new_var_info (decl, name, false);
6343 1.1 mrg newvi->next = tem->id;
6344 1.1 mrg tem->head = vi->id;
6345 1.1 mrg }
6346 1.1 mrg }
6347 1.1 mrg
6348 1.1 mrg return vi;
6349 1.1 mrg }
6350 1.1 mrg
6351 1.1 mrg static unsigned int
6352 1.1 mrg create_variable_info_for (tree decl, const char *name, bool add_id)
6353 1.1 mrg {
6354 1.1 mrg /* First see if we are dealing with an ifunc resolver call and
6355 1.1 mrg assiociate that with a call to the resolver function result. */
6356 1.1 mrg cgraph_node *node;
6357 1.1 mrg if (in_ipa_mode
6358 1.1 mrg && TREE_CODE (decl) == FUNCTION_DECL
6359 1.1 mrg && (node = cgraph_node::get (decl))
6360 1.1 mrg && node->ifunc_resolver)
6361 1.1 mrg {
6362 1.1 mrg varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
6363 1.1 mrg constraint_expr rhs
6364 1.1 mrg = get_function_part_constraint (fi, fi_result);
6365 1.1 mrg fi = new_var_info (NULL_TREE, "ifuncres", true);
6366 1.1 mrg fi->is_reg_var = true;
6367 1.1 mrg constraint_expr lhs;
6368 1.1 mrg lhs.type = SCALAR;
6369 1.1 mrg lhs.var = fi->id;
6370 1.1 mrg lhs.offset = 0;
6371 1.1 mrg process_constraint (new_constraint (lhs, rhs));
6372 1.1 mrg insert_vi_for_tree (decl, fi);
6373 1.1 mrg return fi->id;
6374 1.1 mrg }
6375 1.1 mrg
6376 1.1 mrg varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
6377 1.1 mrg unsigned int id = vi->id;
6378 1.1 mrg
6379 1.1 mrg insert_vi_for_tree (decl, vi);
6380 1.1 mrg
6381 1.1 mrg if (!VAR_P (decl))
6382 1.1 mrg return id;
6383 1.1 mrg
6384 1.1 mrg /* Create initial constraints for globals. */
6385 1.1 mrg for (; vi; vi = vi_next (vi))
6386 1.1 mrg {
6387 1.1 mrg if (!vi->may_have_pointers
6388 1.1 mrg || !vi->is_global_var)
6389 1.1 mrg continue;
6390 1.1 mrg
6391 1.1 mrg /* Mark global restrict qualified pointers. */
6392 1.1 mrg if ((POINTER_TYPE_P (TREE_TYPE (decl))
6393 1.1 mrg && TYPE_RESTRICT (TREE_TYPE (decl)))
6394 1.1 mrg || vi->only_restrict_pointers)
6395 1.1 mrg {
6396 1.1 mrg varinfo_t rvi
6397 1.1 mrg = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
6398 1.1 mrg true);
6399 1.1 mrg /* ??? For now exclude reads from globals as restrict sources
6400 1.1 mrg if those are not (indirectly) from incoming parameters. */
6401 1.1 mrg rvi->is_restrict_var = false;
6402 1.1 mrg continue;
6403 1.1 mrg }
6404 1.1 mrg
6405 1.1 mrg /* In non-IPA mode the initializer from nonlocal is all we need. */
6406 1.1 mrg if (!in_ipa_mode
6407 1.1 mrg || DECL_HARD_REGISTER (decl))
6408 1.1 mrg make_copy_constraint (vi, nonlocal_id);
6409 1.1 mrg
6410 1.1 mrg /* In IPA mode parse the initializer and generate proper constraints
6411 1.1 mrg for it. */
6412 1.1 mrg else
6413 1.1 mrg {
6414 1.1 mrg varpool_node *vnode = varpool_node::get (decl);
6415 1.1 mrg
6416 1.1 mrg /* For escaped variables initialize them from nonlocal. */
6417 1.1 mrg if (!vnode->all_refs_explicit_p ())
6418 1.1 mrg make_copy_constraint (vi, nonlocal_id);
6419 1.1 mrg
6420 1.1 mrg /* If this is a global variable with an initializer and we are in
6421 1.1 mrg IPA mode generate constraints for it. */
6422 1.1 mrg ipa_ref *ref;
6423 1.1 mrg for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
6424 1.1 mrg {
6425 1.1 mrg auto_vec<ce_s> rhsc;
6426 1.1 mrg struct constraint_expr lhs, *rhsp;
6427 1.1 mrg unsigned i;
6428 1.1 mrg get_constraint_for_address_of (ref->referred->decl, &rhsc);
6429 1.1 mrg lhs.var = vi->id;
6430 1.1 mrg lhs.offset = 0;
6431 1.1 mrg lhs.type = SCALAR;
6432 1.1 mrg FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6433 1.1 mrg process_constraint (new_constraint (lhs, *rhsp));
6434 1.1 mrg /* If this is a variable that escapes from the unit
6435 1.1 mrg the initializer escapes as well. */
6436 1.1 mrg if (!vnode->all_refs_explicit_p ())
6437 1.1 mrg {
6438 1.1 mrg lhs.var = escaped_id;
6439 1.1 mrg lhs.offset = 0;
6440 1.1 mrg lhs.type = SCALAR;
6441 1.1 mrg FOR_EACH_VEC_ELT (rhsc, i, rhsp)
6442 1.1 mrg process_constraint (new_constraint (lhs, *rhsp));
6443 1.1 mrg }
6444 1.1 mrg }
6445 1.1 mrg }
6446 1.1 mrg }
6447 1.1 mrg
6448 1.1 mrg return id;
6449 1.1 mrg }
6450 1.1 mrg
6451 1.1 mrg /* Print out the points-to solution for VAR to FILE. */
6452 1.1 mrg
6453 1.1 mrg static void
6454 1.1 mrg dump_solution_for_var (FILE *file, unsigned int var)
6455 1.1 mrg {
6456 1.1 mrg varinfo_t vi = get_varinfo (var);
6457 1.1 mrg unsigned int i;
6458 1.1 mrg bitmap_iterator bi;
6459 1.1 mrg
6460 1.1 mrg /* Dump the solution for unified vars anyway, this avoids difficulties
6461 1.1 mrg in scanning dumps in the testsuite. */
6462 1.1 mrg fprintf (file, "%s = { ", vi->name);
6463 1.1 mrg vi = get_varinfo (find (var));
6464 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6465 1.1 mrg fprintf (file, "%s ", get_varinfo (i)->name);
6466 1.1 mrg fprintf (file, "}");
6467 1.1 mrg
6468 1.1 mrg /* But note when the variable was unified. */
6469 1.1 mrg if (vi->id != var)
6470 1.1 mrg fprintf (file, " same as %s", vi->name);
6471 1.1 mrg
6472 1.1 mrg fprintf (file, "\n");
6473 1.1 mrg }
6474 1.1 mrg
6475 1.1 mrg /* Print the points-to solution for VAR to stderr. */
6476 1.1 mrg
6477 1.1 mrg DEBUG_FUNCTION void
6478 1.1 mrg debug_solution_for_var (unsigned int var)
6479 1.1 mrg {
6480 1.1 mrg dump_solution_for_var (stderr, var);
6481 1.1 mrg }
6482 1.1 mrg
6483 1.1 mrg /* Register the constraints for function parameter related VI. */
6484 1.1 mrg
6485 1.1 mrg static void
6486 1.1 mrg make_param_constraints (varinfo_t vi)
6487 1.1 mrg {
6488 1.1 mrg for (; vi; vi = vi_next (vi))
6489 1.1 mrg {
6490 1.1 mrg if (vi->only_restrict_pointers)
6491 1.1 mrg ;
6492 1.1 mrg else if (vi->may_have_pointers)
6493 1.1 mrg make_constraint_from (vi, nonlocal_id);
6494 1.1 mrg
6495 1.1 mrg if (vi->is_full_var)
6496 1.1 mrg break;
6497 1.1 mrg }
6498 1.1 mrg }
6499 1.1 mrg
6500 1.1 mrg /* Create varinfo structures for all of the variables in the
6501 1.1 mrg function for intraprocedural mode. */
6502 1.1 mrg
6503 1.1 mrg static void
6504 1.1 mrg intra_create_variable_infos (struct function *fn)
6505 1.1 mrg {
6506 1.1 mrg tree t;
6507 1.1 mrg bitmap handled_struct_type = NULL;
6508 1.1 mrg bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);
6509 1.1 mrg
6510 1.1 mrg /* For each incoming pointer argument arg, create the constraint ARG
6511 1.1 mrg = NONLOCAL or a dummy variable if it is a restrict qualified
6512 1.1 mrg passed-by-reference argument. */
6513 1.1 mrg for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
6514 1.1 mrg {
6515 1.1 mrg if (handled_struct_type == NULL)
6516 1.1 mrg handled_struct_type = BITMAP_ALLOC (NULL);
6517 1.1 mrg
6518 1.1 mrg varinfo_t p
6519 1.1 mrg = create_variable_info_for_1 (t, alias_get_name (t), false, true,
6520 1.1 mrg handled_struct_type, this_parm_in_ctor);
6521 1.1 mrg insert_vi_for_tree (t, p);
6522 1.1 mrg
6523 1.1 mrg make_param_constraints (p);
6524 1.1 mrg
6525 1.1 mrg this_parm_in_ctor = false;
6526 1.1 mrg }
6527 1.1 mrg
6528 1.1 mrg if (handled_struct_type != NULL)
6529 1.1 mrg BITMAP_FREE (handled_struct_type);
6530 1.1 mrg
6531 1.1 mrg /* Add a constraint for a result decl that is passed by reference. */
6532 1.1 mrg if (DECL_RESULT (fn->decl)
6533 1.1 mrg && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
6534 1.1 mrg {
6535 1.1 mrg varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));
6536 1.1 mrg
6537 1.1 mrg for (p = result_vi; p; p = vi_next (p))
6538 1.1 mrg make_constraint_from (p, nonlocal_id);
6539 1.1 mrg }
6540 1.1 mrg
6541 1.1 mrg /* Add a constraint for the incoming static chain parameter. */
6542 1.1 mrg if (fn->static_chain_decl != NULL_TREE)
6543 1.1 mrg {
6544 1.1 mrg varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);
6545 1.1 mrg
6546 1.1 mrg for (p = chain_vi; p; p = vi_next (p))
6547 1.1 mrg make_constraint_from (p, nonlocal_id);
6548 1.1 mrg }
6549 1.1 mrg }
6550 1.1 mrg
6551 1.1 mrg /* Structure used to put solution bitmaps in a hashtable so they can
6552 1.1 mrg be shared among variables with the same points-to set. */
6553 1.1 mrg
6554 1.1 mrg typedef struct shared_bitmap_info
6555 1.1 mrg {
6556 1.1 mrg bitmap pt_vars;
6557 1.1 mrg hashval_t hashcode;
6558 1.1 mrg } *shared_bitmap_info_t;
6559 1.1 mrg typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6560 1.1 mrg
6561 1.1 mrg /* Shared_bitmap hashtable helpers. */
6562 1.1 mrg
6563 1.1 mrg struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
6564 1.1 mrg {
6565 1.1 mrg static inline hashval_t hash (const shared_bitmap_info *);
6566 1.1 mrg static inline bool equal (const shared_bitmap_info *,
6567 1.1 mrg const shared_bitmap_info *);
6568 1.1 mrg };
6569 1.1 mrg
6570 1.1 mrg /* Hash function for a shared_bitmap_info_t */
6571 1.1 mrg
6572 1.1 mrg inline hashval_t
6573 1.1 mrg shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
6574 1.1 mrg {
6575 1.1 mrg return bi->hashcode;
6576 1.1 mrg }
6577 1.1 mrg
6578 1.1 mrg /* Equality function for two shared_bitmap_info_t's. */
6579 1.1 mrg
6580 1.1 mrg inline bool
6581 1.1 mrg shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
6582 1.1 mrg const shared_bitmap_info *sbi2)
6583 1.1 mrg {
6584 1.1 mrg return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
6585 1.1 mrg }
6586 1.1 mrg
6587 1.1 mrg /* Shared_bitmap hashtable. */
6588 1.1 mrg
6589 1.1 mrg static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6590 1.1 mrg
6591 1.1 mrg /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6592 1.1 mrg existing instance if there is one, NULL otherwise. */
6593 1.1 mrg
6594 1.1 mrg static bitmap
6595 1.1 mrg shared_bitmap_lookup (bitmap pt_vars)
6596 1.1 mrg {
6597 1.1 mrg shared_bitmap_info **slot;
6598 1.1 mrg struct shared_bitmap_info sbi;
6599 1.1 mrg
6600 1.1 mrg sbi.pt_vars = pt_vars;
6601 1.1 mrg sbi.hashcode = bitmap_hash (pt_vars);
6602 1.1 mrg
6603 1.1 mrg slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6604 1.1 mrg if (!slot)
6605 1.1 mrg return NULL;
6606 1.1 mrg else
6607 1.1 mrg return (*slot)->pt_vars;
6608 1.1 mrg }
6609 1.1 mrg
6610 1.1 mrg
6611 1.1 mrg /* Add a bitmap to the shared bitmap hashtable. */
6612 1.1 mrg
6613 1.1 mrg static void
6614 1.1 mrg shared_bitmap_add (bitmap pt_vars)
6615 1.1 mrg {
6616 1.1 mrg shared_bitmap_info **slot;
6617 1.1 mrg shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6618 1.1 mrg
6619 1.1 mrg sbi->pt_vars = pt_vars;
6620 1.1 mrg sbi->hashcode = bitmap_hash (pt_vars);
6621 1.1 mrg
6622 1.1 mrg slot = shared_bitmap_table->find_slot (sbi, INSERT);
6623 1.1 mrg gcc_assert (!*slot);
6624 1.1 mrg *slot = sbi;
6625 1.1 mrg }
6626 1.1 mrg
6627 1.1 mrg
6628 1.1 mrg /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6629 1.1 mrg
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, updating the escape/nonlocal/restrict summary flags of *PT as
   a side effect.  FNDECL, if non-NULL, is the function the solution
   is computed for (used for the auto-variable check in IPA mode).  */

static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  /* If ESCAPED's solution contains ANYTHING then every variable we
     visit below has to be treated as escaped.  */
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* Artificial variables (ANYTHING, ESCAPED, ...) are expanded by
	 the caller and never end up in the uid bitmap.  */
      if (vi->is_artificial_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6715 1.1 mrg
6716 1.1 mrg
/* Compute and return the points-to solution for the variable ORIG_VI
   in function FNDECL, caching the result.  */
6718 1.1 mrg
/* Compute and return the points-to solution for ORIG_VI in function
   FNDECL.  Results are cached in FINAL_SOLUTIONS and final variable
   bitmaps are shared through the shared-bitmap table.  */

static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  /* Cache the new (zero-initialized) solution before filling it in.  */
  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      /* First time we see this set - make it the canonical copy.  */
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An identical set already exists - reuse it and release the
	 just-built bitmap's elements.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6797 1.1 mrg
6798 1.1 mrg /* Given a pointer variable P, fill in its points-to set. */
6799 1.1 mrg
/* Given a pointer variable P, fill in its points-to set.  FNDECL is
   the function P belongs to.  Stores the solution in P's
   SSA_NAME_PTR_INFO, conservatively re-adding NULL but preserving a
   globally computed non-NULL range.  */

static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  value_range vr;
  /* Query the range info before PTA overwrites the pointer info.  */
  get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (vr, p);
  bool nonnull = vr.nonzero_p ();

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  /* No constraint variable means PTA never saw this pointer; leave
     its points-to info alone.  */
  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, vi);
  /* Conservatively set to NULL from PTA (to true).  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull globally computed.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
6830 1.1 mrg
6831 1.1 mrg
6832 1.1 mrg /* Query statistics for points-to solutions. */
6833 1.1 mrg
static struct {
  /* Number of pt_solution_includes queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  /* Number of pt_solution_includes queries answered "no alias".  */
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  /* Number of pt_solutions_intersect queries answered "may alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  /* Number of pt_solutions_intersect queries answered "no alias".  */
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6840 1.1 mrg
6841 1.1 mrg void
6842 1.1 mrg dump_pta_stats (FILE *s)
6843 1.1 mrg {
6844 1.1 mrg fprintf (s, "\nPTA query stats:\n");
6845 1.1 mrg fprintf (s, " pt_solution_includes: "
6846 1.1 mrg HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6847 1.1 mrg HOST_WIDE_INT_PRINT_DEC" queries\n",
6848 1.1 mrg pta_stats.pt_solution_includes_no_alias,
6849 1.1 mrg pta_stats.pt_solution_includes_no_alias
6850 1.1 mrg + pta_stats.pt_solution_includes_may_alias);
6851 1.1 mrg fprintf (s, " pt_solutions_intersect: "
6852 1.1 mrg HOST_WIDE_INT_PRINT_DEC" disambiguations, "
6853 1.1 mrg HOST_WIDE_INT_PRINT_DEC" queries\n",
6854 1.1 mrg pta_stats.pt_solutions_intersect_no_alias,
6855 1.1 mrg pta_stats.pt_solutions_intersect_no_alias
6856 1.1 mrg + pta_stats.pt_solutions_intersect_may_alias);
6857 1.1 mrg }
6858 1.1 mrg
6859 1.1 mrg
6860 1.1 mrg /* Reset the points-to solution *PT to a conservative default
6861 1.1 mrg (point to anything). */
6862 1.1 mrg
6863 1.1 mrg void
6864 1.1 mrg pt_solution_reset (struct pt_solution *pt)
6865 1.1 mrg {
6866 1.1 mrg memset (pt, 0, sizeof (struct pt_solution));
6867 1.1 mrg pt->anything = true;
6868 1.1 mrg pt->null = true;
6869 1.1 mrg }
6870 1.1 mrg
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether that contains
   global variables.  */
6875 1.1 mrg
6876 1.1 mrg void
6877 1.1 mrg pt_solution_set (struct pt_solution *pt, bitmap vars,
6878 1.1 mrg bool vars_contains_nonlocal)
6879 1.1 mrg {
6880 1.1 mrg memset (pt, 0, sizeof (struct pt_solution));
6881 1.1 mrg pt->vars = vars;
6882 1.1 mrg pt->vars_contains_nonlocal = vars_contains_nonlocal;
6883 1.1 mrg pt->vars_contains_escaped
6884 1.1 mrg = (cfun->gimple_df->escaped.anything
6885 1.1 mrg || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
6886 1.1 mrg }
6887 1.1 mrg
6888 1.1 mrg /* Set the points-to solution *PT to point only to the variable VAR. */
6889 1.1 mrg
6890 1.1 mrg void
6891 1.1 mrg pt_solution_set_var (struct pt_solution *pt, tree var)
6892 1.1 mrg {
6893 1.1 mrg memset (pt, 0, sizeof (struct pt_solution));
6894 1.1 mrg pt->vars = BITMAP_GGC_ALLOC ();
6895 1.1 mrg bitmap_set_bit (pt->vars, DECL_PT_UID (var));
6896 1.1 mrg pt->vars_contains_nonlocal = is_global_var (var);
6897 1.1 mrg pt->vars_contains_escaped
6898 1.1 mrg = (cfun->gimple_df->escaped.anything
6899 1.1 mrg || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
6900 1.1 mrg }
6901 1.1 mrg
6902 1.1 mrg /* Computes the union of the points-to solutions *DEST and *SRC and
6903 1.1 mrg stores the result in *DEST. This changes the points-to bitmap
6904 1.1 mrg of *DEST and thus may not be used if that might be shared.
6905 1.1 mrg The points-to bitmap of *SRC and *DEST will not be shared after
6906 1.1 mrg this function if they were not before. */
6907 1.1 mrg
6908 1.1 mrg static void
6909 1.1 mrg pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6910 1.1 mrg {
6911 1.1 mrg dest->anything |= src->anything;
6912 1.1 mrg if (dest->anything)
6913 1.1 mrg {
6914 1.1 mrg pt_solution_reset (dest);
6915 1.1 mrg return;
6916 1.1 mrg }
6917 1.1 mrg
6918 1.1 mrg dest->nonlocal |= src->nonlocal;
6919 1.1 mrg dest->escaped |= src->escaped;
6920 1.1 mrg dest->ipa_escaped |= src->ipa_escaped;
6921 1.1 mrg dest->null |= src->null;
6922 1.1 mrg dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6923 1.1 mrg dest->vars_contains_escaped |= src->vars_contains_escaped;
6924 1.1 mrg dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6925 1.1 mrg if (!src->vars)
6926 1.1 mrg return;
6927 1.1 mrg
6928 1.1 mrg if (!dest->vars)
6929 1.1 mrg dest->vars = BITMAP_GGC_ALLOC ();
6930 1.1 mrg bitmap_ior_into (dest->vars, src->vars);
6931 1.1 mrg }
6932 1.1 mrg
6933 1.1 mrg /* Return true if the points-to solution *PT is empty. */
6934 1.1 mrg
6935 1.1 mrg bool
6936 1.1 mrg pt_solution_empty_p (const pt_solution *pt)
6937 1.1 mrg {
6938 1.1 mrg if (pt->anything
6939 1.1 mrg || pt->nonlocal)
6940 1.1 mrg return false;
6941 1.1 mrg
6942 1.1 mrg if (pt->vars
6943 1.1 mrg && !bitmap_empty_p (pt->vars))
6944 1.1 mrg return false;
6945 1.1 mrg
6946 1.1 mrg /* If the solution includes ESCAPED, check if that is empty. */
6947 1.1 mrg if (pt->escaped
6948 1.1 mrg && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6949 1.1 mrg return false;
6950 1.1 mrg
6951 1.1 mrg /* If the solution includes ESCAPED, check if that is empty. */
6952 1.1 mrg if (pt->ipa_escaped
6953 1.1 mrg && !pt_solution_empty_p (&ipa_escaped_pt))
6954 1.1 mrg return false;
6955 1.1 mrg
6956 1.1 mrg return true;
6957 1.1 mrg }
6958 1.1 mrg
/* Return true if the points-to solution *PT points to exactly one
   variable (possibly plus NULL), and return that var's uid in *UID.  */
6961 1.1 mrg
6962 1.1 mrg bool
6963 1.1 mrg pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6964 1.1 mrg {
6965 1.1 mrg if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6966 1.1 mrg || pt->vars == NULL
6967 1.1 mrg || !bitmap_single_bit_set_p (pt->vars))
6968 1.1 mrg return false;
6969 1.1 mrg
6970 1.1 mrg *uid = bitmap_first_set_bit (pt->vars);
6971 1.1 mrg return true;
6972 1.1 mrg }
6973 1.1 mrg
6974 1.1 mrg /* Return true if the points-to solution *PT includes global memory.
6975 1.1 mrg If ESCAPED_LOCAL_P is true then escaped local variables are also
6976 1.1 mrg considered global. */
6977 1.1 mrg
6978 1.1 mrg bool
6979 1.1 mrg pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p)
6980 1.1 mrg {
6981 1.1 mrg if (pt->anything
6982 1.1 mrg || pt->nonlocal
6983 1.1 mrg || pt->vars_contains_nonlocal
6984 1.1 mrg /* The following is a hack to make the malloc escape hack work.
6985 1.1 mrg In reality we'd need different sets for escaped-through-return
6986 1.1 mrg and escaped-to-callees and passes would need to be updated. */
6987 1.1 mrg || pt->vars_contains_escaped_heap)
6988 1.1 mrg return true;
6989 1.1 mrg
6990 1.1 mrg if (escaped_local_p && pt->vars_contains_escaped)
6991 1.1 mrg return true;
6992 1.1 mrg
6993 1.1 mrg /* 'escaped' is also a placeholder so we have to look into it. */
6994 1.1 mrg if (pt->escaped)
6995 1.1 mrg return pt_solution_includes_global (&cfun->gimple_df->escaped,
6996 1.1 mrg escaped_local_p);
6997 1.1 mrg
6998 1.1 mrg if (pt->ipa_escaped)
6999 1.1 mrg return pt_solution_includes_global (&ipa_escaped_pt,
7000 1.1 mrg escaped_local_p);
7001 1.1 mrg
7002 1.1 mrg return false;
7003 1.1 mrg }
7004 1.1 mrg
7005 1.1 mrg /* Return true if the points-to solution *PT includes the variable
7006 1.1 mrg declaration DECL. */
7007 1.1 mrg
7008 1.1 mrg static bool
7009 1.1 mrg pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
7010 1.1 mrg {
7011 1.1 mrg if (pt->anything)
7012 1.1 mrg return true;
7013 1.1 mrg
7014 1.1 mrg if (pt->nonlocal
7015 1.1 mrg && is_global_var (decl))
7016 1.1 mrg return true;
7017 1.1 mrg
7018 1.1 mrg if (pt->vars
7019 1.1 mrg && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
7020 1.1 mrg return true;
7021 1.1 mrg
7022 1.1 mrg /* If the solution includes ESCAPED, check it. */
7023 1.1 mrg if (pt->escaped
7024 1.1 mrg && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
7025 1.1 mrg return true;
7026 1.1 mrg
7027 1.1 mrg /* If the solution includes ESCAPED, check it. */
7028 1.1 mrg if (pt->ipa_escaped
7029 1.1 mrg && pt_solution_includes_1 (&ipa_escaped_pt, decl))
7030 1.1 mrg return true;
7031 1.1 mrg
7032 1.1 mrg return false;
7033 1.1 mrg }
7034 1.1 mrg
7035 1.1 mrg bool
7036 1.1 mrg pt_solution_includes (struct pt_solution *pt, const_tree decl)
7037 1.1 mrg {
7038 1.1 mrg bool res = pt_solution_includes_1 (pt, decl);
7039 1.1 mrg if (res)
7040 1.1 mrg ++pta_stats.pt_solution_includes_may_alias;
7041 1.1 mrg else
7042 1.1 mrg ++pta_stats.pt_solution_includes_no_alias;
7043 1.1 mrg return res;
7044 1.1 mrg }
7045 1.1 mrg
7046 1.1 mrg /* Return true if both points-to solutions PT1 and PT2 have a non-empty
7047 1.1 mrg intersection. */
7048 1.1 mrg
7049 1.1 mrg static bool
7050 1.1 mrg pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
7051 1.1 mrg {
7052 1.1 mrg if (pt1->anything || pt2->anything)
7053 1.1 mrg return true;
7054 1.1 mrg
7055 1.1 mrg /* If either points to unknown global memory and the other points to
7056 1.1 mrg any global memory they alias. */
7057 1.1 mrg if ((pt1->nonlocal
7058 1.1 mrg && (pt2->nonlocal
7059 1.1 mrg || pt2->vars_contains_nonlocal))
7060 1.1 mrg || (pt2->nonlocal
7061 1.1 mrg && pt1->vars_contains_nonlocal))
7062 1.1 mrg return true;
7063 1.1 mrg
7064 1.1 mrg /* If either points to all escaped memory and the other points to
7065 1.1 mrg any escaped memory they alias. */
7066 1.1 mrg if ((pt1->escaped
7067 1.1 mrg && (pt2->escaped
7068 1.1 mrg || pt2->vars_contains_escaped))
7069 1.1 mrg || (pt2->escaped
7070 1.1 mrg && pt1->vars_contains_escaped))
7071 1.1 mrg return true;
7072 1.1 mrg
7073 1.1 mrg /* Check the escaped solution if required.
7074 1.1 mrg ??? Do we need to check the local against the IPA escaped sets? */
7075 1.1 mrg if ((pt1->ipa_escaped || pt2->ipa_escaped)
7076 1.1 mrg && !pt_solution_empty_p (&ipa_escaped_pt))
7077 1.1 mrg {
7078 1.1 mrg /* If both point to escaped memory and that solution
7079 1.1 mrg is not empty they alias. */
7080 1.1 mrg if (pt1->ipa_escaped && pt2->ipa_escaped)
7081 1.1 mrg return true;
7082 1.1 mrg
7083 1.1 mrg /* If either points to escaped memory see if the escaped solution
7084 1.1 mrg intersects with the other. */
7085 1.1 mrg if ((pt1->ipa_escaped
7086 1.1 mrg && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
7087 1.1 mrg || (pt2->ipa_escaped
7088 1.1 mrg && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
7089 1.1 mrg return true;
7090 1.1 mrg }
7091 1.1 mrg
7092 1.1 mrg /* Now both pointers alias if their points-to solution intersects. */
7093 1.1 mrg return (pt1->vars
7094 1.1 mrg && pt2->vars
7095 1.1 mrg && bitmap_intersect_p (pt1->vars, pt2->vars));
7096 1.1 mrg }
7097 1.1 mrg
7098 1.1 mrg bool
7099 1.1 mrg pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
7100 1.1 mrg {
7101 1.1 mrg bool res = pt_solutions_intersect_1 (pt1, pt2);
7102 1.1 mrg if (res)
7103 1.1 mrg ++pta_stats.pt_solutions_intersect_may_alias;
7104 1.1 mrg else
7105 1.1 mrg ++pta_stats.pt_solutions_intersect_no_alias;
7106 1.1 mrg return res;
7107 1.1 mrg }
7108 1.1 mrg
7109 1.1 mrg
7110 1.1 mrg /* Dump points-to information to OUTFILE. */
7111 1.1 mrg
7112 1.1 mrg static void
7113 1.1 mrg dump_sa_points_to_info (FILE *outfile)
7114 1.1 mrg {
7115 1.1 mrg unsigned int i;
7116 1.1 mrg
7117 1.1 mrg fprintf (outfile, "\nPoints-to sets\n\n");
7118 1.1 mrg
7119 1.1 mrg if (dump_flags & TDF_STATS)
7120 1.1 mrg {
7121 1.1 mrg fprintf (outfile, "Stats:\n");
7122 1.1 mrg fprintf (outfile, "Total vars: %d\n", stats.total_vars);
7123 1.1 mrg fprintf (outfile, "Non-pointer vars: %d\n",
7124 1.1 mrg stats.nonpointer_vars);
7125 1.1 mrg fprintf (outfile, "Statically unified vars: %d\n",
7126 1.1 mrg stats.unified_vars_static);
7127 1.1 mrg fprintf (outfile, "Dynamically unified vars: %d\n",
7128 1.1 mrg stats.unified_vars_dynamic);
7129 1.1 mrg fprintf (outfile, "Iterations: %d\n", stats.iterations);
7130 1.1 mrg fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
7131 1.1 mrg fprintf (outfile, "Number of implicit edges: %d\n",
7132 1.1 mrg stats.num_implicit_edges);
7133 1.1 mrg }
7134 1.1 mrg
7135 1.1 mrg for (i = 1; i < varmap.length (); i++)
7136 1.1 mrg {
7137 1.1 mrg varinfo_t vi = get_varinfo (i);
7138 1.1 mrg if (!vi->may_have_pointers)
7139 1.1 mrg continue;
7140 1.1 mrg dump_solution_for_var (outfile, i);
7141 1.1 mrg }
7142 1.1 mrg }
7143 1.1 mrg
7144 1.1 mrg
7145 1.1 mrg /* Debug points-to information to stderr. */
7146 1.1 mrg
/* Debug points-to information to stderr.  Convenience entry point
   for use from within a debugger.  */

DEBUG_FUNCTION void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
7152 1.1 mrg
7153 1.1 mrg
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER.  */
7156 1.1 mrg
/* Initialize the always-existing artificial constraint variables and
   the base constraints among them.  The creation order below is
   significant: each new_var_info call must yield the id asserted
   (nothing_id, anything_id, string_id, escaped_id, nonlocal_id,
   storedanything_id, integer_id).  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  var_nothing = new_var_info (NULL_TREE, "NULL", false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything.  */
  var_string = new_var_info (NULL_TREE, "STRING", false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  Not a special var: it participates in solving like a
     normal node.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER", false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
7315 1.1 mrg
7316 1.1 mrg /* Initialize things necessary to perform PTA */
7317 1.1 mrg
/* Initialize things necessary to perform PTA: obstacks, the
   constraint and variable vectors, lookup tables, and the base
   constraint variables.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity is pointless with a budget of at most one
     field per variable.  */
  use_field_sensitive = (param_max_fields_for_field_sensitive > 1);

  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  /* Create NULL, ANYTHING, ESCAPED etc. and their base constraints;
     must happen after varmap is created.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  /* Storage for the per-variable cached pt_solutions computed by
     find_what_var_points_to.  */
  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
7341 1.1 mrg
7342 1.1 mrg /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7343 1.1 mrg predecessor edges. */
7344 1.1 mrg
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  Called once solving no longer needs them, to
   release memory before the main propagation.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  Must happen after the loops above so
     no live bitmap is dropped by the shrink.  */
  graph->size = varmap.length ();
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  /* All predecessor bitmaps lived on this obstack.  */
  bitmap_obstack_release (&predbitmap_obstack);
}
7377 1.1 mrg
7378 1.1 mrg /* Solve the constraint set. */
7379 1.1 mrg
/* Solve the constraint set: reorder variables for bitmap density,
   build and simplify the constraint graph, then propagate solutions
   to a fixed point.  */

static void
solve_constraints (void)
{
  class scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient.  */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  /* The special vars (up to and including INTEGER) keep their
     fixed ids.  */
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with address-taken vars, followed by not address-taken vars
     to move vars never appearing in the points-to solution bitmaps last.  */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  /* Shuffle varmap according to map.  Each inner while resolves one
     permutation cycle by swapping entries into their final slots,
     then the entry's id/next/head links are rewritten through MAP.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      while (map[varmap[i]->id] != i)
	std::swap (varmap[i], varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints.  */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (map);

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: ref nodes live above FIRST_REF_NODE.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  /* Propagate the solutions to a fixed point.  */
  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }
}
7479 1.1 mrg
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  /* Seed variable infos and constraints for things live on function
     entry (parameters etc.).  */
  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  /* Virtual PHIs do not carry pointer values, only real
	     operands generate constraints.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Post-process solutions for escapes through returns.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
      {
	tree val = gimple_return_retval (ret);
	/* ??? Easy to handle simple indirections with some work.
	   Arbitrary references like foo.bar.baz are more difficult
	   (but conservatively easy enough with just looking at the base).
	   Mind to fixup find_func_aliases as well.  */
	if (!val || !SSA_VAR_P (val))
	  continue;
	/* returns happen last in non-IPA so they only influence
	   the ESCAPED solution and we can filter local variables.  */
	varinfo_t escaped_vi = get_varinfo (find (escaped_id));
	varinfo_t vi = lookup_vi_for_tree (val);
	bitmap delta = BITMAP_ALLOC (&pta_obstack);
	bitmap_iterator bi;
	unsigned i;
	/* Collect into DELTA the global (or delayed-global heap)
	   variables the returned value points to that are not already
	   in ESCAPED.  */
	for (; vi; vi = vi_next (vi))
	  {
	    varinfo_t part_vi = get_varinfo (find (vi->id));
	    EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
					    escaped_vi->solution, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		if (pointed_to_vi->is_global_var
		    /* We delay marking of heap memory as global.  */
		    || pointed_to_vi->is_heap_var)
		  bitmap_set_bit (delta, i);
	      }
	  }

	/* Now compute the transitive closure.  Iterate, expanding the
	   last round's DELTA into NEW_DELTA, until no new globals are
	   discovered.  */
	bitmap_ior_into (escaped_vi->solution, delta);
	bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
	while (!bitmap_empty_p (delta))
	  {
	    EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		/* Use the representative of the variable's partition.  */
		pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
		unsigned j;
		bitmap_iterator bi2;
		EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
						escaped_vi->solution,
						0, j, bi2)
		  {
		    varinfo_t pointed_to_vi2 = get_varinfo (j);
		    if (pointed_to_vi2->is_global_var
			/* We delay marking of heap memory as global.  */
			|| pointed_to_vi2->is_heap_var)
		      bitmap_set_bit (new_delta, j);
		  }
	      }
	    bitmap_ior_into (escaped_vi->solution, new_delta);
	    bitmap_clear (delta);
	    std::swap (delta, new_delta);
	  }
	BITMAP_FREE (delta);
	BITMAP_FREE (new_delta);
      }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  /* First the set of memory the call may use.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else
	    {
	      bool uses_global_memory = true;
	      bool reads_global_memory = true;

	      determine_global_memory_access (stmt, NULL,
					      &reads_global_memory,
					      &uses_global_memory);
	      if ((vi = lookup_call_use_vi (stmt)) != NULL)
		{
		  *pt = find_what_var_points_to (cfun->decl, vi);
		  /* Escaped (and thus nonlocal) variables are always
		     implicitly used by calls.  */
		  /* ??? ESCAPED can be empty even though NONLOCAL
		     always escaped.  */
		  if (uses_global_memory)
		    {
		      pt->nonlocal = 1;
		      pt->escaped = 1;
		    }
		}
	      else if (uses_global_memory)
		{
		  /* If there is nothing special about this call then
		     we have made everything that is used also escape.  */
		  *pt = cfun->gimple_df->escaped;
		  pt->nonlocal = 1;
		}
	      else
		memset (pt, 0, sizeof (struct pt_solution));
	    }

	  /* Then the set of memory the call may clobber.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else
	    {
	      bool writes_global_memory = true;

	      determine_global_memory_access (stmt, &writes_global_memory,
					      NULL, NULL);

	      if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		{
		  *pt = find_what_var_points_to (cfun->decl, vi);
		  /* Escaped (and thus nonlocal) variables are always
		     implicitly clobbered by calls.  */
		  /* ??? ESCAPED can be empty even though NONLOCAL
		     always escaped.  */
		  if (writes_global_memory)
		    {
		      pt->nonlocal = 1;
		      pt->escaped = 1;
		    }
		}
	      else if (writes_global_memory)
		{
		  /* If there is nothing special about this call then
		     we have made everything that is used also escape.  */
		  *pt = cfun->gimple_df->escaped;
		  pt->nonlocal = 1;
		}
	      else
		memset (pt, 0, sizeof (struct pt_solution));
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
7698 1.1 mrg
7699 1.1 mrg
/* Delete created points-to sets.  Releases all memory allocated by the
   points-to machinery: the shared-bitmap and varinfo hash tables, the
   constraint graph, the variable map, and the associated obstacks and
   allocation pools.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Release the per-node complex-constraint vectors before freeing
     the graph arrays themselves.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
7738 1.1 mrg
/* Data passed to the visit_loadstore callback when assigning
   restrict-based dependence cliques.  */

struct vls_data
{
  /* The dependence clique to assign to accesses not based on a
     restrict pointer.  */
  unsigned short clique;
  /* Whether one of the restrict variables was found in the ESCAPED
     solution.  */
  bool escaped_p;
  /* The set of all subvariables of restrict variables that were
     assigned dependence info.  */
  bitmap rvars;
};
7745 1.1 mrg
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for walk_stmt_load_store_ops; DATA is a
   vls_data.  Always returns false so the whole statement is walked.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.
	     Likewise punt if the tags escaped and ptr may point to
	     escaped memory.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (base)
      /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      /* Strip handled components down to the innermost base decl and
	 replace it in place with a dereference of its address.  */
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
7810 1.1 mrg
/* Data passed to maybe_set_dependence_info via walk_stmt_load_store_ops.  */

struct msdi_data {
  /* The SSA name pointer whose dereferences we are looking at.  */
  tree ptr;
  /* The clique to use; shared and lazily initialized across calls,
     hence accessed through a pointer.  */
  unsigned short *clique;
  /* The last restrict-uid handed out, shared across calls.  */
  unsigned short *last_ruid;
  /* The single restrict variable PTR is known to point to.  */
  varinfo_t restrict_var;
};
7817 1.1 mrg
7818 1.1 mrg /* If BASE is a MEM_REF then assign a clique, base pair to it, updating
7819 1.1 mrg CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
7820 1.1 mrg Return whether dependence info was assigned to BASE. */
7821 1.1 mrg
7822 1.1 mrg static bool
7823 1.1 mrg maybe_set_dependence_info (gimple *, tree base, tree, void *data)
7824 1.1 mrg {
7825 1.1 mrg tree ptr = ((msdi_data *)data)->ptr;
7826 1.1 mrg unsigned short &clique = *((msdi_data *)data)->clique;
7827 1.1 mrg unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
7828 1.1 mrg varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
7829 1.1 mrg if ((TREE_CODE (base) == MEM_REF
7830 1.1 mrg || TREE_CODE (base) == TARGET_MEM_REF)
7831 1.1 mrg && TREE_OPERAND (base, 0) == ptr)
7832 1.1 mrg {
7833 1.1 mrg /* Do not overwrite existing cliques. This avoids overwriting dependence
7834 1.1 mrg info inlined from a function with restrict parameters inlined
7835 1.1 mrg into a function with restrict parameters. This usually means we
7836 1.1 mrg prefer to be precise in innermost loops. */
7837 1.1 mrg if (MR_DEPENDENCE_CLIQUE (base) == 0)
7838 1.1 mrg {
7839 1.1 mrg if (clique == 0)
7840 1.1 mrg {
7841 1.1 mrg if (cfun->last_clique == 0)
7842 1.1 mrg cfun->last_clique = 1;
7843 1.1 mrg clique = 1;
7844 1.1 mrg }
7845 1.1 mrg if (restrict_var->ruid == 0)
7846 1.1 mrg restrict_var->ruid = ++last_ruid;
7847 1.1 mrg MR_DEPENDENCE_CLIQUE (base) = clique;
7848 1.1 mrg MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
7849 1.1 mrg return true;
7850 1.1 mrg }
7851 1.1 mrg }
7852 1.1 mrg return false;
7853 1.1 mrg }
7854 1.1 mrg
7855 1.1 mrg /* Clear dependence info for the clique DATA. */
7856 1.1 mrg
7857 1.1 mrg static bool
7858 1.1 mrg clear_dependence_clique (gimple *, tree base, tree, void *data)
7859 1.1 mrg {
7860 1.1 mrg unsigned short clique = (uintptr_t)data;
7861 1.1 mrg if ((TREE_CODE (base) == MEM_REF
7862 1.1 mrg || TREE_CODE (base) == TARGET_MEM_REF)
7863 1.1 mrg && MR_DEPENDENCE_CLIQUE (base) == clique)
7864 1.1 mrg {
7865 1.1 mrg MR_DEPENDENCE_CLIQUE (base) = 0;
7866 1.1 mrg MR_DEPENDENCE_BASE (base) = 0;
7867 1.1 mrg }
7868 1.1 mrg
7869 1.1 mrg return false;
7870 1.1 mrg }
7871 1.1 mrg
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      /* For default defs of parameters the points-to info lives on the
	 parm/result decl rather than the SSA name.  */
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      /* Check the points-to solution contains exactly one restrict
	 variable (plus possibly NULL).  */
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  if (oi->head != j)
	    oi = get_varinfo (oi->head);
	  if (oi->is_restrict_var)
	    {
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ??? PTA isn't really a proper propagation engine to compute
	 this property.
	 ??? We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to set.  */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (sv)->next)
		bitmap_set_bit (rvars, sv);
	      /* Remember whether the restrict variable escaped; if so
		 visit_loadstore has to be conservative about pointers
		 that may point to ESCAPED memory.  */
	      varinfo_t escaped = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ??? For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, escaped_p, rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
7995 1.1 mrg
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.
   Entry point of the non-IPA points-to computation; returns 0 (no
   additional TODO flags).  */

unsigned int
compute_may_aliases (void)
{
  /* If IPA PTA already computed (more precise) information, keep it.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump what we have remaining.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
8037 1.1 mrg
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* No execute method -- all the work is done by the
     TODO_rebuild_alias finish flag above.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace
8069 1.1 mrg
/* Factory for the "alias" dummy pass, invoked by the pass manager.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
8075 1.1 mrg
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  Early variant of the "alias" pass above.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* No execute method -- all the work is done by the
     TODO_rebuild_alias finish flag above.  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace
8107 1.1 mrg
/* Factory for the "ealias" dummy pass, invoked by the pass manager.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
8113 1.1 mrg
8114 1.1 mrg
/* IPA PTA solutions for ESCAPED.  */
/* NOTE(review): positional aggregate initializer -- field meanings
   depend on the declaration order of struct pt_solution (not visible
   here); presumably the leading 'true' is the 'anything' flag -- confirm
   against the struct definition in the headers.  */
struct pt_solution ipa_escaped_pt
= { true, false, false, false, false,
    false, false, false, false, false, NULL };
8119 1.1 mrg
8120 1.1 mrg /* Associate node with varinfo DATA. Worker for
8121 1.1 mrg cgraph_for_symbol_thunks_and_aliases. */
8122 1.1 mrg static bool
8123 1.1 mrg associate_varinfo_to_alias (struct cgraph_node *node, void *data)
8124 1.1 mrg {
8125 1.1 mrg if ((node->alias
8126 1.1 mrg || (node->thunk
8127 1.1 mrg && ! node->inlined_to))
8128 1.1 mrg && node->analyzed
8129 1.1 mrg && !node->ifunc_resolver)
8130 1.1 mrg insert_vi_for_tree (node->decl, (varinfo_t)data);
8131 1.1 mrg return false;
8132 1.1 mrg }
8133 1.1 mrg
/* Dump varinfo VI to FILE.  Prints the id/name header, the set flags
   and linkage fields on one line, then the (old)solution bitmaps.  */

static void
dump_varinfo (FILE *file, varinfo_t vi)
{
  if (vi == NULL)
    return;

  fprintf (file, "%u: %s\n", vi->id, vi->name);

  /* Print each flag that is set, separated by SEP.  */
  const char *sep = " ";
  if (vi->is_artificial_var)
    fprintf (file, "%sartificial", sep);
  if (vi->is_special_var)
    fprintf (file, "%sspecial", sep);
  if (vi->is_unknown_size_var)
    fprintf (file, "%sunknown-size", sep);
  if (vi->is_full_var)
    fprintf (file, "%sfull", sep);
  if (vi->is_heap_var)
    fprintf (file, "%sheap", sep);
  if (vi->may_have_pointers)
    fprintf (file, "%smay-have-pointers", sep);
  if (vi->only_restrict_pointers)
    fprintf (file, "%sonly-restrict-pointers", sep);
  if (vi->is_restrict_var)
    fprintf (file, "%sis-restrict-var", sep);
  if (vi->is_global_var)
    fprintf (file, "%sglobal", sep);
  if (vi->is_ipa_escape_point)
    fprintf (file, "%sipa-escape-point", sep);
  if (vi->is_fn_info)
    fprintf (file, "%sfn-info", sep);
  if (vi->ruid)
    fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
  if (vi->next)
    fprintf (file, "%snext:%u", sep, vi->next);
  if (vi->head != vi->id)
    fprintf (file, "%shead:%u", sep, vi->head);
  if (vi->offset)
    fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
  /* Size/fullsize of ~0 act as "unknown" and are suppressed.  */
  if (vi->size != ~(unsigned HOST_WIDE_INT)0)
    fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
  if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
      && vi->fullsize != vi->size)
    fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
	     vi->fullsize);
  fprintf (file, "\n");

  /* Dump the solution bitmap, if non-empty.  */
  if (vi->solution && !bitmap_empty_p (vi->solution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " solution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }

  /* Dump the old solution only when it differs from the current one.  */
  if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
      && !bitmap_equal_p (vi->solution, vi->oldsolution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " oldsolution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }
}
8204 1.1 mrg
/* Dump varinfo VI to stderr.  Convenience wrapper for use from a
   debugger.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8212 1.1 mrg
8213 1.1 mrg /* Dump varmap to FILE. */
8214 1.1 mrg
8215 1.1 mrg static void
8216 1.1 mrg dump_varmap (FILE *file)
8217 1.1 mrg {
8218 1.1 mrg if (varmap.length () == 0)
8219 1.1 mrg return;
8220 1.1 mrg
8221 1.1 mrg fprintf (file, "variables:\n");
8222 1.1 mrg
8223 1.1 mrg for (unsigned int i = 0; i < varmap.length (); ++i)
8224 1.1 mrg {
8225 1.1 mrg varinfo_t vi = get_varinfo (i);
8226 1.1 mrg dump_varinfo (file, vi);
8227 1.1 mrg }
8228 1.1 mrg
8229 1.1 mrg fprintf (file, "\n");
8230 1.1 mrg }
8231 1.1 mrg
/* Dump varmap to stderr.  Convenience wrapper for use from a
   debugger.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
8239 1.1 mrg
8240 1.1 mrg /* Compute whether node is refered to non-locally. Worker for
8241 1.1 mrg cgraph_for_symbol_thunks_and_aliases. */
8242 1.1 mrg static bool
8243 1.1 mrg refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
8244 1.1 mrg {
8245 1.1 mrg bool *nonlocal_p = (bool *)data;
8246 1.1 mrg *nonlocal_p |= (node->used_from_other_partition
8247 1.1 mrg || DECL_EXTERNAL (node->decl)
8248 1.1 mrg || TREE_PUBLIC (node->decl)
8249 1.1 mrg || node->force_output
8250 1.1 mrg || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
8251 1.1 mrg return false;
8252 1.1 mrg }
8253 1.1 mrg
8254 1.1 mrg /* Same for varpool nodes. */
8255 1.1 mrg static bool
8256 1.1 mrg refered_from_nonlocal_var (struct varpool_node *node, void *data)
8257 1.1 mrg {
8258 1.1 mrg bool *nonlocal_p = (bool *)data;
8259 1.1 mrg *nonlocal_p |= (node->used_from_other_partition
8260 1.1 mrg || DECL_EXTERNAL (node->decl)
8261 1.1 mrg || TREE_PUBLIC (node->decl)
8262 1.1 mrg || node->force_output);
8263 1.1 mrg return false;
8264 1.1 mrg }
8265 1.1 mrg
8266 1.1 mrg /* Execute the driver for IPA PTA. */
8267 1.1 mrg static unsigned int
8268 1.1 mrg ipa_pta_execute (void)
8269 1.1 mrg {
8270 1.1 mrg struct cgraph_node *node;
8271 1.1 mrg varpool_node *var;
8272 1.1 mrg unsigned int from = 0;
8273 1.1 mrg
8274 1.1 mrg in_ipa_mode = 1;
8275 1.1 mrg
8276 1.1 mrg init_alias_vars ();
8277 1.1 mrg
8278 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS))
8279 1.1 mrg {
8280 1.1 mrg symtab->dump (dump_file);
8281 1.1 mrg fprintf (dump_file, "\n");
8282 1.1 mrg }
8283 1.1 mrg
8284 1.1 mrg if (dump_file)
8285 1.1 mrg {
8286 1.1 mrg fprintf (dump_file, "Generating generic constraints\n\n");
8287 1.1 mrg dump_constraints (dump_file, from);
8288 1.1 mrg fprintf (dump_file, "\n");
8289 1.1 mrg from = constraints.length ();
8290 1.1 mrg }
8291 1.1 mrg
8292 1.1 mrg /* Build the constraints. */
8293 1.1 mrg FOR_EACH_DEFINED_FUNCTION (node)
8294 1.1 mrg {
8295 1.1 mrg varinfo_t vi;
8296 1.1 mrg /* Nodes without a body in this partition are not interesting.
8297 1.1 mrg Especially do not visit clones at this point for now - we
8298 1.1 mrg get duplicate decls there for inline clones at least. */
8299 1.1 mrg if (!node->has_gimple_body_p ()
8300 1.1 mrg || node->in_other_partition
8301 1.1 mrg || node->inlined_to)
8302 1.1 mrg continue;
8303 1.1 mrg node->get_body ();
8304 1.1 mrg
8305 1.1 mrg gcc_assert (!node->clone_of);
8306 1.1 mrg
8307 1.1 mrg /* For externally visible or attribute used annotated functions use
8308 1.1 mrg local constraints for their arguments.
8309 1.1 mrg For local functions we see all callers and thus do not need initial
8310 1.1 mrg constraints for parameters. */
8311 1.1 mrg bool nonlocal_p = (node->used_from_other_partition
8312 1.1 mrg || DECL_EXTERNAL (node->decl)
8313 1.1 mrg || TREE_PUBLIC (node->decl)
8314 1.1 mrg || node->force_output
8315 1.1 mrg || lookup_attribute ("noipa",
8316 1.1 mrg DECL_ATTRIBUTES (node->decl)));
8317 1.1 mrg node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8318 1.1 mrg &nonlocal_p, true);
8319 1.1 mrg
8320 1.1 mrg vi = create_function_info_for (node->decl,
8321 1.1 mrg alias_get_name (node->decl), false,
8322 1.1 mrg nonlocal_p);
8323 1.1 mrg if (dump_file
8324 1.1 mrg && from != constraints.length ())
8325 1.1 mrg {
8326 1.1 mrg fprintf (dump_file,
8327 1.1 mrg "Generating initial constraints for %s",
8328 1.1 mrg node->dump_name ());
8329 1.1 mrg if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8330 1.1 mrg fprintf (dump_file, " (%s)",
8331 1.1 mrg IDENTIFIER_POINTER
8332 1.1 mrg (DECL_ASSEMBLER_NAME (node->decl)));
8333 1.1 mrg fprintf (dump_file, "\n\n");
8334 1.1 mrg dump_constraints (dump_file, from);
8335 1.1 mrg fprintf (dump_file, "\n");
8336 1.1 mrg
8337 1.1 mrg from = constraints.length ();
8338 1.1 mrg }
8339 1.1 mrg
8340 1.1 mrg node->call_for_symbol_thunks_and_aliases
8341 1.1 mrg (associate_varinfo_to_alias, vi, true);
8342 1.1 mrg }
8343 1.1 mrg
8344 1.1 mrg /* Create constraints for global variables and their initializers. */
8345 1.1 mrg FOR_EACH_VARIABLE (var)
8346 1.1 mrg {
8347 1.1 mrg if (var->alias && var->analyzed)
8348 1.1 mrg continue;
8349 1.1 mrg
8350 1.1 mrg varinfo_t vi = get_vi_for_tree (var->decl);
8351 1.1 mrg
8352 1.1 mrg /* For the purpose of IPA PTA unit-local globals are not
8353 1.1 mrg escape points. */
8354 1.1 mrg bool nonlocal_p = (DECL_EXTERNAL (var->decl)
8355 1.1 mrg || TREE_PUBLIC (var->decl)
8356 1.1 mrg || var->used_from_other_partition
8357 1.1 mrg || var->force_output);
8358 1.1 mrg var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8359 1.1 mrg &nonlocal_p, true);
8360 1.1 mrg if (nonlocal_p)
8361 1.1 mrg vi->is_ipa_escape_point = true;
8362 1.1 mrg }
8363 1.1 mrg
8364 1.1 mrg if (dump_file
8365 1.1 mrg && from != constraints.length ())
8366 1.1 mrg {
8367 1.1 mrg fprintf (dump_file,
8368 1.1 mrg "Generating constraints for global initializers\n\n");
8369 1.1 mrg dump_constraints (dump_file, from);
8370 1.1 mrg fprintf (dump_file, "\n");
8371 1.1 mrg from = constraints.length ();
8372 1.1 mrg }
8373 1.1 mrg
8374 1.1 mrg FOR_EACH_DEFINED_FUNCTION (node)
8375 1.1 mrg {
8376 1.1 mrg struct function *func;
8377 1.1 mrg basic_block bb;
8378 1.1 mrg
8379 1.1 mrg /* Nodes without a body in this partition are not interesting. */
8380 1.1 mrg if (!node->has_gimple_body_p ()
8381 1.1 mrg || node->in_other_partition
8382 1.1 mrg || node->clone_of)
8383 1.1 mrg continue;
8384 1.1 mrg
8385 1.1 mrg if (dump_file)
8386 1.1 mrg {
8387 1.1 mrg fprintf (dump_file,
8388 1.1 mrg "Generating constraints for %s", node->dump_name ());
8389 1.1 mrg if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8390 1.1 mrg fprintf (dump_file, " (%s)",
8391 1.1 mrg IDENTIFIER_POINTER
8392 1.1 mrg (DECL_ASSEMBLER_NAME (node->decl)));
8393 1.1 mrg fprintf (dump_file, "\n");
8394 1.1 mrg }
8395 1.1 mrg
8396 1.1 mrg func = DECL_STRUCT_FUNCTION (node->decl);
8397 1.1 mrg gcc_assert (cfun == NULL);
8398 1.1 mrg
8399 1.1 mrg /* Build constriants for the function body. */
8400 1.1 mrg FOR_EACH_BB_FN (bb, func)
8401 1.1 mrg {
8402 1.1 mrg for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8403 1.1 mrg gsi_next (&gsi))
8404 1.1 mrg {
8405 1.1 mrg gphi *phi = gsi.phi ();
8406 1.1 mrg
8407 1.1 mrg if (! virtual_operand_p (gimple_phi_result (phi)))
8408 1.1 mrg find_func_aliases (func, phi);
8409 1.1 mrg }
8410 1.1 mrg
8411 1.1 mrg for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8412 1.1 mrg gsi_next (&gsi))
8413 1.1 mrg {
8414 1.1 mrg gimple *stmt = gsi_stmt (gsi);
8415 1.1 mrg
8416 1.1 mrg find_func_aliases (func, stmt);
8417 1.1 mrg find_func_clobbers (func, stmt);
8418 1.1 mrg }
8419 1.1 mrg }
8420 1.1 mrg
8421 1.1 mrg if (dump_file)
8422 1.1 mrg {
8423 1.1 mrg fprintf (dump_file, "\n");
8424 1.1 mrg dump_constraints (dump_file, from);
8425 1.1 mrg fprintf (dump_file, "\n");
8426 1.1 mrg from = constraints.length ();
8427 1.1 mrg }
8428 1.1 mrg }
8429 1.1 mrg
8430 1.1 mrg /* From the constraints compute the points-to sets. */
8431 1.1 mrg solve_constraints ();
8432 1.1 mrg
8433 1.1 mrg if (dump_file)
8434 1.1 mrg dump_sa_points_to_info (dump_file);
8435 1.1 mrg
8436 1.1 mrg /* Now post-process solutions to handle locals from different
8437 1.1 mrg runtime instantiations coming in through recursive invocations. */
8438 1.1 mrg unsigned shadow_var_cnt = 0;
8439 1.1 mrg for (unsigned i = 1; i < varmap.length (); ++i)
8440 1.1 mrg {
8441 1.1 mrg varinfo_t fi = get_varinfo (i);
8442 1.1 mrg if (fi->is_fn_info
8443 1.1 mrg && fi->decl)
8444 1.1 mrg /* Automatic variables pointed to by their containing functions
8445 1.1 mrg parameters need this treatment. */
8446 1.1 mrg for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8447 1.1 mrg ai; ai = vi_next (ai))
8448 1.1 mrg {
8449 1.1 mrg varinfo_t vi = get_varinfo (find (ai->id));
8450 1.1 mrg bitmap_iterator bi;
8451 1.1 mrg unsigned j;
8452 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8453 1.1 mrg {
8454 1.1 mrg varinfo_t pt = get_varinfo (j);
8455 1.1 mrg if (pt->shadow_var_uid == 0
8456 1.1 mrg && pt->decl
8457 1.1 mrg && auto_var_in_fn_p (pt->decl, fi->decl))
8458 1.1 mrg {
8459 1.1 mrg pt->shadow_var_uid = allocate_decl_uid ();
8460 1.1 mrg shadow_var_cnt++;
8461 1.1 mrg }
8462 1.1 mrg }
8463 1.1 mrg }
8464 1.1 mrg /* As well as global variables which are another way of passing
8465 1.1 mrg arguments to recursive invocations. */
8466 1.1 mrg else if (fi->is_global_var)
8467 1.1 mrg {
8468 1.1 mrg for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8469 1.1 mrg {
8470 1.1 mrg varinfo_t vi = get_varinfo (find (ai->id));
8471 1.1 mrg bitmap_iterator bi;
8472 1.1 mrg unsigned j;
8473 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8474 1.1 mrg {
8475 1.1 mrg varinfo_t pt = get_varinfo (j);
8476 1.1 mrg if (pt->shadow_var_uid == 0
8477 1.1 mrg && pt->decl
8478 1.1 mrg && auto_var_p (pt->decl))
8479 1.1 mrg {
8480 1.1 mrg pt->shadow_var_uid = allocate_decl_uid ();
8481 1.1 mrg shadow_var_cnt++;
8482 1.1 mrg }
8483 1.1 mrg }
8484 1.1 mrg }
8485 1.1 mrg }
8486 1.1 mrg }
8487 1.1 mrg if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8488 1.1 mrg fprintf (dump_file, "Allocated %u shadow variables for locals "
8489 1.1 mrg "maybe leaking into recursive invocations of their containing "
8490 1.1 mrg "functions\n", shadow_var_cnt);
8491 1.1 mrg
8492 1.1 mrg /* Compute the global points-to sets for ESCAPED.
8493 1.1 mrg ??? Note that the computed escape set is not correct
8494 1.1 mrg for the whole unit as we fail to consider graph edges to
8495 1.1 mrg externally visible functions. */
8496 1.1 mrg ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8497 1.1 mrg
8498 1.1 mrg /* Make sure the ESCAPED solution (which is used as placeholder in
8499 1.1 mrg other solutions) does not reference itself. This simplifies
8500 1.1 mrg points-to solution queries. */
8501 1.1 mrg ipa_escaped_pt.ipa_escaped = 0;
8502 1.1 mrg
8503 1.1 mrg /* Assign the points-to sets to the SSA names in the unit. */
8504 1.1 mrg FOR_EACH_DEFINED_FUNCTION (node)
8505 1.1 mrg {
8506 1.1 mrg tree ptr;
8507 1.1 mrg struct function *fn;
8508 1.1 mrg unsigned i;
8509 1.1 mrg basic_block bb;
8510 1.1 mrg
8511 1.1 mrg /* Nodes without a body in this partition are not interesting. */
8512 1.1 mrg if (!node->has_gimple_body_p ()
8513 1.1 mrg || node->in_other_partition
8514 1.1 mrg || node->clone_of)
8515 1.1 mrg continue;
8516 1.1 mrg
8517 1.1 mrg fn = DECL_STRUCT_FUNCTION (node->decl);
8518 1.1 mrg
8519 1.1 mrg /* Compute the points-to sets for pointer SSA_NAMEs. */
8520 1.1 mrg FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8521 1.1 mrg {
8522 1.1 mrg if (ptr
8523 1.1 mrg && POINTER_TYPE_P (TREE_TYPE (ptr)))
8524 1.1 mrg find_what_p_points_to (node->decl, ptr);
8525 1.1 mrg }
8526 1.1 mrg
8527 1.1 mrg /* Compute the call-use and call-clobber sets for indirect calls
8528 1.1 mrg and calls to external functions. */
8529 1.1 mrg FOR_EACH_BB_FN (bb, fn)
8530 1.1 mrg {
8531 1.1 mrg gimple_stmt_iterator gsi;
8532 1.1 mrg
8533 1.1 mrg for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8534 1.1 mrg {
8535 1.1 mrg gcall *stmt;
8536 1.1 mrg struct pt_solution *pt;
8537 1.1 mrg varinfo_t vi, fi;
8538 1.1 mrg tree decl;
8539 1.1 mrg
8540 1.1 mrg stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8541 1.1 mrg if (!stmt)
8542 1.1 mrg continue;
8543 1.1 mrg
8544 1.1 mrg /* Handle direct calls to functions with body. */
8545 1.1 mrg decl = gimple_call_fndecl (stmt);
8546 1.1 mrg
8547 1.1 mrg {
8548 1.1 mrg tree called_decl = NULL_TREE;
8549 1.1 mrg if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8550 1.1 mrg called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8551 1.1 mrg else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8552 1.1 mrg called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8553 1.1 mrg
8554 1.1 mrg if (called_decl != NULL_TREE
8555 1.1 mrg && !fndecl_maybe_in_other_partition (called_decl))
8556 1.1 mrg decl = called_decl;
8557 1.1 mrg }
8558 1.1 mrg
8559 1.1 mrg if (decl
8560 1.1 mrg && (fi = lookup_vi_for_tree (decl))
8561 1.1 mrg && fi->is_fn_info)
8562 1.1 mrg {
8563 1.1 mrg *gimple_call_clobber_set (stmt)
8564 1.1 mrg = find_what_var_points_to
8565 1.1 mrg (node->decl, first_vi_for_offset (fi, fi_clobbers));
8566 1.1 mrg *gimple_call_use_set (stmt)
8567 1.1 mrg = find_what_var_points_to
8568 1.1 mrg (node->decl, first_vi_for_offset (fi, fi_uses));
8569 1.1 mrg }
8570 1.1 mrg /* Handle direct calls to external functions. */
8571 1.1 mrg else if (decl && (!fi || fi->decl))
8572 1.1 mrg {
8573 1.1 mrg pt = gimple_call_use_set (stmt);
8574 1.1 mrg if (gimple_call_flags (stmt) & ECF_CONST)
8575 1.1 mrg memset (pt, 0, sizeof (struct pt_solution));
8576 1.1 mrg else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8577 1.1 mrg {
8578 1.1 mrg *pt = find_what_var_points_to (node->decl, vi);
8579 1.1 mrg /* Escaped (and thus nonlocal) variables are always
8580 1.1 mrg implicitly used by calls. */
8581 1.1 mrg /* ??? ESCAPED can be empty even though NONLOCAL
8582 1.1 mrg always escaped. */
8583 1.1 mrg pt->nonlocal = 1;
8584 1.1 mrg pt->ipa_escaped = 1;
8585 1.1 mrg }
8586 1.1 mrg else
8587 1.1 mrg {
8588 1.1 mrg /* If there is nothing special about this call then
8589 1.1 mrg we have made everything that is used also escape. */
8590 1.1 mrg *pt = ipa_escaped_pt;
8591 1.1 mrg pt->nonlocal = 1;
8592 1.1 mrg }
8593 1.1 mrg
8594 1.1 mrg pt = gimple_call_clobber_set (stmt);
8595 1.1 mrg if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8596 1.1 mrg memset (pt, 0, sizeof (struct pt_solution));
8597 1.1 mrg else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8598 1.1 mrg {
8599 1.1 mrg *pt = find_what_var_points_to (node->decl, vi);
8600 1.1 mrg /* Escaped (and thus nonlocal) variables are always
8601 1.1 mrg implicitly clobbered by calls. */
8602 1.1 mrg /* ??? ESCAPED can be empty even though NONLOCAL
8603 1.1 mrg always escaped. */
8604 1.1 mrg pt->nonlocal = 1;
8605 1.1 mrg pt->ipa_escaped = 1;
8606 1.1 mrg }
8607 1.1 mrg else
8608 1.1 mrg {
8609 1.1 mrg /* If there is nothing special about this call then
8610 1.1 mrg we have made everything that is used also escape. */
8611 1.1 mrg *pt = ipa_escaped_pt;
8612 1.1 mrg pt->nonlocal = 1;
8613 1.1 mrg }
8614 1.1 mrg }
8615 1.1 mrg /* Handle indirect calls. */
8616 1.1 mrg else if ((fi = get_fi_for_callee (stmt)))
8617 1.1 mrg {
8618 1.1 mrg /* We need to accumulate all clobbers/uses of all possible
8619 1.1 mrg callees. */
8620 1.1 mrg fi = get_varinfo (find (fi->id));
8621 1.1 mrg /* If we cannot constrain the set of functions we'll end up
8622 1.1 mrg calling we end up using/clobbering everything. */
8623 1.1 mrg if (bitmap_bit_p (fi->solution, anything_id)
8624 1.1 mrg || bitmap_bit_p (fi->solution, nonlocal_id)
8625 1.1 mrg || bitmap_bit_p (fi->solution, escaped_id))
8626 1.1 mrg {
8627 1.1 mrg pt_solution_reset (gimple_call_clobber_set (stmt));
8628 1.1 mrg pt_solution_reset (gimple_call_use_set (stmt));
8629 1.1 mrg }
8630 1.1 mrg else
8631 1.1 mrg {
8632 1.1 mrg bitmap_iterator bi;
8633 1.1 mrg unsigned i;
8634 1.1 mrg struct pt_solution *uses, *clobbers;
8635 1.1 mrg
8636 1.1 mrg uses = gimple_call_use_set (stmt);
8637 1.1 mrg clobbers = gimple_call_clobber_set (stmt);
8638 1.1 mrg memset (uses, 0, sizeof (struct pt_solution));
8639 1.1 mrg memset (clobbers, 0, sizeof (struct pt_solution));
8640 1.1 mrg EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8641 1.1 mrg {
8642 1.1 mrg struct pt_solution sol;
8643 1.1 mrg
8644 1.1 mrg vi = get_varinfo (i);
8645 1.1 mrg if (!vi->is_fn_info)
8646 1.1 mrg {
8647 1.1 mrg /* ??? We could be more precise here? */
8648 1.1 mrg uses->nonlocal = 1;
8649 1.1 mrg uses->ipa_escaped = 1;
8650 1.1 mrg clobbers->nonlocal = 1;
8651 1.1 mrg clobbers->ipa_escaped = 1;
8652 1.1 mrg continue;
8653 1.1 mrg }
8654 1.1 mrg
8655 1.1 mrg if (!uses->anything)
8656 1.1 mrg {
8657 1.1 mrg sol = find_what_var_points_to
8658 1.1 mrg (node->decl,
8659 1.1 mrg first_vi_for_offset (vi, fi_uses));
8660 1.1 mrg pt_solution_ior_into (uses, &sol);
8661 1.1 mrg }
8662 1.1 mrg if (!clobbers->anything)
8663 1.1 mrg {
8664 1.1 mrg sol = find_what_var_points_to
8665 1.1 mrg (node->decl,
8666 1.1 mrg first_vi_for_offset (vi, fi_clobbers));
8667 1.1 mrg pt_solution_ior_into (clobbers, &sol);
8668 1.1 mrg }
8669 1.1 mrg }
8670 1.1 mrg }
8671 1.1 mrg }
8672 1.1 mrg else
8673 1.1 mrg gcc_unreachable ();
8674 1.1 mrg }
8675 1.1 mrg }
8676 1.1 mrg
8677 1.1 mrg fn->gimple_df->ipa_pta = true;
8678 1.1 mrg
8679 1.1 mrg /* We have to re-set the final-solution cache after each function
8680 1.1 mrg because what is a "global" is dependent on function context. */
8681 1.1 mrg final_solutions->empty ();
8682 1.1 mrg obstack_free (&final_solutions_obstack, NULL);
8683 1.1 mrg gcc_obstack_init (&final_solutions_obstack);
8684 1.1 mrg }
8685 1.1 mrg
8686 1.1 mrg delete_points_to_sets ();
8687 1.1 mrg
8688 1.1 mrg in_ipa_mode = 0;
8689 1.1 mrg
8690 1.1 mrg return 0;
8691 1.1 mrg }
8692 1.1 mrg
8693 1.1 mrg namespace {
8694 1.1 mrg
8695 1.1 mrg const pass_data pass_data_ipa_pta =
8696 1.1 mrg {
8697 1.1 mrg SIMPLE_IPA_PASS, /* type */
8698 1.1 mrg "pta", /* name */
8699 1.1 mrg OPTGROUP_NONE, /* optinfo_flags */
8700 1.1 mrg TV_IPA_PTA, /* tv_id */
8701 1.1 mrg 0, /* properties_required */
8702 1.1 mrg 0, /* properties_provided */
8703 1.1 mrg 0, /* properties_destroyed */
8704 1.1 mrg 0, /* todo_flags_start */
8705 1.1 mrg 0, /* todo_flags_finish */
8706 1.1 mrg };
8707 1.1 mrg
8708 1.1 mrg class pass_ipa_pta : public simple_ipa_opt_pass
8709 1.1 mrg {
8710 1.1 mrg public:
8711 1.1 mrg pass_ipa_pta (gcc::context *ctxt)
8712 1.1 mrg : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8713 1.1 mrg {}
8714 1.1 mrg
8715 1.1 mrg /* opt_pass methods: */
8716 1.1 mrg virtual bool gate (function *)
8717 1.1 mrg {
8718 1.1 mrg return (optimize
8719 1.1 mrg && flag_ipa_pta
8720 1.1 mrg /* Don't bother doing anything if the program has errors. */
8721 1.1 mrg && !seen_error ());
8722 1.1 mrg }
8723 1.1 mrg
8724 1.1 mrg opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8725 1.1 mrg
8726 1.1 mrg virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8727 1.1 mrg
8728 1.1 mrg }; // class pass_ipa_pta
8729 1.1 mrg
8730 1.1 mrg } // anon namespace
8731 1.1 mrg
8732 1.1 mrg simple_ipa_opt_pass *
8733 1.1 mrg make_pass_ipa_pta (gcc::context *ctxt)
8734 1.1 mrg {
8735 1.1 mrg return new pass_ipa_pta (ctxt);
8736 1.1 mrg }
8737