/* Tree based points-to analysis
2 Copyright (C) 2005-2022 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin (at) dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "tree-pass.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stmt.h"
37 #include "gimple-iterator.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "gimple-walk.h"
41 #include "varasm.h"
42 #include "stringpool.h"
43 #include "attribs.h"
44 #include "tree-ssa.h"
45 #include "tree-cfg.h"
46 #include "gimple-range.h"
47 #include "ipa-modref-tree.h"
48 #include "ipa-modref.h"
49 #include "attr-fnspec.h"
50
51 /* The idea behind this analyzer is to generate set constraints from the
52 program, then solve the resulting constraints in order to generate the
53 points-to sets.
54
55 Set constraints are a way of modeling program analysis problems that
56 involve sets. They consist of an inclusion constraint language,
57 describing the variables (each variable is a set) and operations that
58 are involved on the variables, and a set of rules that derive facts
59 from these operations. To solve a system of set constraints, you derive
60 all possible facts under the rules, which gives you the correct sets
61 as a consequence.
62
63 See "Efficient Field-sensitive pointer analysis for C" by "David
64 J. Pearce and Paul H. J. Kelly and Chris Hankin", at
65 http://citeseer.ist.psu.edu/pearce04efficient.html
66
67 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
68 of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
69 http://citeseer.ist.psu.edu/heintze01ultrafast.html
70
71 There are three types of real constraint expressions, DEREF,
72 ADDRESSOF, and SCALAR. Each constraint expression consists
73 of a constraint type, a variable, and an offset.
74
75 SCALAR is a constraint expression type used to represent x, whether
76 it appears on the LHS or the RHS of a statement.
77 DEREF is a constraint expression type used to represent *x, whether
78 it appears on the LHS or the RHS of a statement.
79 ADDRESSOF is a constraint expression used to represent &x, whether
80 it appears on the LHS or the RHS of a statement.
81
82 Each pointer variable in the program is assigned an integer id, and
83 each field of a structure variable is assigned an integer id as well.
84
85 Structure variables are linked to their list of fields through a "next
86 field" in each variable that points to the next field in offset
87 order.
88 Each variable for a structure field has
89
90 1. "size", that tells the size in bits of that field.
91 2. "fullsize", that tells the size in bits of the entire structure.
92 3. "offset", that tells the offset in bits from the beginning of the
93 structure to this field.
94
95 Thus,
96 struct f
97 {
98 int a;
99 int b;
100 } foo;
101 int *bar;
102
103 looks like
104
105 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
106 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
107 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
108
109
110 In order to solve the system of set constraints, the following is
111 done:
112
113 1. Each constraint variable x has a solution set associated with it,
114 Sol(x).
115
116 2. Constraints are separated into direct, copy, and complex.
117 Direct constraints are ADDRESSOF constraints that require no extra
118 processing, such as P = &Q
119 Copy constraints are those of the form P = Q.
120 Complex constraints are all the constraints involving dereferences
121 and offsets (including offsetted copies).
122
123 3. All direct constraints of the form P = &Q are processed, such
124 that Q is added to Sol(P)
125
126 4. All complex constraints for a given constraint variable are stored in a
127 linked list attached to that variable's node.
128
129 5. A directed graph is built out of the copy constraints. Each
130 constraint variable is a node in the graph, and an edge from
131 Q to P is added for each copy constraint of the form P = Q
132
133 6. The graph is then walked, and solution sets are
134 propagated along the copy edges, such that an edge from Q to P
135 causes Sol(P) <- Sol(P) union Sol(Q).
136
137 7. As we visit each node, all complex constraints associated with
138 that node are processed by adding appropriate copy edges to the graph, or the
139 appropriate variables to the solution set.
140
141 8. The process of walking the graph is iterated until no solution
142 sets change.
143
144 Prior to walking the graph in steps 6 and 7, We perform static
145 cycle elimination on the constraint graph, as well
146 as off-line variable substitution.
147
148 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
149 on and turned into anything), but isn't. You can just see what offset
150 inside the pointed-to struct it's going to access.
151
152 TODO: Constant bounded arrays can be handled as if they were structs of the
153 same number of elements.
154
155 TODO: Modeling heap and incoming pointers becomes much better if we
156 add fields to them as we discover them, which we could do.
157
158 TODO: We could handle unions, but to be honest, it's probably not
159 worth the pain or slowdown. */
160
161 /* IPA-PTA optimizations possible.
162
163 When the indirect function called is ANYTHING we can add disambiguation
164 based on the function signatures (or simply the parameter count which
165 is the varinfo size). We also do not need to consider functions that
166 do not have their address taken.
167
168 The is_global_var bit which marks escape points is overly conservative
169 in IPA mode. Split it to is_escape_point and is_global_var - only
170 externally visible globals are escape points in IPA mode.
171 There is now is_ipa_escape_point but this is only used in a few
172 selected places.
173
174 The way we introduce DECL_PT_UID to avoid fixing up all points-to
175 sets in the translation unit when we copy a DECL during inlining
176 pessimizes precision. The advantage is that the DECL_PT_UID keeps
177 compile-time and memory usage overhead low - the points-to sets
178 do not grow or get unshared as they would during a fixup phase.
179 An alternative solution is to delay IPA PTA until after all
180 inlining transformations have been applied.
181
182 The way we propagate clobber/use information isn't optimized.
183 It should use a new complex constraint that properly filters
184 out local variables of the callee (though that would make
185 the sets invalid after inlining). OTOH we might as well
186 admit defeat to WHOPR and simply do all the clobber/use analysis
187 and propagation after PTA finished but before we threw away
188 points-to information for memory variables. WHOPR and PTA
189 do not play along well anyway - the whole constraint solving
190 would need to be done in WPA phase and it will be very interesting
191 to apply the results to local SSA names during LTRANS phase.
192
193 We probably should compute a per-function unit-ESCAPE solution
194 propagating it simply like the clobber / uses solutions. The
195 solution can go alongside the non-IPA escaped solution and be
196 used to query which vars escape the unit through a function.
197 This is also required to make the escaped-HEAP trick work in IPA mode.
198
199 We never put function decls in points-to sets so we do not
200 keep the set of called functions for indirect calls.
201
202 And probably more. */
203
204 static bool use_field_sensitive = true;
205 static int in_ipa_mode = 0;
206
207 /* Used for predecessor bitmaps. */
208 static bitmap_obstack predbitmap_obstack;
209
210 /* Used for points-to sets. */
211 static bitmap_obstack pta_obstack;
212
213 /* Used for oldsolution members of variables. */
214 static bitmap_obstack oldpta_obstack;
215
216 /* Used for per-solver-iteration bitmaps. */
217 static bitmap_obstack iteration_obstack;
218
219 static unsigned int create_variable_info_for (tree, const char *, bool);
220 typedef struct constraint_graph *constraint_graph_t;
221 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
222
223 struct constraint;
224 typedef struct constraint *constraint_t;
225
226
227 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
228 if (a) \
229 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
230
/* Statistics gathered while building and solving the constraint system.  */

static struct constraint_stats
{
  /* Number of constraint variables created.  */
  unsigned int total_vars;
  /* Number of variables found to contain no pointers.  */
  unsigned int nonpointer_vars;
  /* Variables unified before solving (presumably during offline
     variable substitution — confirm against unify_nodes callers).  */
  unsigned int unified_vars_static;
  /* Variables unified while solving (presumably online cycle
     elimination — confirm against unify_nodes callers).  */
  unsigned int unified_vars_dynamic;
  /* Number of solver iterations performed.  */
  unsigned int iterations;
  /* Number of explicit graph edges.  */
  unsigned int num_edges;
  /* Number of implicit graph edges.  */
  unsigned int num_implicit_edges;
  /* Number of final points-to sets created.  */
  unsigned int points_to_sets_created;
} stats;
242
/* Per-constraint-variable information.  One of these exists for every
   constraint variable (including artificial ones and structure
   sub-fields); they live in VARMAP indexed by ID.  */

struct variable_info
{
  /* ID of this variable; also its index into varmap.  */
  unsigned int id;

  /* True if this is a variable created by the constraint analysis, such as
     heap variables and constraints we had to break up.  */
  unsigned int is_artificial_var : 1;

  /* True if this is a special variable whose solution set should not be
     changed.  */
  unsigned int is_special_var : 1;

  /* True for variables whose size is not known or variable.  */
  unsigned int is_unknown_size_var : 1;

  /* True for (sub-)fields that represent a whole variable.  */
  unsigned int is_full_var : 1;

  /* True if this is a heap variable.  */
  unsigned int is_heap_var : 1;

  /* True if this is a register variable.  */
  unsigned int is_reg_var : 1;

  /* True if this field may contain pointers.  */
  unsigned int may_have_pointers : 1;

  /* True if this field has only restrict qualified pointers.  */
  unsigned int only_restrict_pointers : 1;

  /* True if this represents a heap var created for a restrict qualified
     pointer.  */
  unsigned int is_restrict_var : 1;

  /* True if this represents a global variable.  */
  unsigned int is_global_var : 1;

  /* True if this represents a module escape point for IPA analysis.  */
  unsigned int is_ipa_escape_point : 1;

  /* True if this represents a IPA function info.  */
  unsigned int is_fn_info : 1;

  /* True if this appears as RHS in a ADDRESSOF constraint.  */
  unsigned int address_taken : 1;

  /* ??? Store somewhere better.  */
  unsigned short ruid;

  /* The ID of the variable for the next field in this structure
     or zero for the last field in this structure.  Zero works as the
     terminator because variable ID zero is never used.  */
  unsigned next;

  /* The ID of the variable for the first field in this structure.  */
  unsigned head;

  /* Offset of this variable, in bits, from the base variable.  */
  unsigned HOST_WIDE_INT offset;

  /* Size of the variable, in bits.  */
  unsigned HOST_WIDE_INT size;

  /* Full size of the base variable, in bits.  */
  unsigned HOST_WIDE_INT fullsize;

  /* In IPA mode the shadow UID in case the variable needs to be duplicated in
     the final points-to solution because it reaches its containing
     function recursively.  Zero if none is needed.  */
  unsigned int shadow_var_uid;

  /* Name of this variable.  */
  const char *name;

  /* Tree that this variable is associated with.  */
  tree decl;

  /* Points-to set for this variable.  */
  bitmap solution;

  /* Old points-to set for this variable.  */
  bitmap oldsolution;
};
typedef struct variable_info *varinfo_t;
327
328 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
329 static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
330 unsigned HOST_WIDE_INT);
331 static varinfo_t lookup_vi_for_tree (tree);
332 static inline bool type_can_have_subvars (const_tree);
333 static void make_param_constraints (varinfo_t);
334
335 /* Pool of variable info structures. */
336 static object_allocator<variable_info> variable_info_pool
337 ("Variable info pool");
338
339 /* Map varinfo to final pt_solution. */
340 static hash_map<varinfo_t, pt_solution *> *final_solutions;
341 struct obstack final_solutions_obstack;
342
343 /* Table of variable info structures for constraint variables.
344 Indexed directly by variable info id. */
345 static vec<varinfo_t> varmap;
346
/* Return the variable info structure with id N.  N indexes VARMAP
   directly, so it must be a valid variable id.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return varmap[n];
}
354
355 /* Return the next variable in the list of sub-variables of VI
356 or NULL if VI is the last sub-variable. */
357
358 static inline varinfo_t
359 vi_next (varinfo_t vi)
360 {
361 return get_varinfo (vi->next);
362 }
363
/* Static IDs for the special artificial variables.  Variable ID zero is
   unused and serves as the terminator for the sub-variable chain, hence
   the special variables start at 1.  */
enum { nothing_id = 1, anything_id = 2, string_id = 3,
       escaped_id = 4, nonlocal_id = 5,
       storedanything_id = 6, integer_id = 7 };
369
370 /* Return a new variable info structure consisting for a variable
371 named NAME, and using constraint graph node NODE. Append it
372 to the vector of variable info structures. */
373
374 static varinfo_t
375 new_var_info (tree t, const char *name, bool add_id)
376 {
377 unsigned index = varmap.length ();
378 varinfo_t ret = variable_info_pool.allocate ();
379
380 if (dump_file && add_id)
381 {
382 char *tempname = xasprintf ("%s(%d)", name, index);
383 name = ggc_strdup (tempname);
384 free (tempname);
385 }
386
387 ret->id = index;
388 ret->name = name;
389 ret->decl = t;
390 /* Vars without decl are artificial and do not have sub-variables. */
391 ret->is_artificial_var = (t == NULL_TREE);
392 ret->is_special_var = false;
393 ret->is_unknown_size_var = false;
394 ret->is_full_var = (t == NULL_TREE);
395 ret->is_heap_var = false;
396 ret->may_have_pointers = true;
397 ret->only_restrict_pointers = false;
398 ret->is_restrict_var = false;
399 ret->ruid = 0;
400 ret->is_global_var = (t == NULL_TREE);
401 ret->is_ipa_escape_point = false;
402 ret->is_fn_info = false;
403 ret->address_taken = false;
404 if (t && DECL_P (t))
405 ret->is_global_var = (is_global_var (t)
406 /* We have to treat even local register variables
407 as escape points. */
408 || (VAR_P (t) && DECL_HARD_REGISTER (t)));
409 ret->is_reg_var = (t && TREE_CODE (t) == SSA_NAME);
410 ret->solution = BITMAP_ALLOC (&pta_obstack);
411 ret->oldsolution = NULL;
412 ret->next = 0;
413 ret->shadow_var_uid = 0;
414 ret->head = ret->id;
415
416 stats.total_vars++;
417
418 varmap.safe_push (ret);
419
420 return ret;
421 }
422
423 /* A map mapping call statements to per-stmt variables for uses
424 and clobbers specific to the call. */
425 static hash_map<gimple *, varinfo_t> *call_stmt_vars;
426
427 /* Lookup or create the variable for the call statement CALL. */
428
429 static varinfo_t
430 get_call_vi (gcall *call)
431 {
432 varinfo_t vi, vi2;
433
434 bool existed;
435 varinfo_t *slot_p = &call_stmt_vars->get_or_insert (call, &existed);
436 if (existed)
437 return *slot_p;
438
439 vi = new_var_info (NULL_TREE, "CALLUSED", true);
440 vi->offset = 0;
441 vi->size = 1;
442 vi->fullsize = 2;
443 vi->is_full_var = true;
444 vi->is_reg_var = true;
445
446 vi2 = new_var_info (NULL_TREE, "CALLCLOBBERED", true);
447 vi2->offset = 1;
448 vi2->size = 1;
449 vi2->fullsize = 2;
450 vi2->is_full_var = true;
451 vi2->is_reg_var = true;
452
453 vi->next = vi2->id;
454
455 *slot_p = vi;
456 return vi;
457 }
458
459 /* Lookup the variable for the call statement CALL representing
460 the uses. Returns NULL if there is nothing special about this call. */
461
462 static varinfo_t
463 lookup_call_use_vi (gcall *call)
464 {
465 varinfo_t *slot_p = call_stmt_vars->get (call);
466 if (slot_p)
467 return *slot_p;
468
469 return NULL;
470 }
471
472 /* Lookup the variable for the call statement CALL representing
473 the clobbers. Returns NULL if there is nothing special about this call. */
474
475 static varinfo_t
476 lookup_call_clobber_vi (gcall *call)
477 {
478 varinfo_t uses = lookup_call_use_vi (call);
479 if (!uses)
480 return NULL;
481
482 return vi_next (uses);
483 }
484
485 /* Lookup or create the variable for the call statement CALL representing
486 the uses. */
487
488 static varinfo_t
489 get_call_use_vi (gcall *call)
490 {
491 return get_call_vi (call);
492 }
493
494 /* Lookup or create the variable for the call statement CALL representing
495 the clobbers. */
496
497 static varinfo_t ATTRIBUTE_UNUSED
498 get_call_clobber_vi (gcall *call)
499 {
500 return vi_next (get_call_vi (call));
501 }
502
503
/* The three kinds of constraint expressions: x, *x and &x.  */
enum constraint_expr_type {SCALAR, DEREF, ADDRESSOF};

/* An expression that appears in a constraint.  */

struct constraint_expr
{
  /* Constraint type.  */
  constraint_expr_type type;

  /* Variable we are referring to in the constraint (an id indexing
     varmap).  */
  unsigned int var;

  /* Offset, in bits, of this constraint from the beginning of
     variables it ends up referring to.

     IOW, in a deref constraint, we would deref, get the result set,
     then add OFFSET to each member.  */
  HOST_WIDE_INT offset;
};
523
524 /* Use 0x8000... as special unknown offset. */
525 #define UNKNOWN_OFFSET HOST_WIDE_INT_MIN
526
527 typedef struct constraint_expr ce_s;
528 static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
529 static void get_constraint_for (tree, vec<ce_s> *);
530 static void get_constraint_for_rhs (tree, vec<ce_s> *);
531 static void do_deref (vec<ce_s> *);
532
/* Our set constraints are made up of two constraint expressions, one
   LHS, and one RHS.

   As described in the introduction, our set constraints each represent an
   operation between set valued variables.
*/
struct constraint
{
  struct constraint_expr lhs;
  struct constraint_expr rhs;
};
544
545 /* List of constraints that we use to build the constraint graph from. */
546
547 static vec<constraint_t> constraints;
548 static object_allocator<constraint> constraint_pool ("Constraint pool");
549
/* The constraint graph is represented as an array of bitmaps
   containing successor nodes, indexed by constraint-variable id.  */

struct constraint_graph
{
  /* Size of this graph, which may be different than the number of
     nodes in the variable map.  */
  unsigned int size;

  /* Explicit successors of each node.  */
  bitmap *succs;

  /* Implicit predecessors of each node (Used for variable
     substitution).  */
  bitmap *implicit_preds;

  /* Explicit predecessors of each node (Used for variable substitution).  */
  bitmap *preds;

  /* Indirect cycle representatives, or -1 if the node has no indirect
     cycles.  */
  int *indirect_cycles;

  /* Representative node for a node.  rep[a] == a unless the node has
     been unified.  */
  unsigned int *rep;

  /* Equivalence class representative for a label.  This is used for
     variable substitution.  */
  int *eq_rep;

  /* Pointer equivalence label for a node.  All nodes with the same
     pointer equivalence label can be unified together at some point
     (either during constraint optimization or after the constraint
     graph is built).  */
  unsigned int *pe;

  /* Pointer equivalence representative for a label.  This is used to
     handle nodes that are pointer equivalent but not location
     equivalent.  We can unite these once the addressof constraints
     are transformed into initial points-to sets.  */
  int *pe_rep;

  /* Pointer equivalence label for each node, used during variable
     substitution.  */
  unsigned int *pointer_label;

  /* Location equivalence label for each node, used during location
     equivalence finding.  */
  unsigned int *loc_label;

  /* Pointed-by set for each node, used during location equivalence
     finding.  This is pointed-by rather than pointed-to, because it
     is constructed using the predecessor graph.  */
  bitmap *pointed_by;

  /* Points to sets for pointer equivalence.  This is *not* the actual
     points-to sets for nodes.  */
  bitmap *points_to;

  /* Bitmap of nodes where the bit is set if the node is a direct
     node.  Used for variable substitution.  */
  sbitmap direct_nodes;

  /* Bitmap of nodes where the bit is set if the node is address
     taken.  Used for variable substitution.  */
  bitmap address_taken;

  /* Vector of complex constraints for each graph node.  Complex
     constraints are those involving dereferences or offsets that are
     not 0.  */
  vec<constraint_t> *complex;
};
623
624 static constraint_graph_t graph;
625
626 /* During variable substitution and the offline version of indirect
627 cycle finding, we create nodes to represent dereferences and
628 address taken constraints. These represent where these start and
629 end. */
630 #define FIRST_REF_NODE (varmap).length ()
631 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
632
633 /* Return the representative node for NODE, if NODE has been unioned
634 with another NODE.
635 This function performs path compression along the way to finding
636 the representative. */
637
638 static unsigned int
639 find (unsigned int node)
640 {
641 gcc_checking_assert (node < graph->size);
642 if (graph->rep[node] != node)
643 return graph->rep[node] = find (graph->rep[node]);
644 return node;
645 }
646
647 /* Union the TO and FROM nodes to the TO nodes.
648 Note that at some point in the future, we may want to do
649 union-by-rank, in which case we are going to have to return the
650 node we unified to. */
651
652 static bool
653 unite (unsigned int to, unsigned int from)
654 {
655 gcc_checking_assert (to < graph->size && from < graph->size);
656 if (to != from && graph->rep[from] != to)
657 {
658 graph->rep[from] = to;
659 return true;
660 }
661 return false;
662 }
663
664 /* Create a new constraint consisting of LHS and RHS expressions. */
665
666 static constraint_t
667 new_constraint (const struct constraint_expr lhs,
668 const struct constraint_expr rhs)
669 {
670 constraint_t ret = constraint_pool.allocate ();
671 ret->lhs = lhs;
672 ret->rhs = rhs;
673 return ret;
674 }
675
676 /* Print out constraint C to FILE. */
677
678 static void
679 dump_constraint (FILE *file, constraint_t c)
680 {
681 if (c->lhs.type == ADDRESSOF)
682 fprintf (file, "&");
683 else if (c->lhs.type == DEREF)
684 fprintf (file, "*");
685 if (dump_file)
686 fprintf (file, "%s", get_varinfo (c->lhs.var)->name);
687 else
688 fprintf (file, "V%d", c->lhs.var);
689 if (c->lhs.offset == UNKNOWN_OFFSET)
690 fprintf (file, " + UNKNOWN");
691 else if (c->lhs.offset != 0)
692 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
693 fprintf (file, " = ");
694 if (c->rhs.type == ADDRESSOF)
695 fprintf (file, "&");
696 else if (c->rhs.type == DEREF)
697 fprintf (file, "*");
698 if (dump_file)
699 fprintf (file, "%s", get_varinfo (c->rhs.var)->name);
700 else
701 fprintf (file, "V%d", c->rhs.var);
702 if (c->rhs.offset == UNKNOWN_OFFSET)
703 fprintf (file, " + UNKNOWN");
704 else if (c->rhs.offset != 0)
705 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
706 }
707
708
709 void debug_constraint (constraint_t);
710 void debug_constraints (void);
711 void debug_constraint_graph (void);
712 void debug_solution_for_var (unsigned int);
713 void debug_sa_points_to_info (void);
714 void debug_varinfo (varinfo_t);
715 void debug_varmap (void);
716
717 /* Print out constraint C to stderr. */
718
719 DEBUG_FUNCTION void
720 debug_constraint (constraint_t c)
721 {
722 dump_constraint (stderr, c);
723 fprintf (stderr, "\n");
724 }
725
726 /* Print out all constraints to FILE */
727
728 static void
729 dump_constraints (FILE *file, int from)
730 {
731 int i;
732 constraint_t c;
733 for (i = from; constraints.iterate (i, &c); i++)
734 if (c)
735 {
736 dump_constraint (file, c);
737 fprintf (file, "\n");
738 }
739 }
740
/* Print out all constraints, starting from the first, to stderr.  */

DEBUG_FUNCTION void
debug_constraints (void)
{
  dump_constraints (stderr, 0);
}
748
/* Print the constraint graph in dot format.  */

static void
dump_constraint_graph (FILE *file)
{
  unsigned int i;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Prints the header of the dot file:  */
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes and complex constraints in "
	   "the constraint graph:\n");

  /* The next lines print the nodes in the graph together with the
     complex constraints attached to them.  Node 0 is skipped (unused);
     unified nodes are printed only through their representative.  */
  for (i = 1; i < graph->size; i++)
    {
      if (i == FIRST_REF_NODE)
	continue;
      if (find (i) != i)
	continue;
      /* Nodes at FIRST_REF_NODE and above stand for *var of the
	 corresponding variable.  */
      if (i < FIRST_REF_NODE)
	fprintf (file, "\"%s\"", get_varinfo (i)->name);
      else
	fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
      if (graph->complex[i].exists ())
	{
	  unsigned j;
	  constraint_t c;
	  /* Use the node name plus its complex constraints as label.  */
	  fprintf (file, " [label=\"\\N\\n");
	  for (j = 0; graph->complex[i].iterate (j, &c); ++j)
	    {
	      dump_constraint (file, c);
	      fprintf (file, "\\l");
	    }
	  fprintf (file, "\"]");
	}
      fprintf (file, ";\n");
    }

  /* Go over the edges.  */
  fprintf (file, "\n // Edges in the constraint graph:\n");
  for (i = 1; i < graph->size; i++)
    {
      unsigned j;
      bitmap_iterator bi;
      if (find (i) != i)
	continue;
      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
	{
	  unsigned to = find (j);
	  /* Self edges (after unification) are not interesting.  */
	  if (i == to)
	    continue;
	  if (i < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (i)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
	  fprintf (file, " -> ");
	  if (to < FIRST_REF_NODE)
	    fprintf (file, "\"%s\"", get_varinfo (to)->name);
	  else
	    fprintf (file, "\"*%s\"", get_varinfo (to - FIRST_REF_NODE)->name);
	  fprintf (file, ";\n");
	}
    }

  /* Prints the tail of the dot file.  */
  fprintf (file, "}\n");
}
823
/* Print out the constraint graph in dot format to stderr.  */

DEBUG_FUNCTION void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
831
832 /* SOLVER FUNCTIONS
833
834 The solver is a simple worklist solver, that works on the following
835 algorithm:
836
837 sbitmap changed_nodes = all zeroes;
838 changed_count = 0;
839 For each node that is not already collapsed:
840 changed_count++;
841 set bit in changed nodes
842
843 while (changed_count > 0)
844 {
845 compute topological ordering for constraint graph
846
847 find and collapse cycles in the constraint graph (updating
848 changed if necessary)
849
850 for each node (n) in the graph in topological order:
851 changed_count--;
852
853 Process each complex constraint associated with the node,
854 updating changed if necessary.
855
856 For each outgoing edge from n, propagate the solution from n to
857 the destination of the edge, updating changed as necessary.
858
859 } */
860
861 /* Return true if two constraint expressions A and B are equal. */
862
863 static bool
864 constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
865 {
866 return a.type == b.type && a.var == b.var && a.offset == b.offset;
867 }
868
869 /* Return true if constraint expression A is less than constraint expression
870 B. This is just arbitrary, but consistent, in order to give them an
871 ordering. */
872
873 static bool
874 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
875 {
876 if (a.type == b.type)
877 {
878 if (a.var == b.var)
879 return a.offset < b.offset;
880 else
881 return a.var < b.var;
882 }
883 else
884 return a.type < b.type;
885 }
886
887 /* Return true if constraint A is less than constraint B. This is just
888 arbitrary, but consistent, in order to give them an ordering. */
889
890 static bool
891 constraint_less (const constraint_t &a, const constraint_t &b)
892 {
893 if (constraint_expr_less (a->lhs, b->lhs))
894 return true;
895 else if (constraint_expr_less (b->lhs, a->lhs))
896 return false;
897 else
898 return constraint_expr_less (a->rhs, b->rhs);
899 }
900
901 /* Return true if two constraints A and B are equal. */
902
903 static bool
904 constraint_equal (struct constraint a, struct constraint b)
905 {
906 return constraint_expr_equal (a.lhs, b.lhs)
907 && constraint_expr_equal (a.rhs, b.rhs);
908 }
909
910
911 /* Find a constraint LOOKFOR in the sorted constraint vector VEC */
912
913 static constraint_t
914 constraint_vec_find (vec<constraint_t> vec,
915 struct constraint lookfor)
916 {
917 unsigned int place;
918 constraint_t found;
919
920 if (!vec.exists ())
921 return NULL;
922
923 place = vec.lower_bound (&lookfor, constraint_less);
924 if (place >= vec.length ())
925 return NULL;
926 found = vec[place];
927 if (!constraint_equal (*found, lookfor))
928 return NULL;
929 return found;
930 }
931
932 /* Union two constraint vectors, TO and FROM. Put the result in TO.
933 Returns true of TO set is changed. */
934
935 static bool
936 constraint_set_union (vec<constraint_t> *to,
937 vec<constraint_t> *from)
938 {
939 int i;
940 constraint_t c;
941 bool any_change = false;
942
943 FOR_EACH_VEC_ELT (*from, i, c)
944 {
945 if (constraint_vec_find (*to, *c) == NULL)
946 {
947 unsigned int place = to->lower_bound (c, constraint_less);
948 to->safe_insert (place, c);
949 any_change = true;
950 }
951 }
952 return any_change;
953 }
954
/* Expands the solution in SET to all sub-fields of variables included.
   The expanded set is stored in *EXPANDED (allocated from the
   per-iteration obstack on first use) and returned; if *EXPANDED is
   already non-NULL the cached expansion is returned unchanged.  */

static bitmap
solution_set_expand (bitmap set, bitmap *expanded)
{
  bitmap_iterator bi;
  unsigned j;

  if (*expanded)
    return *expanded;

  *expanded = BITMAP_ALLOC (&iteration_obstack);

  /* In a first pass expand to the head of the variables we need to
     add all sub-fields off.  This avoids quadratic behavior.  */
  EXECUTE_IF_SET_IN_BITMAP (set, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Artificial and single-field variables have no sub-fields.  */
      if (v->is_artificial_var
	  || v->is_full_var)
	continue;
      bitmap_set_bit (*expanded, v->head);
    }

  /* In the second pass now expand all head variables with subfields.  */
  EXECUTE_IF_SET_IN_BITMAP (*expanded, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      /* Only head variables were added in the first pass; skip any
	 sub-field bits this very loop adds.  */
      if (v->head != j)
	continue;
      for (v = vi_next (v); v != NULL; v = vi_next (v))
	bitmap_set_bit (*expanded, v->id);
    }

  /* And finally set the rest of the bits from SET.  */
  bitmap_ior_into (*expanded, set);

  return *expanded;
}
994
/* Union solution sets TO and DELTA, and add INC to each member of DELTA in the
   process.  Returns true if TO changed.  *EXPANDED_DELTA caches the
   sub-field expansion of DELTA across calls (see solution_set_expand).  */

static bool
set_union_with_increment (bitmap to, bitmap delta, HOST_WIDE_INT inc,
			  bitmap *expanded_delta)
{
  bool changed = false;
  bitmap_iterator bi;
  unsigned int i;

  /* If the solution of DELTA contains anything it is good enough to transfer
     this to TO.  */
  if (bitmap_bit_p (delta, anything_id))
    return bitmap_set_bit (to, anything_id);

  /* If the offset is unknown we have to expand the solution to
     all subfields.  */
  if (inc == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      changed |= bitmap_ior_into (to, delta);
      return changed;
    }

  /* For non-zero offset union the offsetted solution into the destination.  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	changed |= bitmap_set_bit (to, i);
      else
	{
	  HOST_WIDE_INT fieldoffset = vi->offset + inc;
	  unsigned HOST_WIDE_INT size = vi->size;

	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    vi = get_varinfo (vi->head);
	  else
	    vi = first_or_preceding_vi_for_offset (vi, fieldoffset);

	  do
	    {
	      changed |= bitmap_set_bit (to, vi->id);
	      if (vi->is_full_var
		  || vi->next == 0)
		break;

	      /* We have to include all fields that overlap the current field
		 shifted by inc.  */
	      vi = vi_next (vi);
	    }
	  /* NOTE(review): fieldoffset is signed while size is unsigned, so
	     this comparison happens in unsigned arithmetic — presumably
	     fine because negative fieldoffsets were redirected to the head
	     above, but worth confirming for extreme offsets.  */
	  while (vi->offset < fieldoffset + size);
	}
    }

  return changed;
}
1060
1061 /* Insert constraint C into the list of complex constraints for graph
1062 node VAR. */
1063
1064 static void
1065 insert_into_complex (constraint_graph_t graph,
1066 unsigned int var, constraint_t c)
1067 {
1068 vec<constraint_t> complex = graph->complex[var];
1069 unsigned int place = complex.lower_bound (c, constraint_less);
1070
1071 /* Only insert constraints that do not already exist. */
1072 if (place >= complex.length ()
1073 || !constraint_equal (*c, *complex[place]))
1074 graph->complex[var].safe_insert (place, c);
1075 }
1076
1077
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  Returns true if TO node's
   constraint set changes after the merge.  FROM must already have TO
   as its union-find representative.  */

static bool
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;
  bool any_change = false;

  gcc_checking_assert (find (from) == to);

  /* Move all complex constraints from src node into to node  */
  FOR_EACH_VEC_ELT (graph->complex[from], i, c)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;

    }
  /* Merge the rewritten constraints into TO's sorted list, dropping
     duplicates, then release FROM's now-unused vector.  */
  any_change = constraint_set_union (&graph->complex[to],
				     &graph->complex[from]);
  graph->complex[from].release ();
  return any_change;
}
1112
1113
1114 /* Remove edges involving NODE from GRAPH. */
1115
1116 static void
1117 clear_edges_for_node (constraint_graph_t graph, unsigned int node)
1118 {
1119 if (graph->succs[node])
1120 BITMAP_FREE (graph->succs[node]);
1121 }
1122
1123 /* Merge GRAPH nodes FROM and TO into node TO. */
1124
1125 static void
1126 merge_graph_nodes (constraint_graph_t graph, unsigned int to,
1127 unsigned int from)
1128 {
1129 if (graph->indirect_cycles[from] != -1)
1130 {
1131 /* If we have indirect cycles with the from node, and we have
1132 none on the to node, the to node has indirect cycles from the
1133 from node now that they are unified.
1134 If indirect cycles exist on both, unify the nodes that they
1135 are in a cycle with, since we know they are in a cycle with
1136 each other. */
1137 if (graph->indirect_cycles[to] == -1)
1138 graph->indirect_cycles[to] = graph->indirect_cycles[from];
1139 }
1140
1141 /* Merge all the successor edges. */
1142 if (graph->succs[from])
1143 {
1144 if (!graph->succs[to])
1145 graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
1146 bitmap_ior_into (graph->succs[to],
1147 graph->succs[from]);
1148 }
1149
1150 clear_edges_for_node (graph, from);
1151 }
1152
1153
1154 /* Add an indirect graph edge to GRAPH, going from TO to FROM if
1155 it doesn't exist in the graph already. */
1156
1157 static void
1158 add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
1159 unsigned int from)
1160 {
1161 if (to == from)
1162 return;
1163
1164 if (!graph->implicit_preds[to])
1165 graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
1166
1167 if (bitmap_set_bit (graph->implicit_preds[to], from))
1168 stats.num_implicit_edges++;
1169 }
1170
/* Add a predecessor graph edge to GRAPH, going from TO to FROM, i.e.
   record FROM in TO's predecessor bitmap.  Adding an edge that already
   exists is harmless.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Allocate TO's predecessor bitmap lazily.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1183
/* Add a graph edge to GRAPH, going from FROM to TO if
   it doesn't exist in the graph already.
   Return false if the edge already existed, was a self edge, or was
   suppressed by the ESCAPED heuristic below; true otherwise.  */

static bool
add_graph_edge (constraint_graph_t graph, unsigned int to,
		unsigned int from)
{
  if (to == from)
    {
      return false;
    }
  else
    {
      bool r = false;

      if (!graph->succs[from])
	graph->succs[from] = BITMAP_ALLOC (&pta_obstack);

      /* The graph solving process does not avoid "triangles", thus
	 there can be multiple paths from a node to another involving
	 intermediate other nodes.  That causes extra copying which is
	 most difficult to avoid when the intermediate node is ESCAPED
	 because there are no edges added from ESCAPED.  Avoid
	 adding the direct edge FROM -> TO when we have FROM -> ESCAPED
	 and TO contains ESCAPED.
	 ??? Note this is only a heuristic, it does not prevent the
	 situation from occurring.  The heuristic helps PR38474 and
	 PR99912 significantly.  */
      if (to < FIRST_REF_NODE
	  && bitmap_bit_p (graph->succs[from], find (escaped_id))
	  && bitmap_bit_p (get_varinfo (find (to))->solution, escaped_id))
	return false;

      if (bitmap_set_bit (graph->succs[from], to))
	{
	  r = true;
	  /* Only edges between non-REF nodes are counted in the
	     statistics.  */
	  if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
	    stats.num_edges++;
	}
      return r;
    }
}
1227
1228
1229 /* Initialize the constraint graph structure to contain SIZE nodes. */
1230
1231 static void
1232 init_graph (unsigned int size)
1233 {
1234 unsigned int j;
1235
1236 graph = XCNEW (struct constraint_graph);
1237 graph->size = size;
1238 graph->succs = XCNEWVEC (bitmap, graph->size);
1239 graph->indirect_cycles = XNEWVEC (int, graph->size);
1240 graph->rep = XNEWVEC (unsigned int, graph->size);
1241 /* ??? Macros do not support template types with multiple arguments,
1242 so we use a typedef to work around it. */
1243 typedef vec<constraint_t> vec_constraint_t_heap;
1244 graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
1245 graph->pe = XCNEWVEC (unsigned int, graph->size);
1246 graph->pe_rep = XNEWVEC (int, graph->size);
1247
1248 for (j = 0; j < graph->size; j++)
1249 {
1250 graph->rep[j] = j;
1251 graph->pe_rep[j] = -1;
1252 graph->indirect_cycles[j] = -1;
1253 }
1254 }
1255
/* Build the constraint graph, adding only predecessor edges right now.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_clear (graph->direct_nodes);

  /* All non-special variables start out as direct nodes; constraints
     below may demote them.  */
  for (j = 1; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	bitmap_set_bit (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < varmap.length (); j++)
    graph->indirect_cycles[j] = -1;

  /* Translate each constraint into predecessor and implicit edges.  */
  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = lhs.var;
      unsigned int rhsvar = rhs.var;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    bitmap_clear_bit (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  varinfo_t v;

	  /* x = &y */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* All related variables are no longer direct nodes.  */
	  bitmap_clear_bit (graph->direct_nodes, rhsvar);
	  v = get_varinfo (rhsvar);
	  if (!v->is_full_var)
	    {
	      /* For a multi-field variable clear all its fields,
		 starting from the head of the chain.  */
	      v = get_varinfo (v->head);
	      do
		{
		  bitmap_clear_bit (graph->direct_nodes, v->id);
		  v = vi_next (v);
		}
	      while (v != NULL);
	    }
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Offsetted copies make the offsetted side indirect.  */
	  if (rhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    bitmap_clear_bit (graph->direct_nodes, rhs.var);
	}
    }
}
1358
/* Build the constraint graph, adding successor edges.  */

static void
build_succ_graph (void)
{
  unsigned i, t;
  constraint_t c;

  FOR_EACH_VEC_ELT (constraints, i, c)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* NULL entries can occur in the constraint vector at this
	 point; skip them.  (Presumably removed by an earlier
	 rewriting phase -- verify against the callers.)  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Work on union-find representatives.  */
      lhsvar = find (lhs.var);
      rhsvar = find (rhs.var);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y */
	  gcc_checking_assert (find (rhs.var) == rhs.var);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }

  /* Add edges from STOREDANYTHING to all non-direct nodes that can
     receive pointers.  */
  t = find (storedanything_id);
  for (i = integer_id + 1; i < FIRST_REF_NODE; ++i)
    {
      if (!bitmap_bit_p (graph->direct_nodes, i)
	  && get_varinfo (i)->may_have_pointers)
	add_graph_edge (graph, find (i), t);
    }

  /* Everything stored to ANYTHING also potentially escapes.  */
  add_graph_edge (graph, find (escaped_id), t);
}
1418
1419
1420 /* Changed variables on the last iteration. */
1421 static bitmap changed;
1422
/* Strongly Connected Component visitation info.  */

class scc_info
{
public:
  scc_info (size_t size);
  ~scc_info ();

  /* Nodes already reached by the DFS.  */
  auto_sbitmap visited;
  /* Nodes whose SCC has been completely processed.  */
  auto_sbitmap deleted;
  /* Per-node DFS discovery number / lowlink.  */
  unsigned int *dfs;
  /* Per-node representative mapping.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of candidate SCC members; Nuutila's variant keeps only
     non-root nodes here.  */
  auto_vec<unsigned> scc_stack;
};
1438
1439
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   connected components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  bitmap_set_bit (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits beyond LAST_REF_NODE do not denote graph nodes.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (bitmap_bit_p (si->deleted, w))
	continue;

      if (!bitmap_bit_p (si->visited, w))
	scc_visit (graph, si, w);

      /* Propagate the smallest reachable DFS number (the lowlink)
	 back to N; the recursion may have unified W, so re-find it.  */
      unsigned int t = find (w);
      gcc_checking_assert (find (n) == n);
      if (si->dfs[t] < si->dfs[n])
	si->dfs[n] = si->dfs[t];
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* N is the root of an SCC; pop all its members off the stack.  */
      if (si->scc_stack.length () > 0
	  && si->dfs[si->scc_stack.last ()] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  while (si->scc_stack.length () != 0
		 && si->dfs[si->scc_stack.last ()] >= my_dfs)
	    {
	      unsigned int w = si->scc_stack.pop ();

	      bitmap_set_bit (scc, w);
	    }

	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      bitmap_set_bit (si->deleted, n);
    }
  else
    /* N belongs to an SCC rooted further up; keep it on the stack.  */
    si->scc_stack.safe_push (n);
}
1527
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be FROM's
   union-find representative and distinct from FROM.  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{
  gcc_checking_assert (to != from && find (to) == to);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Merge edges and complex constraints from FROM into TO.  */
  merge_graph_nodes (graph, to, from);
  if (merge_node_constraints (graph, to, from))
    {
      if (update_changed)
	bitmap_set_bit (changed, to);
    }

  /* Mark TO as changed if FROM was changed. If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed
      && bitmap_clear_bit (changed, from))
    bitmap_set_bit (changed, to);
  varinfo_t fromvi = get_varinfo (from);
  if (fromvi->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      varinfo_t tovi = get_varinfo (to);
      if (bitmap_ior_into (tovi->solution, fromvi->solution))
	{
	  if (update_changed)
	    bitmap_set_bit (changed, to);
	}

      /* FROM's solutions are no longer needed after the merge.  */
      BITMAP_FREE (fromvi->solution);
      if (fromvi->oldsolution)
	BITMAP_FREE (fromvi->oldsolution);

      if (stats.iterations > 0
	  && tovi->oldsolution)
	BITMAP_FREE (tovi->oldsolution);
    }
  /* Remove any self edge the merge may have created.  */
  if (graph->succs[to])
    bitmap_clear_bit (graph->succs[to], to);
}
1583
/* Process a constraint C that represents x = *(y + off), using DELTA as the
   starting solution for y.  *EXPANDED_DELTA caches the sub-field
   expansion of DELTA across calls.  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta, bitmap *expanded_delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT roffset = c->rhs.offset;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->lhs.offset == 0);

  /* If the solution of Y contains anything it is good enough to transfer
     this to the LHS.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* If we do not know at which offset the rhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (roffset == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      /* No further offset processing is necessary.  */
      roffset = 0;
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      HOST_WIDE_INT fieldoffset = v->offset + roffset;
      unsigned HOST_WIDE_INT size = v->size;
      unsigned int t;

      if (v->is_full_var)
	;
      else if (roffset != 0)
	{
	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by roffset.  */
      do
	{
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.  */
	  else if (v->id == escaped_id)
	    flag |= bitmap_set_bit (sol, escaped_id);
	  else if (v->may_have_pointers
		   && add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      bitmap_set_bit (changed, lhs);
    }
}
1673
/* Process a constraint C that represents *(x + off) = y using DELTA
   as the starting solution for x.  *EXPANDED_DELTA caches the
   sub-field expansion of DELTA across calls.  */

static void
do_ds_constraint (constraint_t c, bitmap delta, bitmap *expanded_delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;
  HOST_WIDE_INT loff = c->lhs.offset;
  bool escaped_p = false;

  /* Our IL does not allow this.  */
  gcc_checking_assert (c->rhs.offset == 0);

  /* If the solution of y contains ANYTHING simply use the ANYTHING
     solution.  This avoids needlessly increasing the points-to sets.  */
  if (bitmap_bit_p (sol, anything_id))
    sol = get_varinfo (find (anything_id))->solution;

  /* If the solution for x contains ANYTHING we have to merge the
     solution of y into all pointer variables which we do via
     STOREDANYTHING.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      unsigned t = find (storedanything_id);
      if (add_graph_edge (graph, t, rhs))
	{
	  if (bitmap_ior_into (get_varinfo (t)->solution, sol))
	    bitmap_set_bit (changed, t);
	}
      return;
    }

  /* If we do not know at which offset the lhs is dereferenced compute
     the reachability set of DELTA, conservatively assuming it is
     dereferenced at all valid offsets.  */
  if (loff == UNKNOWN_OFFSET)
    {
      delta = solution_set_expand (delta, expanded_delta);
      loff = 0;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      varinfo_t v = get_varinfo (j);
      unsigned int t;
      HOST_WIDE_INT fieldoffset = v->offset + loff;
      unsigned HOST_WIDE_INT size = v->size;

      if (v->is_full_var)
	;
      else if (loff != 0)
	{
	  /* If the offset makes the pointer point to before the
	     variable use offset zero for the field lookup.  */
	  if (fieldoffset < 0)
	    v = get_varinfo (v->head);
	  else
	    v = first_or_preceding_vi_for_offset (v, fieldoffset);
	}

      /* We have to include all fields that overlap the current field
	 shifted by loff.  */
      do
	{
	  if (v->may_have_pointers)
	    {
	      /* If v is a global variable then this is an escape point.  */
	      if (v->is_global_var
		  && !escaped_p)
		{
		  t = find (escaped_id);
		  if (add_graph_edge (graph, t, rhs)
		      && bitmap_ior_into (get_varinfo (t)->solution, sol))
		    bitmap_set_bit (changed, t);
		  /* Enough to let rhs escape once.  */
		  escaped_p = true;
		}

	      if (v->is_special_var)
		break;

	      t = find (v->id);
	      if (add_graph_edge (graph, t, rhs)
		  && bitmap_ior_into (get_varinfo (t)->solution, sol))
		bitmap_set_bit (changed, t);
	    }

	  if (v->is_full_var
	      || v->next == 0)
	    break;

	  v = vi_next (v);
	}
      while (v->offset < fieldoffset + size);
    }
}
1773
1774 /* Handle a non-simple (simple meaning requires no iteration),
1775 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1776
1777 static void
1778 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta,
1779 bitmap *expanded_delta)
1780 {
1781 if (c->lhs.type == DEREF)
1782 {
1783 if (c->rhs.type == ADDRESSOF)
1784 {
1785 gcc_unreachable ();
1786 }
1787 else
1788 {
1789 /* *x = y */
1790 do_ds_constraint (c, delta, expanded_delta);
1791 }
1792 }
1793 else if (c->rhs.type == DEREF)
1794 {
1795 /* x = *y */
1796 if (!(get_varinfo (c->lhs.var)->is_special_var))
1797 do_sd_constraint (graph, c, delta, expanded_delta);
1798 }
1799 else
1800 {
1801 bitmap tmp;
1802 bool flag = false;
1803
1804 gcc_checking_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR
1805 && c->rhs.offset != 0 && c->lhs.offset == 0);
1806 tmp = get_varinfo (c->lhs.var)->solution;
1807
1808 flag = set_union_with_increment (tmp, delta, c->rhs.offset,
1809 expanded_delta);
1810
1811 if (flag)
1812 bitmap_set_bit (changed, c->lhs.var);
1813 }
1814 }
1815
1816 /* Initialize and return a new SCC info structure. */
1817
1818 scc_info::scc_info (size_t size) :
1819 visited (size), deleted (size), current_index (0), scc_stack (1)
1820 {
1821 bitmap_clear (visited);
1822 bitmap_clear (deleted);
1823 node_mapping = XNEWVEC (unsigned int, size);
1824 dfs = XCNEWVEC (unsigned int, size);
1825
1826 for (size_t i = 0; i < size; i++)
1827 node_mapping[i] = i;
1828 }
1829
/* Free an SCC info structure pointed to by SI.  Only the manually
   allocated arrays need freeing here; the other members clean up
   themselves.  */

scc_info::~scc_info ()
{
  free (node_mapping);
  free (dfs);
}
1837
1838
1839 /* Find indirect cycles in GRAPH that occur, using strongly connected
1840 components, and note them in the indirect cycles map.
1841
1842 This technique comes from Ben Hardekopf and Calvin Lin,
1843 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1844 Lines of Code", submitted to PLDI 2007. */
1845
1846 static void
1847 find_indirect_cycles (constraint_graph_t graph)
1848 {
1849 unsigned int i;
1850 unsigned int size = graph->size;
1851 scc_info si (size);
1852
1853 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1854 if (!bitmap_bit_p (si.visited, i) && find (i) == i)
1855 scc_visit (graph, &si, i);
1856 }
1857
/* Visit the graph in topological order starting at node N, and store the
   order in TOPO_ORDER using VISITED to indicate visited nodes.
   Successors are pushed before N itself, so TOPO_ORDER ends up in
   reverse-postorder.  */

static void
topo_visit (constraint_graph_t graph, vec<unsigned> &topo_order,
	    sbitmap visited, unsigned int n)
{
  bitmap_iterator bi;
  unsigned int j;

  bitmap_set_bit (visited, n);

  /* Recurse into every unvisited successor representative first.  */
  if (graph->succs[n])
    EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
      {
	unsigned k = find (j);
	if (!bitmap_bit_p (visited, k))
	  topo_visit (graph, topo_order, visited, k);
      }

  topo_order.quick_push (n);
}
1880
/* Compute a topological ordering for GRAPH, and return the result.  */

static auto_vec<unsigned>
compute_topo_order (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;

  auto_sbitmap visited (size);
  bitmap_clear (visited);

  /* For the heuristic in add_graph_edge to work optimally make sure to
     first visit the connected component of the graph containing
     ESCAPED.  Do this by extracting the connected component
     with ESCAPED and append that to all other components as solve_graph
     pops from the order.  */
  auto_vec<unsigned> tail (size);
  topo_visit (graph, tail, visited, find (escaped_id));

  auto_vec<unsigned> topo_order (size);

  /* Visit all remaining representative nodes.  */
  for (i = 0; i != size; ++i)
    if (!bitmap_bit_p (visited, i) && find (i) == i)
      topo_visit (graph, topo_order, visited, i);

  /* Appending TAIL last makes the ESCAPED component be processed
     first by consumers that pop from the end.  */
  topo_order.splice (tail);
  return topo_order;
}
1909
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* Cached hash of LABELS.  */
  hashval_t hashcode;
  /* Equivalence class number assigned to LABELS; 0 means not yet
     assigned.  */
  unsigned int equivalence_class;
  /* The set of labels this entry represents.  */
  bitmap labels;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1920
/* Equiv_class_label hashtable helpers.  Entries are not freed by the
   table (nofree_ptr_hash); they live on equiv_class_obstack.  */

struct equiv_class_hasher : nofree_ptr_hash <equiv_class_label>
{
  static inline hashval_t hash (const equiv_class_label *);
  static inline bool equal (const equiv_class_label *,
			    const equiv_class_label *);
};
1929
/* Hash function for an equiv_class_label_t.  Returns the hash value
   cached when the entry was created.  */

inline hashval_t
equiv_class_hasher::hash (const equiv_class_label *ecl)
{
  return ecl->hashcode;
}
1937
/* Equality function for two equiv_class_label_t's.  Compares the
   cached hash codes first to avoid the more expensive bitmap
   comparison when possible.  */

inline bool
equiv_class_hasher::equal (const equiv_class_label *eql1,
			   const equiv_class_label *eql2)
{
  return (eql1->hashcode == eql2->hashcode
	  && bitmap_equal_p (eql1->labels, eql2->labels));
}
1947
1948 /* A hashtable for mapping a bitmap of labels->pointer equivalence
1949 classes. */
1950 static hash_table<equiv_class_hasher> *pointer_equiv_class_table;
1951
1952 /* A hashtable for mapping a bitmap of labels->location equivalence
1953 classes. */
1954 static hash_table<equiv_class_hasher> *location_equiv_class_table;
1955
1956 struct obstack equiv_class_obstack;
1957
/* Look up the equivalence class entry for the bitmap of LABELS in
   TABLE, inserting a fresh entry with equivalence class 0 if none
   exists yet.  Returns the entry; the caller is responsible for
   assigning a real equivalence class number to new entries.  */

static equiv_class_label *
equiv_class_lookup_or_add (hash_table<equiv_class_hasher> *table,
			   bitmap labels)
{
  equiv_class_label **slot;
  equiv_class_label ecl;

  ecl.labels = labels;
  ecl.hashcode = bitmap_hash (labels);
  slot = table->find_slot (&ecl, INSERT);
  if (!*slot)
    {
      /* Allocate the new entry on the equivalence class obstack.  */
      *slot = XOBNEW (&equiv_class_obstack, struct equiv_class_label);
      (*slot)->labels = labels;
      (*slot)->hashcode = ecl.hashcode;
      (*slot)->equivalence_class = 0;
    }

  return *slot;
}
1982
1983 /* Perform offline variable substitution.
1984
1985 This is a worst case quadratic time way of identifying variables
1986 that must have equivalent points-to sets, including those caused by
1987 static cycles, and single entry subgraphs, in the constraint graph.
1988
1989 The technique is described in "Exploiting Pointer and Location
1990 Equivalence to Optimize Pointer Analysis. In the 14th International
1991 Static Analysis Symposium (SAS), August 2007." It is known as the
1992 "HU" algorithm, and is equivalent to value numbering the collapsed
1993 constraint graph including evaluating unions.
1994
1995 The general method of finding equivalence classes is as follows:
1996 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1997 Initialize all non-REF nodes to be direct nodes.
1998 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1999 variable}
2000 For each constraint containing the dereference, we also do the same
2001 thing.
2002
2003 We then compute SCC's in the graph and unify nodes in the same SCC,
2004 including pts sets.
2005
2006 For each non-collapsed node x:
2007 Visit all unvisited explicit incoming edges.
2008 Ignoring all non-pointers, set pts(x) = Union of pts(a) for y
2009 where y->x.
2010 Lookup the equivalence class for pts(x).
2011 If we found one, equivalence_class(x) = found class.
2012 Otherwise, equivalence_class(x) = new class, and new_class is
2013 added to the lookup table.
2014
2015 All direct nodes with the same equivalence class can be replaced
2016 with a single representative node.
2017 All unlabeled nodes (label == 0) are not pointers and all edges
2018 involving them can be eliminated.
2019 We perform these optimizations during rewrite_constraints
2020
2021 In addition to pointer equivalence class finding, we also perform
2022 location equivalence class finding. This is the set of variables
2023 that always appear together in points-to sets. We use this to
2024 compress the size of the points-to sets. */
2025
2026 /* Current maximum pointer equivalence class id. */
2027 static int pointer_equiv_class;
2028
2029 /* Current maximum location equivalence class id. */
2030 static int location_equiv_class;
2031
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  */
2034
2035 static void
2036 condense_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
2037 {
2038 unsigned int i;
2039 bitmap_iterator bi;
2040 unsigned int my_dfs;
2041
2042 gcc_checking_assert (si->node_mapping[n] == n);
2043 bitmap_set_bit (si->visited, n);
2044 si->dfs[n] = si->current_index ++;
2045 my_dfs = si->dfs[n];
2046
2047 /* Visit all the successors. */
2048 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
2049 {
2050 unsigned int w = si->node_mapping[i];
2051
2052 if (bitmap_bit_p (si->deleted, w))
2053 continue;
2054
2055 if (!bitmap_bit_p (si->visited, w))
2056 condense_visit (graph, si, w);
2057
2058 unsigned int t = si->node_mapping[w];
2059 gcc_checking_assert (si->node_mapping[n] == n);
2060 if (si->dfs[t] < si->dfs[n])
2061 si->dfs[n] = si->dfs[t];
2062 }
2063
2064 /* Visit all the implicit predecessors. */
2065 EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
2066 {
2067 unsigned int w = si->node_mapping[i];
2068
2069 if (bitmap_bit_p (si->deleted, w))
2070 continue;
2071
2072 if (!bitmap_bit_p (si->visited, w))
2073 condense_visit (graph, si, w);
2074
2075 unsigned int t = si->node_mapping[w];
2076 gcc_assert (si->node_mapping[n] == n);
2077 if (si->dfs[t] < si->dfs[n])
2078 si->dfs[n] = si->dfs[t];
2079 }
2080
2081 /* See if any components have been identified. */
2082 if (si->dfs[n] == my_dfs)
2083 {
2084 if (si->scc_stack.length () != 0
2085 && si->dfs[si->scc_stack.last ()] >= my_dfs)
2086 {
2087 /* Find the first node of the SCC and do non-bitmap work. */
2088 bool direct_p = true;
2089 unsigned first = si->scc_stack.length ();
2090 do
2091 {
2092 --first;
2093 unsigned int w = si->scc_stack[first];
2094 si->node_mapping[w] = n;
2095 if (!bitmap_bit_p (graph->direct_nodes, w))
2096 direct_p = false;
2097 }
2098 while (first > 0
2099 && si->dfs[si->scc_stack[first - 1]] >= my_dfs);
2100 if (!direct_p)
2101 bitmap_clear_bit (graph->direct_nodes, n);
2102
2103 /* Want to reduce to node n, push that first. */
2104 si->scc_stack.reserve (1);
2105 si->scc_stack.quick_push (si->scc_stack[first]);
2106 si->scc_stack[first] = n;
2107
2108 unsigned scc_size = si->scc_stack.length () - first;
2109 unsigned split = scc_size / 2;
2110 unsigned carry = scc_size - split * 2;
2111 while (split > 0)
2112 {
2113 for (unsigned i = 0; i < split; ++i)
2114 {
2115 unsigned a = si->scc_stack[first + i];
2116 unsigned b = si->scc_stack[first + split + carry + i];
2117
2118 /* Unify our nodes. */
2119 if (graph->preds[b])
2120 {
2121 if (!graph->preds[a])
2122 std::swap (graph->preds[a], graph->preds[b]);
2123 else
2124 bitmap_ior_into_and_free (graph->preds[a],
2125 &graph->preds[b]);
2126 }
2127 if (graph->implicit_preds[b])
2128 {
2129 if (!graph->implicit_preds[a])
2130 std::swap (graph->implicit_preds[a],
2131 graph->implicit_preds[b]);
2132 else
2133 bitmap_ior_into_and_free (graph->implicit_preds[a],
2134 &graph->implicit_preds[b]);
2135 }
2136 if (graph->points_to[b])
2137 {
2138 if (!graph->points_to[a])
2139 std::swap (graph->points_to[a], graph->points_to[b]);
2140 else
2141 bitmap_ior_into_and_free (graph->points_to[a],
2142 &graph->points_to[b]);
2143 }
2144 }
2145 unsigned remain = split + carry;
2146 split = remain / 2;
2147 carry = remain - split * 2;
2148 }
2149 /* Actually pop the SCC. */
2150 si->scc_stack.truncate (first);
2151 }
2152 bitmap_set_bit (si->deleted, n);
2153 }
2154 else
2155 si->scc_stack.safe_push (n);
2156 }
2157
/* Label pointer equivalences.

   This performs a value numbering of the constraint graph to
   discover which variables will always have the same points-to sets
   under the current set of constraints.

   The way it value numbers is to store the set of points-to bits
   generated by the constraints and graph edges.  This is just used as a
   hash and equality comparison.  The *actual set of points-to bits* is
   completely irrelevant, in that we don't care about being able to
   extract them later.

   The equality values (currently bitmaps) just have to satisfy a few
   constraints, the main ones being:
   1. The combining operation must be order independent.
   2. The end result of a given set of operations must be unique iff the
      combination of input values is unique
   3. Hashable.  */

static void
label_visit (constraint_graph_t graph, class scc_info *si, unsigned int n)
{
  unsigned int i, first_pred;
  bitmap_iterator bi;

  bitmap_set_bit (si->visited, n);

  /* Label and union our incoming edges's points to sets.
     FIRST_PRED remembers the first predecessor with a non-NULL
     points-to set so that allocating a bitmap for N can be delayed
     until a second contributing predecessor is seen.  */
  first_pred = -1U;
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      if (!bitmap_bit_p (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges: self edges and edges from non-pointer
	 (label 0) nodes contribute nothing.  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	{
	  if (!graph->points_to[n])
	    {
	      /* First contributor: just remember it.  Second
		 contributor: allocate and union both in one go.  */
	      if (first_pred == -1U)
		first_pred = w;
	      else
		{
		  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
		  bitmap_ior (graph->points_to[n],
			      graph->points_to[first_pred],
			      graph->points_to[w]);
		}
	    }
	  else
	    bitmap_ior_into (graph->points_to[n], graph->points_to[w]);
	}
    }

  /* Indirect nodes get fresh variables and a new pointer equiv class.  */
  if (!bitmap_bit_p (graph->direct_nodes, n))
    {
      if (!graph->points_to[n])
	{
	  graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	  if (first_pred != -1U)
	    bitmap_copy (graph->points_to[n], graph->points_to[first_pred]);
	}
      /* The ref-node bit makes this set unique to N, forcing a fresh
	 class; record it in the hash table for later lookups.  */
      bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);
      graph->pointer_label[n] = pointer_equiv_class++;
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      ecl->equivalence_class = graph->pointer_label[n];
      return;
    }

  /* If there was only a single non-empty predecessor the pointer equiv
     class is the same.  */
  if (!graph->points_to[n])
    {
      if (first_pred != -1U)
	{
	  graph->pointer_label[n] = graph->pointer_label[first_pred];
	  /* Share the predecessor's bitmap rather than copying it.  */
	  graph->points_to[n] = graph->points_to[first_pred];
	}
      return;
    }

  if (!bitmap_empty_p (graph->points_to[n]))
    {
      /* Hash-cons the points-to set: equal sets get equal labels, and
	 N's bitmap is replaced by the canonical copy.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (pointer_equiv_class_table,
				       graph->points_to[n]);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = pointer_equiv_class++;
      else
	{
	  BITMAP_FREE (graph->points_to[n]);
	  graph->points_to[n] = ecl->labels;
	}
      graph->pointer_label[n] = ecl->equivalence_class;
    }
}
2261
2262 /* Print the pred graph in dot format. */
2263
2264 static void
2265 dump_pred_graph (class scc_info *si, FILE *file)
2266 {
2267 unsigned int i;
2268
2269 /* Only print the graph if it has already been initialized: */
2270 if (!graph)
2271 return;
2272
2273 /* Prints the header of the dot file: */
2274 fprintf (file, "strict digraph {\n");
2275 fprintf (file, " node [\n shape = box\n ]\n");
2276 fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
2277 fprintf (file, "\n // List of nodes and complex constraints in "
2278 "the constraint graph:\n");
2279
2280 /* The next lines print the nodes in the graph together with the
2281 complex constraints attached to them. */
2282 for (i = 1; i < graph->size; i++)
2283 {
2284 if (i == FIRST_REF_NODE)
2285 continue;
2286 if (si->node_mapping[i] != i)
2287 continue;
2288 if (i < FIRST_REF_NODE)
2289 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2290 else
2291 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2292 if (graph->points_to[i]
2293 && !bitmap_empty_p (graph->points_to[i]))
2294 {
2295 if (i < FIRST_REF_NODE)
2296 fprintf (file, "[label=\"%s = {", get_varinfo (i)->name);
2297 else
2298 fprintf (file, "[label=\"*%s = {",
2299 get_varinfo (i - FIRST_REF_NODE)->name);
2300 unsigned j;
2301 bitmap_iterator bi;
2302 EXECUTE_IF_SET_IN_BITMAP (graph->points_to[i], 0, j, bi)
2303 fprintf (file, " %d", j);
2304 fprintf (file, " }\"]");
2305 }
2306 fprintf (file, ";\n");
2307 }
2308
2309 /* Go over the edges. */
2310 fprintf (file, "\n // Edges in the constraint graph:\n");
2311 for (i = 1; i < graph->size; i++)
2312 {
2313 unsigned j;
2314 bitmap_iterator bi;
2315 if (si->node_mapping[i] != i)
2316 continue;
2317 EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[i], 0, j, bi)
2318 {
2319 unsigned from = si->node_mapping[j];
2320 if (from < FIRST_REF_NODE)
2321 fprintf (file, "\"%s\"", get_varinfo (from)->name);
2322 else
2323 fprintf (file, "\"*%s\"", get_varinfo (from - FIRST_REF_NODE)->name);
2324 fprintf (file, " -> ");
2325 if (i < FIRST_REF_NODE)
2326 fprintf (file, "\"%s\"", get_varinfo (i)->name);
2327 else
2328 fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
2329 fprintf (file, ";\n");
2330 }
2331 }
2332
2333 /* Prints the tail of the dot file. */
2334 fprintf (file, "}\n");
2335 }
2336
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the
   scc_info holding the node mapping; the caller must free it with
   free_var_substitution_info after rewrite_constraints.  */

static class scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  scc_info *si = new scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  gcc_obstack_init (&equiv_class_obstack);
  pointer_equiv_class_table = new hash_table<equiv_class_hasher> (511);
  location_equiv_class_table
    = new hash_table<equiv_class_hasher> (511);
  /* Class ids start at 1; 0 is reserved for "non-pointer".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before var-substitution "
	       "in dot format:\n");
      dump_pred_graph (si, dump_file);
      fprintf (dump_file, "\n\n");
    }

  /* Reuse the visited bitmap for the labeling walk.  */
  bitmap_clear (si->visited);
  /* Actually the label the nodes for pointer equivalences  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    if (!bitmap_bit_p (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      equiv_class_label_t ecl;
      ecl = equiv_class_lookup_or_add (location_equiv_class_table, pointed_by);
      if (ecl->equivalence_class == 0)
	ecl->equivalence_class = location_equiv_class++;
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  /* An equal set already owns the canonical bitmap.  */
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = ecl->equivalence_class;

    }

  /* Dump the per-node mapping and labels when details were asked for.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 1; i < FIRST_REF_NODE; i++)
      {
	unsigned j = si->node_mapping[i];
	if (j != i)
	  {
	    fprintf (dump_file, "%s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "Direct" : "Indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file, " mapped to SCC leader node id %d ", j);
	    if (j < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"\n", get_varinfo (j)->name);
	    else
	      fprintf (dump_file, "\"*%s\"\n",
		       get_varinfo (j - FIRST_REF_NODE)->name);
	  }
	else
	  {
	    fprintf (dump_file,
		     "Equivalence classes for %s node id %d ",
		     bitmap_bit_p (graph->direct_nodes, i)
		     ? "direct" : "indirect", i);
	    if (i < FIRST_REF_NODE)
	      fprintf (dump_file, "\"%s\"", get_varinfo (i)->name);
	    else
	      fprintf (dump_file, "\"*%s\"",
		       get_varinfo (i - FIRST_REF_NODE)->name);
	    fprintf (dump_file,
		     ": pointer %d, location %d\n",
		     graph->pointer_label[i], graph->loc_label[i]);
	  }
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 1; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Label 0 means the node was never given a pointer class, i.e.
	 it can never point to anything; its edges are useless.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2470
/* Free information that was only necessary for variable
   substitution.  Releases SI, the per-node label/bitmap arrays on the
   global constraint graph, both equivalence-class hash tables, and
   the obstacks backing their bitmaps.  */

static void
free_var_substitution_info (class scc_info *si)
{
  delete si;
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  delete pointer_equiv_class_table;
  pointer_equiv_class_table = NULL;
  delete location_equiv_class_table;
  location_equiv_class_table = NULL;
  /* The individual equivalence bitmaps live on these obstacks, so a
     bulk release frees them all at once.  */
  obstack_free (&equiv_class_obstack, NULL);
  bitmap_obstack_release (&iteration_obstack);
}
2491
2492 /* Return an existing node that is equivalent to NODE, which has
2493 equivalence class LABEL, if one exists. Return NODE otherwise. */
2494
2495 static unsigned int
2496 find_equivalent_node (constraint_graph_t graph,
2497 unsigned int node, unsigned int label)
2498 {
2499 /* If the address version of this variable is unused, we can
2500 substitute it for anything else with the same label.
2501 Otherwise, we know the pointers are equivalent, but not the
2502 locations, and we can unite them later. */
2503
2504 if (!bitmap_bit_p (graph->address_taken, node))
2505 {
2506 gcc_checking_assert (label < graph->size);
2507
2508 if (graph->eq_rep[label] != -1)
2509 {
2510 /* Unify the two variables since we know they are equivalent. */
2511 if (unite (graph->eq_rep[label], node))
2512 unify_nodes (graph, graph->eq_rep[label], node, false);
2513 return graph->eq_rep[label];
2514 }
2515 else
2516 {
2517 graph->eq_rep[label] = node;
2518 graph->pe_rep[label] = node;
2519 }
2520 }
2521 else
2522 {
2523 gcc_checking_assert (label < graph->size);
2524 graph->pe[node] = label;
2525 if (graph->pe_rep[label] == -1)
2526 graph->pe_rep[label] = node;
2527 }
2528
2529 return node;
2530 }
2531
2532 /* Unite pointer equivalent but not location equivalent nodes in
2533 GRAPH. This may only be performed once variable substitution is
2534 finished. */
2535
2536 static void
2537 unite_pointer_equivalences (constraint_graph_t graph)
2538 {
2539 unsigned int i;
2540
2541 /* Go through the pointer equivalences and unite them to their
2542 representative, if they aren't already. */
2543 for (i = 1; i < FIRST_REF_NODE; i++)
2544 {
2545 unsigned int label = graph->pe[i];
2546 if (label)
2547 {
2548 int label_rep = graph->pe_rep[label];
2549
2550 if (label_rep == -1)
2551 continue;
2552
2553 label_rep = find (label_rep);
2554 if (label_rep >= 0 && unite (label_rep, find (i)))
2555 unify_nodes (graph, label_rep, i, false);
2556 }
2557 }
2558 }
2559
2560 /* Move complex constraints to the GRAPH nodes they belong to. */
2561
2562 static void
2563 move_complex_constraints (constraint_graph_t graph)
2564 {
2565 int i;
2566 constraint_t c;
2567
2568 FOR_EACH_VEC_ELT (constraints, i, c)
2569 {
2570 if (c)
2571 {
2572 struct constraint_expr lhs = c->lhs;
2573 struct constraint_expr rhs = c->rhs;
2574
2575 if (lhs.type == DEREF)
2576 {
2577 insert_into_complex (graph, lhs.var, c);
2578 }
2579 else if (rhs.type == DEREF)
2580 {
2581 if (!(get_varinfo (lhs.var)->is_special_var))
2582 insert_into_complex (graph, rhs.var, c);
2583 }
2584 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2585 && (lhs.offset != 0 || rhs.offset != 0))
2586 {
2587 insert_into_complex (graph, rhs.var, c);
2588 }
2589 }
2590 }
2591 }
2592
2593
2594 /* Optimize and rewrite complex constraints while performing
2595 collapsing of equivalent nodes. SI is the SCC_INFO that is the
2596 result of perform_variable_substitution. */
2597
2598 static void
2599 rewrite_constraints (constraint_graph_t graph,
2600 class scc_info *si)
2601 {
2602 int i;
2603 constraint_t c;
2604
2605 if (flag_checking)
2606 {
2607 for (unsigned int j = 0; j < graph->size; j++)
2608 gcc_assert (find (j) == j);
2609 }
2610
2611 FOR_EACH_VEC_ELT (constraints, i, c)
2612 {
2613 struct constraint_expr lhs = c->lhs;
2614 struct constraint_expr rhs = c->rhs;
2615 unsigned int lhsvar = find (lhs.var);
2616 unsigned int rhsvar = find (rhs.var);
2617 unsigned int lhsnode, rhsnode;
2618 unsigned int lhslabel, rhslabel;
2619
2620 lhsnode = si->node_mapping[lhsvar];
2621 rhsnode = si->node_mapping[rhsvar];
2622 lhslabel = graph->pointer_label[lhsnode];
2623 rhslabel = graph->pointer_label[rhsnode];
2624
2625 /* See if it is really a non-pointer variable, and if so, ignore
2626 the constraint. */
2627 if (lhslabel == 0)
2628 {
2629 if (dump_file && (dump_flags & TDF_DETAILS))
2630 {
2631
2632 fprintf (dump_file, "%s is a non-pointer variable, "
2633 "ignoring constraint:",
2634 get_varinfo (lhs.var)->name);
2635 dump_constraint (dump_file, c);
2636 fprintf (dump_file, "\n");
2637 }
2638 constraints[i] = NULL;
2639 continue;
2640 }
2641
2642 if (rhslabel == 0)
2643 {
2644 if (dump_file && (dump_flags & TDF_DETAILS))
2645 {
2646
2647 fprintf (dump_file, "%s is a non-pointer variable, "
2648 "ignoring constraint:",
2649 get_varinfo (rhs.var)->name);
2650 dump_constraint (dump_file, c);
2651 fprintf (dump_file, "\n");
2652 }
2653 constraints[i] = NULL;
2654 continue;
2655 }
2656
2657 lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
2658 rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
2659 c->lhs.var = lhsvar;
2660 c->rhs.var = rhsvar;
2661 }
2662 }
2663
2664 /* Eliminate indirect cycles involving NODE. Return true if NODE was
2665 part of an SCC, false otherwise. */
2666
2667 static bool
2668 eliminate_indirect_cycles (unsigned int node)
2669 {
2670 if (graph->indirect_cycles[node] != -1
2671 && !bitmap_empty_p (get_varinfo (node)->solution))
2672 {
2673 unsigned int i;
2674 auto_vec<unsigned> queue;
2675 int queuepos;
2676 unsigned int to = find (graph->indirect_cycles[node]);
2677 bitmap_iterator bi;
2678
2679 /* We can't touch the solution set and call unify_nodes
2680 at the same time, because unify_nodes is going to do
2681 bitmap unions into it. */
2682
2683 EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
2684 {
2685 if (find (i) == i && i != to)
2686 {
2687 if (unite (to, i))
2688 queue.safe_push (i);
2689 }
2690 }
2691
2692 for (queuepos = 0;
2693 queue.iterate (queuepos, &i);
2694 queuepos++)
2695 {
2696 unify_nodes (graph, to, i, true);
2697 }
2698 return true;
2699 }
2700 return false;
2701 }
2702
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  /* Worklist: nodes whose solution grew and whose constraints/edges
     therefore need (re-)processing.  */
  changed = BITMAP_ALLOC (NULL);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 1; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || graph->complex[i].length () > 0))
	bitmap_set_bit (changed, i);
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (!bitmap_empty_p (changed))
    {
      unsigned int i;
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Walk the nodes in the order produced by compute_topo_order;
	 per-iteration bitmaps live on iteration_obstack.  */
      auto_vec<unsigned> topo_order = compute_topo_order (graph);
      while (topo_order.length () != 0)
	{
	  i = topo_order.pop ();

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (bitmap_clear_bit (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      vec<constraint_t> complex = graph->complex[i];
	      varinfo_t vi = get_varinfo (i);
	      bool solution_empty;

	      /* Compute the changed set of solution bits.  If anything
	         is in the solution just propagate that.  */
	      if (bitmap_bit_p (vi->solution, anything_id))
		{
		  /* If anything is also in the old solution there is
		     nothing to do.
		     ??? But we shouldn't ended up with "changed" set ...  */
		  if (vi->oldsolution
		      && bitmap_bit_p (vi->oldsolution, anything_id))
		    continue;
		  bitmap_copy (pts, get_varinfo (find (anything_id))->solution);
		}
	      else if (vi->oldsolution)
		/* PTS is the delta: only bits not yet propagated.  */
		bitmap_and_compl (pts, vi->solution, vi->oldsolution);
	      else
		bitmap_copy (pts, vi->solution);

	      /* Nothing new to propagate.  */
	      if (bitmap_empty_p (pts))
		continue;

	      /* Record what has now been propagated.  */
	      if (vi->oldsolution)
		bitmap_ior_into (vi->oldsolution, pts);
	      else
		{
		  vi->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
		  bitmap_copy (vi->oldsolution, pts);
		}

	      solution = vi->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      bitmap expanded_pts = NULL;
	      FOR_EACH_VEC_ELT (complex, j, c)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts, &expanded_pts);
		}
	      BITMAP_FREE (expanded_pts);

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty)
		{
		  bitmap_iterator bi;
		  unsigned eff_escaped_id = find (escaped_id);

		  /* Propagate solution to all successors.
		     TO_REMOVE defers deleting a stale edge by one
		     iteration since clearing a bit in the bitmap being
		     iterated over is not safe.  */
		  unsigned to_remove = ~0U;
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      if (to_remove != ~0U)
			{
			  bitmap_clear_bit (graph->succs[i], to_remove);
			  to_remove = ~0U;
			}
		      unsigned int to = find (j);
		      if (to != j)
			{
			  /* Update the succ graph, avoiding duplicate
			     work.  */
			  to_remove = j;
			  if (! bitmap_set_bit (graph->succs[i], to))
			    continue;
			  /* We eventually end up processing 'to' twice
			     as it is undefined whether bitmap iteration
			     iterates over bits set during iteration.
			     Play safe instead of doing tricks.  */
			}
		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      bitmap tmp = get_varinfo (to)->solution;
		      bool flag = false;

		      /* If we propagate from ESCAPED use ESCAPED as
		         placeholder.  */
		      if (i == eff_escaped_id)
			flag = bitmap_set_bit (tmp, escaped_id);
		      else
			flag = bitmap_ior_into (tmp, pts);

		      /* Successors whose solution grew go back on the
			 worklist.  */
		      if (flag)
			bitmap_set_bit (changed, to);
		    }
		  if (to_remove != ~0U)
		    bitmap_clear_bit (graph->succs[i], to_remove);
		}
	    }
	}
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  BITMAP_FREE (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2874
/* Map from trees to variable infos.  Entries are added by
   insert_vi_for_tree and looked up by lookup_vi_for_tree and
   get_vi_for_tree.  */
static hash_map<tree, varinfo_t> *vi_for_tree;
2877
2878
2879 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2880
2881 static void
2882 insert_vi_for_tree (tree t, varinfo_t vi)
2883 {
2884 gcc_assert (vi);
2885 bool existed = vi_for_tree->put (t, vi);
2886 gcc_assert (!existed);
2887 }
2888
2889 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2890 exist in the map, return NULL, otherwise, return the varinfo we found. */
2891
2892 static varinfo_t
2893 lookup_vi_for_tree (tree t)
2894 {
2895 varinfo_t *slot = vi_for_tree->get (t);
2896 if (slot == NULL)
2897 return NULL;
2898
2899 return *slot;
2900 }
2901
2902 /* Return a printable name for DECL */
2903
2904 static const char *
2905 alias_get_name (tree decl)
2906 {
2907 const char *res = "NULL";
2908 if (dump_file)
2909 {
2910 char *temp = NULL;
2911 if (TREE_CODE (decl) == SSA_NAME)
2912 {
2913 res = get_name (decl);
2914 temp = xasprintf ("%s_%u", res ? res : "", SSA_NAME_VERSION (decl));
2915 }
2916 else if (HAS_DECL_ASSEMBLER_NAME_P (decl)
2917 && DECL_ASSEMBLER_NAME_SET_P (decl))
2918 res = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME_RAW (decl));
2919 else if (DECL_P (decl))
2920 {
2921 res = get_name (decl);
2922 if (!res)
2923 temp = xasprintf ("D.%u", DECL_UID (decl));
2924 }
2925
2926 if (temp)
2927 {
2928 res = ggc_strdup (temp);
2929 free (temp);
2930 }
2931 }
2932
2933 return res;
2934 }
2935
2936 /* Find the variable id for tree T in the map.
2937 If T doesn't exist in the map, create an entry for it and return it. */
2938
2939 static varinfo_t
2940 get_vi_for_tree (tree t)
2941 {
2942 varinfo_t *slot = vi_for_tree->get (t);
2943 if (slot == NULL)
2944 {
2945 unsigned int id = create_variable_info_for (t, alias_get_name (t), false);
2946 return get_varinfo (id);
2947 }
2948
2949 return *slot;
2950 }
2951
2952 /* Get a scalar constraint expression for a new temporary variable. */
2953
2954 static struct constraint_expr
2955 new_scalar_tmp_constraint_exp (const char *name, bool add_id)
2956 {
2957 struct constraint_expr tmp;
2958 varinfo_t vi;
2959
2960 vi = new_var_info (NULL_TREE, name, add_id);
2961 vi->offset = 0;
2962 vi->size = -1;
2963 vi->fullsize = -1;
2964 vi->is_full_var = 1;
2965 vi->is_reg_var = 1;
2966
2967 tmp.var = vi->id;
2968 tmp.type = SCALAR;
2969 tmp.offset = 0;
2970
2971 return tmp;
2972 }
2973
/* Get a constraint expression vector from an SSA_VAR_P node.
   If ADDRESS_P is true, the result is intended to have its address
   taken.  */

static void
get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (TREE_CODE (t) == SSA_NAME || DECL_P (t));

  if (TREE_CODE (t) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      /* For parameters, get at the points-to set for the actual parm
	 decl.  */
      if (TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL)
	{
	  get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
	  return;
	}
      /* For undefined SSA names return nothing.  */
      else if (!ssa_defined_default_def_p (t))
	{
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	  return;
	}
    }

  /* For global variables resort to the alias target.  */
  if (VAR_P (t) && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
    {
      varpool_node *node = varpool_node::get (t);
      if (node && node->alias && node->analyzed)
	{
	  node = node->ultimate_alias_target ();
	  /* Canonicalize the PT uid of all aliases to the ultimate target.
	     ??? Hopefully the set of aliases can't change in a way that
	     changes the ultimate alias target.  */
	  gcc_assert ((! DECL_PT_UID_SET_P (node->decl)
		       || DECL_PT_UID (node->decl) == DECL_UID (node->decl))
		      && (! DECL_PT_UID_SET_P (t)
			  || DECL_PT_UID (t) == DECL_UID (node->decl)));
	  DECL_PT_UID (t) = DECL_UID (node->decl);
	  t = node->decl;
	}

      /* If this is decl may bind to NULL note that.  */
      if (address_p
	  && (! node || ! node->nonzero_address ()))
	{
	  /* Push NOTHING in addition to the variable below so the
	     address set includes the possible NULL value.  */
	  cexpr.var = nothing_id;
	  cexpr.type = SCALAR;
	  cexpr.offset = 0;
	  results->safe_push (cexpr);
	}
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p
      && !vi->is_full_var)
    {
      /* Walk the field chain starting at VI itself.  */
      for (; vi; vi = vi_next (vi))
	{
	  cexpr.var = vi->id;
	  results->safe_push (cexpr);
	}
      return;
    }

  results->safe_push (cexpr);
}
3057
3058 /* Process constraint T, performing various simplifications and then
3059 adding it to our list of overall constraints. */
3060
3061 static void
3062 process_constraint (constraint_t t)
3063 {
3064 struct constraint_expr rhs = t->rhs;
3065 struct constraint_expr lhs = t->lhs;
3066
3067 gcc_assert (rhs.var < varmap.length ());
3068 gcc_assert (lhs.var < varmap.length ());
3069
3070 /* If we didn't get any useful constraint from the lhs we get
3071 &ANYTHING as fallback from get_constraint_for. Deal with
3072 it here by turning it into *ANYTHING. */
3073 if (lhs.type == ADDRESSOF
3074 && lhs.var == anything_id)
3075 lhs.type = DEREF;
3076
3077 /* ADDRESSOF on the lhs is invalid. */
3078 gcc_assert (lhs.type != ADDRESSOF);
3079
3080 /* We shouldn't add constraints from things that cannot have pointers.
3081 It's not completely trivial to avoid in the callers, so do it here. */
3082 if (rhs.type != ADDRESSOF
3083 && !get_varinfo (rhs.var)->may_have_pointers)
3084 return;
3085
3086 /* Likewise adding to the solution of a non-pointer var isn't useful. */
3087 if (!get_varinfo (lhs.var)->may_have_pointers)
3088 return;
3089
3090 /* This can happen in our IR with things like n->a = *p */
3091 if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
3092 {
3093 /* Split into tmp = *rhs, *lhs = tmp */
3094 struct constraint_expr tmplhs;
3095 tmplhs = new_scalar_tmp_constraint_exp ("doubledereftmp", true);
3096 process_constraint (new_constraint (tmplhs, rhs));
3097 process_constraint (new_constraint (lhs, tmplhs));
3098 }
3099 else if ((rhs.type != SCALAR || rhs.offset != 0) && lhs.type == DEREF)
3100 {
3101 /* Split into tmp = &rhs, *lhs = tmp */
3102 struct constraint_expr tmplhs;
3103 tmplhs = new_scalar_tmp_constraint_exp ("derefaddrtmp", true);
3104 process_constraint (new_constraint (tmplhs, rhs));
3105 process_constraint (new_constraint (lhs, tmplhs));
3106 }
3107 else
3108 {
3109 gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
3110 if (rhs.type == ADDRESSOF)
3111 get_varinfo (get_varinfo (rhs.var)->head)->address_taken = true;
3112 constraints.safe_push (t);
3113 }
3114 }
3115
3116
3117 /* Return the position, in bits, of FIELD_DECL from the beginning of its
3118 structure. */
3119
3120 static HOST_WIDE_INT
3121 bitpos_of_field (const tree fdecl)
3122 {
3123 if (!tree_fits_shwi_p (DECL_FIELD_OFFSET (fdecl))
3124 || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
3125 return -1;
3126
3127 return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
3128 + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
3129 }
3130
3131
3132 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
3133 resulting constraint expressions in *RESULTS. */
3134
static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
			       vec<ce_s> *results)
{
  struct constraint_expr c;
  unsigned int j, n;
  HOST_WIDE_INT rhsoffset;

  /* If we do not do field-sensitive PTA adding offsets to pointers
     does not change the points-to solution.  */
  if (!use_field_sensitive)
    {
      get_constraint_for_rhs (ptr, results);
      return;
    }

  /* If the offset is not a non-negative integer constant that fits
     in a HOST_WIDE_INT, we have to fall back to a conservative
     solution which includes all sub-fields of all pointed-to
     variables of ptr.  */
  if (offset == NULL_TREE
      || TREE_CODE (offset) != INTEGER_CST)
    rhsoffset = UNKNOWN_OFFSET;
  else
    {
      /* Sign-extend the offset.  */
      offset_int soffset = offset_int::from (wi::to_wide (offset), SIGNED);
      if (!wi::fits_shwi_p (soffset))
	rhsoffset = UNKNOWN_OFFSET;
      else
	{
	  /* Make sure the bit-offset also fits.  */
	  HOST_WIDE_INT rhsunitoffset = soffset.to_shwi ();
	  rhsoffset = rhsunitoffset * (unsigned HOST_WIDE_INT) BITS_PER_UNIT;
	  /* Detect wraparound of the byte-to-bit conversion by checking
	     the reverse division.  */
	  if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
	    rhsoffset = UNKNOWN_OFFSET;
	}
    }

  get_constraint_for_rhs (ptr, results);
  if (rhsoffset == 0)
    return;

  /* As we are eventually appending to the solution do not use
     vec::iterate here.  */
  n = results->length ();
  for (j = 0; j < n; j++)
    {
      varinfo_t curr;
      c = (*results)[j];
      curr = get_varinfo (c.var);

      if (c.type == ADDRESSOF
	  /* If this varinfo represents a full variable just use it.  */
	  && curr->is_full_var)
	;
      else if (c.type == ADDRESSOF
	       /* If we do not know the offset add all subfields.  */
	       && rhsoffset == UNKNOWN_OFFSET)
	{
	  /* Walk the whole sub-field chain of the variable, appending
	     every field that is not already the current one.  */
	  varinfo_t temp = get_varinfo (curr->head);
	  do
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      if (c2.var != c.var)
		results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	  while (temp);
	}
      else if (c.type == ADDRESSOF)
	{
	  varinfo_t temp;
	  unsigned HOST_WIDE_INT offset = curr->offset + rhsoffset;

	  /* If curr->offset + rhsoffset is less than zero adjust it.  */
	  if (rhsoffset < 0
	      && curr->offset < offset)
	    offset = 0;

	  /* We have to include all fields that overlap the current
	     field shifted by rhsoffset.  And we include at least
	     the last or the first field of the variable to represent
	     reachability of off-bound addresses, in particular &object + 1,
	     conservatively correct.  */
	  temp = first_or_preceding_vi_for_offset (curr, offset);
	  c.var = temp->id;
	  c.offset = 0;
	  temp = vi_next (temp);
	  /* Also append every following field still overlapping the
	     shifted access.  */
	  while (temp
		 && temp->offset < offset + curr->size)
	    {
	      struct constraint_expr c2;
	      c2.var = temp->id;
	      c2.type = ADDRESSOF;
	      c2.offset = 0;
	      results->safe_push (c2);
	      temp = vi_next (temp);
	    }
	}
      else if (c.type == SCALAR)
	{
	  gcc_assert (c.offset == 0);
	  c.offset = rhsoffset;
	}
      else
	/* We shouldn't get any DEREFs here.  */
	gcc_unreachable ();

      (*results)[j] = c;
    }
}
3250
3251
3252 /* Given a COMPONENT_REF T, return the constraint_expr vector for it.
3253 If address_p is true the result will be taken its address of.
3254 If lhs_p is true then the constraint expression is assumed to be used
3255 as the lhs. */
3256
static void
get_constraint_for_component_ref (tree t, vec<ce_s> *results,
				  bool address_p, bool lhs_p)
{
  tree orig_t = t;
  poly_int64 bitsize = -1;
  poly_int64 bitmaxsize = -1;
  poly_int64 bitpos;
  bool reverse;
  tree forzero;

  /* Some people like to do cute things like take the address of
     &0->a.b.  */
  forzero = t;
  while (handled_component_p (forzero)
	 || INDIRECT_REF_P (forzero)
	 || TREE_CODE (forzero) == MEM_REF)
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      results->safe_push (temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize, &reverse);

  /* We can end up here for component references on a
     VIEW_CONVERT_EXPR <>(&foobar) or things like a
     BIT_FIELD_REF <&MEM[(void *)&b + 4B], ...>.  So for
     symbolic constants simply give up.  */
  if (TREE_CODE (t) == ADDR_EXPR)
    {
      constraint_expr result;
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
      results->safe_push (result);
      return;
    }

  /* Avoid creating pointer-offset constraints, so handle MEM_REF
     offsets directly.  Pretend to take the address of the base,
     we'll take care of adding the required subset of sub-fields below.  */
  if (TREE_CODE (t) == MEM_REF
      && !integer_zerop (TREE_OPERAND (t, 0)))
    {
      poly_offset_int off = mem_ref_offset (t);
      off <<= LOG2_BITS_PER_UNIT;
      off += bitpos;
      poly_int64 off_hwi;
      if (off.to_shwi (&off_hwi))
	bitpos = off_hwi;
      else
	{
	  /* The combined offset does not fit; fall back to an unknown
	     position and extent.  */
	  bitpos = 0;
	  bitmaxsize = -1;
	}
      get_constraint_for_1 (TREE_OPERAND (t, 0), results, false, lhs_p);
      do_deref (results);
    }
  else
    get_constraint_for_1 (t, results, true, lhs_p);

  /* Strip off nothing_id.  */
  if (results->length () == 2)
    {
      gcc_assert ((*results)[0].var == nothing_id);
      results->unordered_remove (0);
    }
  gcc_assert (results->length () == 1);
  struct constraint_expr &result = results->last ();

  if (result.type == SCALAR
      && get_varinfo (result.var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result.offset = 0;
  else if (result.type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if (maybe_lt (poly_uint64 (bitpos), get_varinfo (result.var)->fullsize)
	  && maybe_ne (bitmaxsize, 0))
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = result;
	  varinfo_t curr;
	  results->pop ();
	  cexpr.offset = 0;
	  for (curr = get_varinfo (cexpr.var); curr; curr = vi_next (curr))
	    {
	      if (ranges_maybe_overlap_p (poly_int64 (curr->offset),
					  curr->size, bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  results->safe_push (cexpr);
		  /* When taking the address a single overlapping field
		     suffices; otherwise collect all of them.  */
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p && results->length () == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != 0)
		curr = vi_next (curr);
	      cexpr.var = curr->id;
	      results->safe_push (cexpr);
	    }
	  else if (results->length () == 0)
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    /* Or accessing only padding via type-punning to a type
	       that has a field just in padding space.  */
	    {
	      cexpr.type = SCALAR;
	      cexpr.var = anything_id;
	      cexpr.offset = 0;
	      results->safe_push (cexpr);
	    }
	}
      else if (known_eq (bitmaxsize, 0))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable, "
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (result.type == DEREF)
    {
      /* If we do not know exactly where the access goes say so.  Note
	 that only for non-structure accesses we know that we access
	 at most one subfield of any variable.  */
      HOST_WIDE_INT const_bitpos;
      if (!bitpos.is_constant (&const_bitpos)
	  || const_bitpos == -1
	  || maybe_ne (bitsize, bitmaxsize)
	  || AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
	  || result.offset == UNKNOWN_OFFSET)
	result.offset = UNKNOWN_OFFSET;
      else
	result.offset += const_bitpos;
    }
  else if (result.type == ADDRESSOF)
    {
      /* We can end up here for component references on constants like
	 VIEW_CONVERT_EXPR <>({ 0, 1, 2, 3 })[i].  */
      result.type = SCALAR;
      result.var = anything_id;
      result.offset = 0;
    }
  else
    gcc_unreachable ();
}
3429
3430
3431 /* Dereference the constraint expression CONS, and return the result.
3432 DEREF (ADDRESSOF) = SCALAR
3433 DEREF (SCALAR) = DEREF
3434 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
3435 This is needed so that we can handle dereferencing DEREF constraints. */
3436
3437 static void
3438 do_deref (vec<ce_s> *constraints)
3439 {
3440 struct constraint_expr *c;
3441 unsigned int i = 0;
3442
3443 FOR_EACH_VEC_ELT (*constraints, i, c)
3444 {
3445 if (c->type == SCALAR)
3446 c->type = DEREF;
3447 else if (c->type == ADDRESSOF)
3448 c->type = SCALAR;
3449 else if (c->type == DEREF)
3450 {
3451 struct constraint_expr tmplhs;
3452 tmplhs = new_scalar_tmp_constraint_exp ("dereftmp", true);
3453 process_constraint (new_constraint (tmplhs, *c));
3454 c->var = tmplhs.var;
3455 }
3456 else
3457 gcc_unreachable ();
3458 }
3459 }
3460
3461 /* Given a tree T, return the constraint expression for taking the
3462 address of it. */
3463
3464 static void
3465 get_constraint_for_address_of (tree t, vec<ce_s> *results)
3466 {
3467 struct constraint_expr *c;
3468 unsigned int i;
3469
3470 get_constraint_for_1 (t, results, true, true);
3471
3472 FOR_EACH_VEC_ELT (*results, i, c)
3473 {
3474 if (c->type == DEREF)
3475 c->type = SCALAR;
3476 else
3477 c->type = ADDRESSOF;
3478 }
3479 }
3480
3481 /* Given a tree T, return the constraint expression for it. */
3482
static void
get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
		      bool lhs_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.

     Do not do that if -fno-delete-null-pointer-checks though, because
     in that case *NULL does not fail, so it _should_ alias *anything.
     It is not worth adding a new option or renaming the existing one,
     since this case is relatively obscure.  */
  if ((TREE_CODE (t) == INTEGER_CST
       && integer_zerop (t))
      /* The only valid CONSTRUCTORs in gimple with pointer typed
	 elements are zero-initializer.  But in IPA mode we also
	 process global initializers, so verify at least.  */
      || (TREE_CODE (t) == CONSTRUCTOR
	  && CONSTRUCTOR_NELTS (t) == 0))
    {
      if (flag_delete_null_pointer_checks)
	temp.var = nothing_id;
      else
	temp.var = nonlocal_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* String constants are read-only, ideally we'd have a CONST_DECL
     for those.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = string_id;
      temp.type = SCALAR;
      temp.offset = 0;
      results->safe_push (temp);
      return;
    }

  /* Dispatch on the tree code class; anything unhandled falls through
     to the conservative &ANYTHING default at the bottom.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    get_constraint_for_address_of (TREE_OPERAND (t, 0), results);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case MEM_REF:
	    {
	      struct constraint_expr cs;
	      varinfo_t vi, curr;
	      get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
					     TREE_OPERAND (t, 1), results);
	      do_deref (results);

	      /* If we are not taking the address then make sure to process
		 all subvariables we might access.  */
	      if (address_p)
		return;

	      cs = results->last ();
	      if (cs.type == DEREF
		  && type_can_have_subvars (TREE_TYPE (t)))
		{
		  /* For dereferences this means we have to defer it
		     to solving time.  */
		  results->last ().offset = UNKNOWN_OFFSET;
		  return;
		}
	      if (cs.type != SCALAR)
		return;

	      vi = get_varinfo (cs.var);
	      curr = vi_next (vi);
	      if (!vi->is_full_var
		  && curr)
		{
		  /* Append every following sub-field that still lies
		     within the size of the accessed type.  */
		  unsigned HOST_WIDE_INT size;
		  if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
		    size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
		  else
		    size = -1;
		  for (; curr; curr = vi_next (curr))
		    {
		      if (curr->offset - vi->offset < size)
			{
			  cs.var = curr->id;
			  results->safe_push (cs);
			}
		      else
			break;
		    }
		}
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	  case IMAGPART_EXPR:
	  case REALPART_EXPR:
	  case BIT_FIELD_REF:
	    get_constraint_for_component_ref (t, results, address_p, lhs_p);
	    return;
	  case VIEW_CONVERT_EXPR:
	    /* A view-convert does not change what is pointed to.  */
	    get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p,
				  lhs_p);
	    return;
	  /* We are missing handling for TARGET_MEM_REF here.  */
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  case CONSTRUCTOR:
	    {
	      /* Collect the union of the constraints of all elements.  */
	      unsigned int i;
	      tree val;
	      auto_vec<ce_s> tmp;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
		{
		  struct constraint_expr *rhsp;
		  unsigned j;
		  get_constraint_for_1 (val, &tmp, address_p, lhs_p);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    results->safe_push (*rhsp);
		  tmp.truncate (0);
		}
	      /* We do not know whether the constructor was complete,
		 so technically we have to add &NOTHING or &ANYTHING
		 like we do for an empty constructor as well.  */
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    case tcc_constant:
      {
	/* We cannot refer to automatic variables through constants.  */
	temp.type = ADDRESSOF;
	temp.var = nonlocal_id;
	temp.offset = 0;
	results->safe_push (temp);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  results->safe_push (temp);
}
3668
3669 /* Given a gimple tree T, return the constraint expression vector for it. */
3670
3671 static void
3672 get_constraint_for (tree t, vec<ce_s> *results)
3673 {
3674 gcc_assert (results->length () == 0);
3675
3676 get_constraint_for_1 (t, results, false, true);
3677 }
3678
3679 /* Given a gimple tree T, return the constraint expression vector for it
3680 to be used as the rhs of a constraint. */
3681
3682 static void
3683 get_constraint_for_rhs (tree t, vec<ce_s> *results)
3684 {
3685 gcc_assert (results->length () == 0);
3686
3687 get_constraint_for_1 (t, results, false, false);
3688 }
3689
3690
3691 /* Efficiently generates constraints from all entries in *RHSC to all
3692 entries in *LHSC. */
3693
3694 static void
3695 process_all_all_constraints (const vec<ce_s> &lhsc,
3696 const vec<ce_s> &rhsc)
3697 {
3698 struct constraint_expr *lhsp, *rhsp;
3699 unsigned i, j;
3700
3701 if (lhsc.length () <= 1 || rhsc.length () <= 1)
3702 {
3703 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3704 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
3705 process_constraint (new_constraint (*lhsp, *rhsp));
3706 }
3707 else
3708 {
3709 struct constraint_expr tmp;
3710 tmp = new_scalar_tmp_constraint_exp ("allalltmp", true);
3711 FOR_EACH_VEC_ELT (rhsc, i, rhsp)
3712 process_constraint (new_constraint (tmp, *rhsp));
3713 FOR_EACH_VEC_ELT (lhsc, i, lhsp)
3714 process_constraint (new_constraint (*lhsp, tmp));
3715 }
3716 }
3717
3718 /* Handle aggregate copies by expanding into copies of the respective
3719 fields of the structures. */
3720
static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr *lhsp, *rhsp;
  auto_vec<ce_s> lhsc;
  auto_vec<ce_s> rhsc;
  unsigned j;

  get_constraint_for (lhsop, &lhsc);
  get_constraint_for_rhs (rhsop, &rhsc);
  lhsp = &lhsc[0];
  rhsp = &rhsc[0];
  /* If either side is accessed indirectly (or the lhs is completely
     unknown) we cannot match fields pairwise; emit all-to-all copies
     with unknown offsets instead.  */
  if (lhsp->type == DEREF
      || (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
      || rhsp->type == DEREF)
    {
      if (lhsp->type == DEREF)
	{
	  gcc_assert (lhsc.length () == 1);
	  lhsp->offset = UNKNOWN_OFFSET;
	}
      if (rhsp->type == DEREF)
	{
	  gcc_assert (rhsc.length () == 1);
	  rhsp->offset = UNKNOWN_OFFSET;
	}
      process_all_all_constraints (lhsc, rhsc);
    }
  else if (lhsp->type == SCALAR
	   && (rhsp->type == SCALAR
	       || rhsp->type == ADDRESSOF))
    {
      HOST_WIDE_INT lhssize, lhsoffset;
      HOST_WIDE_INT rhssize, rhsoffset;
      bool reverse;
      unsigned k = 0;
      if (!get_ref_base_and_extent_hwi (lhsop, &lhsoffset, &lhssize, &reverse)
	  || !get_ref_base_and_extent_hwi (rhsop, &rhsoffset, &rhssize,
					   &reverse))
	{
	  process_all_all_constraints (lhsc, rhsc);
	  return;
	}
      /* Walk the sub-fields of both sides in lockstep; J indexes the
	 lhs fields, K the rhs fields, and only one of them advances
	 per iteration.  */
      for (j = 0; lhsc.iterate (j, &lhsp);)
	{
	  varinfo_t lhsv, rhsv;
	  rhsp = &rhsc[k];
	  lhsv = get_varinfo (lhsp->var);
	  rhsv = get_varinfo (rhsp->var);
	  if (lhsv->may_have_pointers
	      && (lhsv->is_full_var
		  || rhsv->is_full_var
		  /* Adding the opposite side's access offset to both
		     field offsets compares the fields relative to their
		     respective access start while keeping the values
		     non-negative.  */
		  || ranges_overlap_p (lhsv->offset + rhsoffset, lhsv->size,
				       rhsv->offset + lhsoffset, rhsv->size)))
	    process_constraint (new_constraint (*lhsp, *rhsp));
	  /* Advance whichever field ends first in the common coordinate
	     space; stop once the rhs fields are exhausted.  */
	  if (!rhsv->is_full_var
	      && (lhsv->is_full_var
		  || (lhsv->offset + rhsoffset + lhsv->size
		      > rhsv->offset + lhsoffset + rhsv->size)))
	    {
	      ++k;
	      if (k >= rhsc.length ())
		break;
	    }
	  else
	    ++j;
	}
    }
  else
    gcc_unreachable ();
}
3792
3793 /* Create constraints ID = { rhsc }. */
3794
3795 static void
3796 make_constraints_to (unsigned id, const vec<ce_s> &rhsc)
3797 {
3798 struct constraint_expr *c;
3799 struct constraint_expr includes;
3800 unsigned int j;
3801
3802 includes.var = id;
3803 includes.offset = 0;
3804 includes.type = SCALAR;
3805
3806 FOR_EACH_VEC_ELT (rhsc, j, c)
3807 process_constraint (new_constraint (includes, *c));
3808 }
3809
3810 /* Create a constraint ID = OP. */
3811
3812 static void
3813 make_constraint_to (unsigned id, tree op)
3814 {
3815 auto_vec<ce_s> rhsc;
3816 get_constraint_for_rhs (op, &rhsc);
3817 make_constraints_to (id, rhsc);
3818 }
3819
3820 /* Create a constraint ID = &FROM. */
3821
3822 static void
3823 make_constraint_from (varinfo_t vi, int from)
3824 {
3825 struct constraint_expr lhs, rhs;
3826
3827 lhs.var = vi->id;
3828 lhs.offset = 0;
3829 lhs.type = SCALAR;
3830
3831 rhs.var = from;
3832 rhs.offset = 0;
3833 rhs.type = ADDRESSOF;
3834 process_constraint (new_constraint (lhs, rhs));
3835 }
3836
3837 /* Create a constraint ID = FROM. */
3838
3839 static void
3840 make_copy_constraint (varinfo_t vi, int from)
3841 {
3842 struct constraint_expr lhs, rhs;
3843
3844 lhs.var = vi->id;
3845 lhs.offset = 0;
3846 lhs.type = SCALAR;
3847
3848 rhs.var = from;
3849 rhs.offset = 0;
3850 rhs.type = SCALAR;
3851 process_constraint (new_constraint (lhs, rhs));
3852 }
3853
3854 /* Make constraints necessary to make OP escape. */
3855
static void
make_escape_constraint (tree op)
{
  /* ESCAPED = OP, i.e. everything OP's rhs constraints cover escapes.  */
  make_constraint_to (escaped_id, op);
}
3861
3862 /* Make constraint necessary to make all indirect references
3863 from VI escape. */
3864
3865 static void
3866 make_indirect_escape_constraint (varinfo_t vi)
3867 {
3868 struct constraint_expr lhs, rhs;
3869 /* escaped = *(VAR + UNKNOWN); */
3870 lhs.type = SCALAR;
3871 lhs.var = escaped_id;
3872 lhs.offset = 0;
3873 rhs.type = DEREF;
3874 rhs.var = vi->id;
3875 rhs.offset = UNKNOWN_OFFSET;
3876 process_constraint (new_constraint (lhs, rhs));
3877 }
3878
/* Add constraints such that the solution of VI is transitively closed.  */
3880
3881 static void
3882 make_transitive_closure_constraints (varinfo_t vi)
3883 {
3884 struct constraint_expr lhs, rhs;
3885
3886 /* VAR = *(VAR + UNKNOWN); */
3887 lhs.type = SCALAR;
3888 lhs.var = vi->id;
3889 lhs.offset = 0;
3890 rhs.type = DEREF;
3891 rhs.var = vi->id;
3892 rhs.offset = UNKNOWN_OFFSET;
3893 process_constraint (new_constraint (lhs, rhs));
3894 }
3895
/* Add constraints such that the solution of VI has all subvariables added.  */
3897
3898 static void
3899 make_any_offset_constraints (varinfo_t vi)
3900 {
3901 struct constraint_expr lhs, rhs;
3902
3903 /* VAR = VAR + UNKNOWN; */
3904 lhs.type = SCALAR;
3905 lhs.var = vi->id;
3906 lhs.offset = 0;
3907 rhs.type = SCALAR;
3908 rhs.var = vi->id;
3909 rhs.offset = UNKNOWN_OFFSET;
3910 process_constraint (new_constraint (lhs, rhs));
3911 }
3912
/* Temporary storage for fake var decls; provides the memory for the
   VAR_DECL nodes built by build_fake_var_decl below.  */
struct obstack fake_var_decl_obstack;
3915
3916 /* Build a fake VAR_DECL acting as referrer to a DECL_UID. */
3917
static tree
build_fake_var_decl (tree type)
{
  /* Allocate the node from the dedicated obstack rather than GC memory
     and zero-initialize it.  */
  tree decl = (tree) XOBNEW (&fake_var_decl_obstack, struct tree_var_decl);
  memset (decl, 0, sizeof (struct tree_var_decl));
  TREE_SET_CODE (decl, VAR_DECL);
  TREE_TYPE (decl) = type;
  DECL_UID (decl) = allocate_decl_uid ();
  /* NOTE(review): -1 appears to mark the decl as having no separate
     points-to UID of its own — confirm against DECL_PT_UID semantics.  */
  SET_DECL_PT_UID (decl, -1);
  /* Compute size/alignment now that the type is set.  */
  layout_decl (decl, 0);
  return decl;
}
3930
3931 /* Create a new artificial heap variable with NAME.
3932 Return the created variable. */
3933
3934 static varinfo_t
3935 make_heapvar (const char *name, bool add_id)
3936 {
3937 varinfo_t vi;
3938 tree heapvar;
3939
3940 heapvar = build_fake_var_decl (ptr_type_node);
3941 DECL_EXTERNAL (heapvar) = 1;
3942
3943 vi = new_var_info (heapvar, name, add_id);
3944 vi->is_heap_var = true;
3945 vi->is_unknown_size_var = true;
3946 vi->offset = 0;
3947 vi->fullsize = ~0;
3948 vi->size = ~0;
3949 vi->is_full_var = true;
3950 insert_vi_for_tree (heapvar, vi);
3951
3952 return vi;
3953 }
3954
3955 /* Create a new artificial heap variable with NAME and make a
3956 constraint from it to LHS. Set flags according to a tag used
3957 for tracking restrict pointers. */
3958
3959 static varinfo_t
3960 make_constraint_from_restrict (varinfo_t lhs, const char *name, bool add_id)
3961 {
3962 varinfo_t vi = make_heapvar (name, add_id);
3963 vi->is_restrict_var = 1;
3964 vi->is_global_var = 1;
3965 vi->may_have_pointers = 1;
3966 make_constraint_from (lhs, vi->id);
3967 return vi;
3968 }
3969
3970 /* Create a new artificial heap variable with NAME and make a
3971 constraint from it to LHS. Set flags according to a tag used
3972 for tracking restrict pointers and make the artificial heap
3973 point to global memory. */
3974
3975 static varinfo_t
3976 make_constraint_from_global_restrict (varinfo_t lhs, const char *name,
3977 bool add_id)
3978 {
3979 varinfo_t vi = make_constraint_from_restrict (lhs, name, add_id);
3980 make_copy_constraint (vi, nonlocal_id);
3981 return vi;
3982 }
3983
/* In IPA mode there are varinfos for different aspects of each
   function designator.  One for the points-to set of the return
   value, one for the variables that are clobbered by the function,
   one for its uses and one for each parameter (including a single
   glob for remaining variadic arguments).  */
3989
/* Part indices within a function's varinfo (see comment above).  */
enum { fi_clobbers = 1, fi_uses = 2,
       fi_static_chain = 3, fi_result = 4, fi_parm_base = 5 };
3992
3993 /* Get a constraint for the requested part of a function designator FI
3994 when operating in IPA mode. */
3995
3996 static struct constraint_expr
3997 get_function_part_constraint (varinfo_t fi, unsigned part)
3998 {
3999 struct constraint_expr c;
4000
4001 gcc_assert (in_ipa_mode);
4002
4003 if (fi->id == anything_id)
4004 {
4005 /* ??? We probably should have a ANYFN special variable. */
4006 c.var = anything_id;
4007 c.offset = 0;
4008 c.type = SCALAR;
4009 }
4010 else if (fi->decl && TREE_CODE (fi->decl) == FUNCTION_DECL)
4011 {
4012 varinfo_t ai = first_vi_for_offset (fi, part);
4013 if (ai)
4014 c.var = ai->id;
4015 else
4016 c.var = anything_id;
4017 c.offset = 0;
4018 c.type = SCALAR;
4019 }
4020 else
4021 {
4022 c.var = fi->id;
4023 c.offset = part;
4024 c.type = DEREF;
4025 }
4026
4027 return c;
4028 }
4029
/* Produce constraints for argument ARG of call STMT with eaf flags
   FLAGS.  RESULTS is array holding constraints for return value.
   CALLESCAPE_ID is variable where call local escapes are added.
   WRITES_GLOBAL_MEMORY is true if callee may write global memory.  */
4034
4035 static void
4036 handle_call_arg (gcall *stmt, tree arg, vec<ce_s> *results, int flags,
4037 int callescape_id, bool writes_global_memory)
4038 {
4039 int relevant_indirect_flags = EAF_NO_INDIRECT_CLOBBER | EAF_NO_INDIRECT_READ
4040 | EAF_NO_INDIRECT_ESCAPE;
4041 int relevant_flags = relevant_indirect_flags
4042 | EAF_NO_DIRECT_CLOBBER
4043 | EAF_NO_DIRECT_READ
4044 | EAF_NO_DIRECT_ESCAPE;
4045 if (gimple_call_lhs (stmt))
4046 {
4047 relevant_flags |= EAF_NOT_RETURNED_DIRECTLY | EAF_NOT_RETURNED_INDIRECTLY;
4048 relevant_indirect_flags |= EAF_NOT_RETURNED_INDIRECTLY;
4049
4050 /* If value is never read from it can not be returned indirectly
4051 (except through the escape solution).
4052 For all flags we get these implications right except for
4053 not_returned because we miss return functions in ipa-prop. */
4054
4055 if (flags & EAF_NO_DIRECT_READ)
4056 flags |= EAF_NOT_RETURNED_INDIRECTLY;
4057 }
4058
4059 /* If the argument is not used we can ignore it.
4060 Similarly argument is invisile for us if it not clobbered, does not
4061 escape, is not read and can not be returned. */
4062 if ((flags & EAF_UNUSED) || ((flags & relevant_flags) == relevant_flags))
4063 return;
4064
4065 /* Produce varinfo for direct accesses to ARG. */
4066 varinfo_t tem = new_var_info (NULL_TREE, "callarg", true);
4067 tem->is_reg_var = true;
4068 make_constraint_to (tem->id, arg);
4069 make_any_offset_constraints (tem);
4070
4071 bool callarg_transitive = false;
4072
4073 /* As an compile time optimization if we make no difference between
4074 direct and indirect accesses make arg transitively closed.
4075 This avoids the need to build indir arg and do everything twice. */
4076 if (((flags & EAF_NO_INDIRECT_CLOBBER) != 0)
4077 == ((flags & EAF_NO_DIRECT_CLOBBER) != 0)
4078 && (((flags & EAF_NO_INDIRECT_READ) != 0)
4079 == ((flags & EAF_NO_DIRECT_READ) != 0))
4080 && (((flags & EAF_NO_INDIRECT_ESCAPE) != 0)
4081 == ((flags & EAF_NO_DIRECT_ESCAPE) != 0))
4082 && (((flags & EAF_NOT_RETURNED_INDIRECTLY) != 0)
4083 == ((flags & EAF_NOT_RETURNED_DIRECTLY) != 0)))
4084 {
4085 make_transitive_closure_constraints (tem);
4086 callarg_transitive = true;
4087 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4088 }
4089
4090 /* If necessary, produce varinfo for indirect accesses to ARG. */
4091 varinfo_t indir_tem = NULL;
4092 if (!callarg_transitive
4093 && (flags & relevant_indirect_flags) != relevant_indirect_flags)
4094 {
4095 struct constraint_expr lhs, rhs;
4096 indir_tem = new_var_info (NULL_TREE, "indircallarg", true);
4097 indir_tem->is_reg_var = true;
4098
4099 /* indir_term = *tem. */
4100 lhs.type = SCALAR;
4101 lhs.var = indir_tem->id;
4102 lhs.offset = 0;
4103
4104 rhs.type = DEREF;
4105 rhs.var = tem->id;
4106 rhs.offset = UNKNOWN_OFFSET;
4107 process_constraint (new_constraint (lhs, rhs));
4108
4109 make_any_offset_constraints (indir_tem);
4110
4111 /* If we do not read indirectly there is no need for transitive closure.
4112 We know there is only one level of indirection. */
4113 if (!(flags & EAF_NO_INDIRECT_READ))
4114 make_transitive_closure_constraints (indir_tem);
4115 gcc_checking_assert (!(flags & EAF_NO_DIRECT_READ));
4116 }
4117
4118 if (gimple_call_lhs (stmt))
4119 {
4120 if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
4121 {
4122 struct constraint_expr cexpr;
4123 cexpr.var = tem->id;
4124 cexpr.type = SCALAR;
4125 cexpr.offset = 0;
4126 results->safe_push (cexpr);
4127 }
4128 if (!callarg_transitive & !(flags & EAF_NOT_RETURNED_INDIRECTLY))
4129 {
4130 struct constraint_expr cexpr;
4131 cexpr.var = indir_tem->id;
4132 cexpr.type = SCALAR;
4133 cexpr.offset = 0;
4134 results->safe_push (cexpr);
4135 }
4136 }
4137
4138 if (!(flags & EAF_NO_DIRECT_READ))
4139 {
4140 varinfo_t uses = get_call_use_vi (stmt);
4141 make_copy_constraint (uses, tem->id);
4142 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_READ))
4143 make_copy_constraint (uses, indir_tem->id);
4144 }
4145 else
4146 /* To read indirectly we need to read directly. */
4147 gcc_checking_assert (flags & EAF_NO_INDIRECT_READ);
4148
4149 if (!(flags & EAF_NO_DIRECT_CLOBBER))
4150 {
4151 struct constraint_expr lhs, rhs;
4152
4153 /* *arg = callescape. */
4154 lhs.type = DEREF;
4155 lhs.var = tem->id;
4156 lhs.offset = 0;
4157
4158 rhs.type = SCALAR;
4159 rhs.var = callescape_id;
4160 rhs.offset = 0;
4161 process_constraint (new_constraint (lhs, rhs));
4162
4163 /* callclobbered = arg. */
4164 make_copy_constraint (get_call_clobber_vi (stmt), tem->id);
4165 }
4166 if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_CLOBBER))
4167 {
4168 struct constraint_expr lhs, rhs;
4169
4170 /* *indir_arg = callescape. */
4171 lhs.type = DEREF;
4172 lhs.var = indir_tem->id;
4173 lhs.offset = 0;
4174
4175 rhs.type = SCALAR;
4176 rhs.var = callescape_id;
4177 rhs.offset = 0;
4178 process_constraint (new_constraint (lhs, rhs));
4179
4180 /* callclobbered = indir_arg. */
4181 make_copy_constraint (get_call_clobber_vi (stmt), indir_tem->id);
4182 }
4183
4184 if (!(flags & (EAF_NO_DIRECT_ESCAPE | EAF_NO_INDIRECT_ESCAPE)))
4185 {
4186 struct constraint_expr lhs, rhs;
4187
4188 /* callescape = arg; */
4189 lhs.var = callescape_id;
4190 lhs.offset = 0;
4191 lhs.type = SCALAR;
4192
4193 rhs.var = tem->id;
4194 rhs.offset = 0;
4195 rhs.type = SCALAR;
4196 process_constraint (new_constraint (lhs, rhs));
4197
4198 if (writes_global_memory)
4199 make_escape_constraint (arg);
4200 }
4201 else if (!callarg_transitive & !(flags & EAF_NO_INDIRECT_ESCAPE))
4202 {
4203 struct constraint_expr lhs, rhs;
4204
4205 /* callescape = *(indir_arg + UNKNOWN); */
4206 lhs.var = callescape_id;
4207 lhs.offset = 0;
4208 lhs.type = SCALAR;
4209
4210 rhs.var = indir_tem->id;
4211 rhs.offset = 0;
4212 rhs.type = SCALAR;
4213 process_constraint (new_constraint (lhs, rhs));
4214
4215 if (writes_global_memory)
4216 make_indirect_escape_constraint (tem);
4217 }
4218 }
4219
4220 /* Determine global memory access of call STMT and update
4221 WRITES_GLOBAL_MEMORY, READS_GLOBAL_MEMORY and USES_GLOBAL_MEMORY. */
4222
static void
determine_global_memory_access (gcall *stmt,
				bool *writes_global_memory,
				bool *reads_global_memory,
				bool *uses_global_memory)
{
  tree callee;
  cgraph_node *node;
  modref_summary *summary;

  /* We need to determine reads to set uses.  */
  gcc_assert (!uses_global_memory || reads_global_memory);

  /* First consult the callee's modref summary, if available.  Each
     answer can only be lowered from the conservative value passed in,
     never raised.  */
  if ((callee = gimple_call_fndecl (stmt)) != NULL_TREE
      && (node = cgraph_node::get (callee)) != NULL
      && (summary = get_modref_function_summary (node)))
    {
      if (writes_global_memory && *writes_global_memory)
	*writes_global_memory = summary->global_memory_written;
      if (reads_global_memory && *reads_global_memory)
	*reads_global_memory = summary->global_memory_read;
      if (reads_global_memory && uses_global_memory
	  && !summary->calls_interposable
	  && !*reads_global_memory && node->binds_to_current_def_p ())
	*uses_global_memory = false;
    }
  /* If any access is still assumed, try to refine further using the
     call's fnspec attribute.  */
  if ((writes_global_memory && *writes_global_memory)
      || (uses_global_memory && *uses_global_memory)
      || (reads_global_memory && *reads_global_memory))
    {
      attr_fnspec fnspec = gimple_call_fnspec (stmt);
      if (fnspec.known_p ())
	{
	  if (writes_global_memory
	      && !fnspec.global_memory_written_p ())
	    *writes_global_memory = false;
	  if (reads_global_memory && !fnspec.global_memory_read_p ())
	    {
	      *reads_global_memory = false;
	      if (uses_global_memory)
		*uses_global_memory = false;
	    }
	}
    }
}
4268
/* For non-IPA mode, generate constraints necessary for a call on the
   RHS and collect return value constraint to RESULTS to be used later in
   handle_lhs_call.

   IMPLICIT_EAF_FLAGS are added to each function argument.  If
   WRITES_GLOBAL_MEMORY is true function is assumed to possibly write to global
   memory.  Similar for READS_GLOBAL_MEMORY.  */

static void
handle_rhs_call (gcall *stmt, vec<ce_s> *results,
		 int implicit_eaf_flags,
		 bool writes_global_memory,
		 bool reads_global_memory)
{
  /* Refine the conservative read/write assumptions using modref
     summaries and fnspec, if available.  */
  determine_global_memory_access (stmt, &writes_global_memory,
				  &reads_global_memory,
				  NULL);

  /* Artificial variable collecting everything that escapes into
     this particular call (a per-call ESCAPED).  */
  varinfo_t callescape = new_var_info (NULL_TREE, "callescape", true);

  /* If function can use global memory, add it to callescape
     and to possible return values.  If not we can still use/return addresses
     of global symbols.  */
  struct constraint_expr lhs, rhs;

  lhs.type = SCALAR;
  lhs.var = callescape->id;
  lhs.offset = 0;

  /* SCALAR = points-to of NONLOCAL (the call may read global memory);
     ADDRESSOF = only the addresses of global symbols themselves.  */
  rhs.type = reads_global_memory ? SCALAR : ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;

  process_constraint (new_constraint (lhs, rhs));
  results->safe_push (rhs);

  /* Whatever escapes into the call is also used by it.  */
  varinfo_t uses = get_call_use_vi (stmt);
  make_copy_constraint (uses, callescape->id);

  for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      int flags = gimple_call_arg_flags (stmt, i);
      handle_call_arg (stmt, arg, results,
		       flags | implicit_eaf_flags,
		       callescape->id, writes_global_memory);
    }

  /* The static chain escapes as well.  */
  if (gimple_call_chain (stmt))
    handle_call_arg (stmt, gimple_call_chain (stmt), results,
		     implicit_eaf_flags
		     | gimple_call_static_chain_flags (stmt),
		     callescape->id, writes_global_memory);

  /* And if we applied NRV the address of the return slot escapes as well.  */
  if (gimple_call_return_slot_opt_p (stmt)
      && gimple_call_lhs (stmt) != NULL_TREE
      && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
    {
      int flags = gimple_call_retslot_flags (stmt);
      const int relevant_flags = EAF_NO_DIRECT_ESCAPE
				 | EAF_NOT_RETURNED_DIRECTLY;

      /* Only bother when the slot is used at all and at least one of
	 escape/return effects is possible.  */
      if (!(flags & EAF_UNUSED) && (flags & relevant_flags) != relevant_flags)
	{
	  auto_vec<ce_s> tmpc;

	  get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);

	  if (!(flags & EAF_NO_DIRECT_ESCAPE))
	    {
	      make_constraints_to (callescape->id, tmpc);
	      if (writes_global_memory)
		make_constraints_to (escaped_id, tmpc);
	    }
	  if (!(flags & EAF_NOT_RETURNED_DIRECTLY))
	    {
	      struct constraint_expr *c;
	      unsigned i;
	      FOR_EACH_VEC_ELT (tmpc, i, c)
		results->safe_push (*c);
	    }
	}
    }
}
4355
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.

   STMT is the call, FLAGS its ERF_* return flags, RHSC the return-value
   constraints collected by handle_rhs_call, FNDECL the callee decl or
   NULL for indirect calls.  RHSC is owned by the caller; the ERF paths
   below temporarily repurpose and release it.  */

static void
handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> &rhsc,
		 tree fndecl)
{
  auto_vec<ce_s> lhsc;

  get_constraint_for (lhs, &lhsc);
  /* If the store is to a global decl make sure to
     add proper escape constraints.  */
  lhs = get_base_address (lhs);
  if (lhs
      && DECL_P (lhs)
      && is_global_var (lhs))
    {
      struct constraint_expr tmpc;
      tmpc.var = escaped_id;
      tmpc.offset = 0;
      tmpc.type = SCALAR;
      lhsc.safe_push (tmpc);
    }

  /* If the call returns an argument unmodified override the rhs
     constraints.  */
  if (flags & ERF_RETURNS_ARG
      && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
    {
      tree arg;
      rhsc.create (0);
      arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
      get_constraint_for (arg, &rhsc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else if (flags & ERF_NOALIAS)
    {
      /* The call returns fresh memory (malloc-like): make LHS point
	 to a new heap variable instead of the collected constraints.  */
      varinfo_t vi;
      struct constraint_expr tmpc;
      rhsc.create (0);
      vi = make_heapvar ("HEAP", true);
      /* We are marking allocated storage local, we deal with it becoming
	 global by escaping and setting of vars_contains_escaped_heap.  */
      DECL_EXTERNAL (vi->decl) = 0;
      vi->is_global_var = 0;
      /* If this is not a real malloc call assume the memory was
	 initialized and thus may point to global memory.  All
	 builtin functions with the malloc attribute behave in a sane way.  */
      if (!fndecl
	  || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	make_constraint_from (vi, nonlocal_id);
      tmpc.var = vi->id;
      tmpc.offset = 0;
      tmpc.type = ADDRESSOF;
      rhsc.safe_push (tmpc);
      process_all_all_constraints (lhsc, rhsc);
      rhsc.release ();
    }
  else
    process_all_all_constraints (lhsc, rhsc);
}
4419
4420
4421 /* Return the varinfo for the callee of CALL. */
4422
4423 static varinfo_t
4424 get_fi_for_callee (gcall *call)
4425 {
4426 tree decl, fn = gimple_call_fn (call);
4427
4428 if (fn && TREE_CODE (fn) == OBJ_TYPE_REF)
4429 fn = OBJ_TYPE_REF_EXPR (fn);
4430
4431 /* If we can directly resolve the function being called, do so.
4432 Otherwise, it must be some sort of indirect expression that
4433 we should still be able to handle. */
4434 decl = gimple_call_addr_fndecl (fn);
4435 if (decl)
4436 return get_vi_for_tree (decl);
4437
4438 /* If the function is anything other than a SSA name pointer we have no
4439 clue and should be getting ANYFN (well, ANYTHING for now). */
4440 if (!fn || TREE_CODE (fn) != SSA_NAME)
4441 return get_varinfo (anything_id);
4442
4443 if (SSA_NAME_IS_DEFAULT_DEF (fn)
4444 && (TREE_CODE (SSA_NAME_VAR (fn)) == PARM_DECL
4445 || TREE_CODE (SSA_NAME_VAR (fn)) == RESULT_DECL))
4446 fn = SSA_NAME_VAR (fn);
4447
4448 return get_vi_for_tree (fn);
4449 }
4450
4451 /* Create constraints for assigning call argument ARG to the incoming parameter
4452 INDEX of function FI. */
4453
4454 static void
4455 find_func_aliases_for_call_arg (varinfo_t fi, unsigned index, tree arg)
4456 {
4457 struct constraint_expr lhs;
4458 lhs = get_function_part_constraint (fi, fi_parm_base + index);
4459
4460 auto_vec<ce_s, 2> rhsc;
4461 get_constraint_for_rhs (arg, &rhsc);
4462
4463 unsigned j;
4464 struct constraint_expr *rhsp;
4465 FOR_EACH_VEC_ELT (rhsc, j, rhsp)
4466 process_constraint (new_constraint (lhs, *rhsp));
4467 }
4468
4469 /* Return true if FNDECL may be part of another lto partition. */
4470
4471 static bool
4472 fndecl_maybe_in_other_partition (tree fndecl)
4473 {
4474 cgraph_node *fn_node = cgraph_node::get (fndecl);
4475 if (fn_node == NULL)
4476 return true;
4477
4478 return fn_node->in_other_partition;
4479 }
4480
/* Create constraints for the builtin call T.  Return true if the call
   was handled, otherwise false (in which case the caller falls back to
   generic call handling).  FN is the function containing T.  */

static bool
find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  auto_vec<ce_s, 2> lhsc;
  auto_vec<ce_s, 4> rhsc;
  varinfo_t fi;

  if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
    /* ???  All builtins that are handled here need to be handled
       in the alias-oracle query functions explicitly!  */
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      /* All the following functions return a pointer to the same object
	 as their first argument points to.  The functions do not add
	 to the ESCAPED solution.  The functions make the first argument
	 pointed to memory point to what the second argument pointed to
	 memory points to.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_BCOPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
	{
	  tree res = gimple_call_lhs (t);
	  /* BCOPY has (src, dest) argument order; everything else
	     here is (dest, src).  */
	  tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					   == BUILT_IN_BCOPY ? 1 : 0));
	  tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (fndecl)
					  == BUILT_IN_BCOPY ? 0 : 1));
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      /* The mempcpy/stpcpy family return a pointer *into* DEST
		 at some offset, so use the offsetted constraint.  */
	      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPCPY_CHK
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STPNCPY_CHK)
		get_constraint_for_ptr_offset (dest, NULL_TREE, &rhsc);
	      else
		get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	      rhsc.truncate (0);
	    }
	  /* Model the copy itself: *(dest + x) = *(src + y).  */
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	  do_deref (&lhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_TM_MEMSET:
	{
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  unsigned i;
	  ce_s *lhsp;
	  struct constraint_expr ac;
	  /* The result, if any, aliases DEST.  */
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	      lhsc.truncate (0);
	    }
	  get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* memset (p, 0, n) stores null pointers; any other value is
	     modeled as "some integer".  */
	  if (flag_delete_null_pointer_checks
	      && integer_zerop (gimple_call_arg (t, 1)))
	    {
	      ac.type = ADDRESSOF;
	      ac.var = nothing_id;
	    }
	  else
	    {
	      ac.type = SCALAR;
	      ac.var = integer_id;
	    }
	  ac.offset = 0;
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, ac));
	  return true;
	}
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Nothing interesting happens.  */
	return true;
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX:
	{
	  tree ptr = gimple_call_lhs (t);
	  if (ptr == NULL_TREE)
	    return true;
	  get_constraint_for (ptr, &lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* Alloca storage is never global.  To exempt it from escaped
	     handling make it a non-heap var.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  vi->is_heap_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_POSIX_MEMALIGN:
	{
	  /* posix_memalign stores the new pointer through its first
	     argument, it is not the return value.  */
	  tree ptrptr = gimple_call_arg (t, 0);
	  get_constraint_for (ptrptr, &lhsc);
	  do_deref (&lhsc);
	  varinfo_t vi = make_heapvar ("HEAP", true);
	  /* We are marking allocated storage local, we deal with it becoming
	     global by escaping and setting of vars_contains_escaped_heap.  */
	  DECL_EXTERNAL (vi->decl) = 0;
	  vi->is_global_var = 0;
	  struct constraint_expr tmpc;
	  tmpc.var = vi->id;
	  tmpc.offset = 0;
	  tmpc.type = ADDRESSOF;
	  rhsc.safe_push (tmpc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      case BUILT_IN_ASSUME_ALIGNED:
	{
	  /* The result is just the first argument passed through.  */
	  tree res = gimple_call_lhs (t);
	  tree dest = gimple_call_arg (t, 0);
	  if (res != NULL_TREE)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (dest, &rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      /* All the following functions do not return pointers, do not
	 modify the points-to sets of memory reachable from their
	 arguments and do not add to the ESCAPED solution.  */
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_FREE:
	return true;
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
      case BUILT_IN_REALLOC:
	if (gimple_call_lhs (t))
	  {
	    /* Treat the result as a fresh allocation (ERF_NOALIAS) whose
	       contents may point where the source's contents point.  */
	    auto_vec<ce_s> rhsc;
	    handle_lhs_call (t, gimple_call_lhs (t),
			     gimple_call_return_flags (t) | ERF_NOALIAS,
			     rhsc, fndecl);
	    get_constraint_for_ptr_offset (gimple_call_lhs (t),
					   NULL_TREE, &lhsc);
	    get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
					   NULL_TREE, &rhsc);
	    do_deref (&lhsc);
	    do_deref (&rhsc);
	    process_all_all_constraints (lhsc, rhsc);
	    lhsc.truncate (0);
	    rhsc.truncate (0);
	    /* For realloc the resulting pointer can be equal to the
	       argument as well.  But only doing this wouldn't be
	       correct because with ptr == 0 realloc behaves like malloc.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_REALLOC)
	      {
		get_constraint_for (gimple_call_lhs (t), &lhsc);
		get_constraint_for (gimple_call_arg (t, 0), &rhsc);
		process_all_all_constraints (lhsc, rhsc);
	      }
	    return true;
	  }
	break;
      /* String / character search functions return a pointer into the
	 source string or NULL.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_MEMCHR:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
	if (gimple_call_lhs (t))
	  {
	    tree src = gimple_call_arg (t, 0);
	    get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	    /* Also allow the NULL result.  */
	    constraint_expr nul;
	    nul.var = nothing_id;
	    nul.offset = 0;
	    nul.type = ADDRESSOF;
	    rhsc.safe_push (nul);
	    get_constraint_for (gimple_call_lhs (t), &lhsc);
	    process_all_all_constraints (lhsc, rhsc);
	  }
	return true;
      /* Pure functions that return something not based on any object and
	 that use the memory pointed to by their arguments (but not
	 transitively).  */
      case BUILT_IN_STRCMP:
      case BUILT_IN_STRCMP_EQ:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRNCMP_EQ:
      case BUILT_IN_STRCASECMP:
      case BUILT_IN_STRNCASECMP:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_BCMP:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  make_constraint_to (uses->id, gimple_call_arg (t, 1));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_STRLEN:
	{
	  varinfo_t uses = get_call_use_vi (t);
	  make_any_offset_constraints (uses);
	  make_constraint_to (uses->id, gimple_call_arg (t, 0));
	  /* No constraints are necessary for the return value.  */
	  return true;
	}
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_CONSTANT_P:
	{
	  /* No constraints are necessary for the return value or the
	     arguments.  */
	  return true;
	}
      /* Trampolines are special - they set up passing the static
	 frame.  */
      case BUILT_IN_INIT_TRAMPOLINE:
	{
	  tree tramp = gimple_call_arg (t, 0);
	  tree nfunc = gimple_call_arg (t, 1);
	  tree frame = gimple_call_arg (t, 2);
	  unsigned i;
	  struct constraint_expr lhs, *rhsp;
	  if (in_ipa_mode)
	    {
	      /* Wire the frame into the nested function's static chain
		 slot when we know the nested function.  */
	      varinfo_t nfi = NULL;
	      gcc_assert (TREE_CODE (nfunc) == ADDR_EXPR);
	      nfi = lookup_vi_for_tree (TREE_OPERAND (nfunc, 0));
	      if (nfi)
		{
		  lhs = get_function_part_constraint (nfi, fi_static_chain);
		  get_constraint_for (frame, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);

		  /* Make the frame point to the function for
		     the trampoline adjustment call.  */
		  get_constraint_for (tramp, &lhsc);
		  do_deref (&lhsc);
		  get_constraint_for (nfunc, &rhsc);
		  process_all_all_constraints (lhsc, rhsc);

		  return true;
		}
	    }
	  /* Else fallthru to generic handling which will let
	     the frame escape.  */
	  break;
	}
      case BUILT_IN_ADJUST_TRAMPOLINE:
	{
	  /* The result points to the function stored by
	     INIT_TRAMPOLINE above.  */
	  tree tramp = gimple_call_arg (t, 0);
	  tree res = gimple_call_lhs (t);
	  if (in_ipa_mode && res)
	    {
	      get_constraint_for (res, &lhsc);
	      get_constraint_for (tramp, &rhsc);
	      do_deref (&rhsc);
	      process_all_all_constraints (lhsc, rhsc);
	    }
	  return true;
	}
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
	{
	  /* TM store: *addr = src.  */
	  tree addr = gimple_call_arg (t, 0);
	  tree src = gimple_call_arg (t, 1);

	  get_constraint_for (addr, &lhsc);
	  do_deref (&lhsc);
	  get_constraint_for (src, &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
	{
	  /* TM load: dest = *addr.  */
	  tree dest = gimple_call_lhs (t);
	  tree addr = gimple_call_arg (t, 0);

	  get_constraint_for (dest, &lhsc);
	  get_constraint_for (addr, &rhsc);
	  do_deref (&rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  return true;
	}
      /* Variadic argument handling needs to be handled in IPA
	 mode as well.  */
      case BUILT_IN_VA_START:
	{
	  tree valist = gimple_call_arg (t, 0);
	  struct constraint_expr rhs, *lhsp;
	  unsigned i;
	  get_constraint_for_ptr_offset (valist, NULL_TREE, &lhsc);
	  do_deref (&lhsc);
	  /* The va_list gets access to pointers in variadic
	     arguments.  Which we know in the case of IPA analysis
	     and otherwise are just all nonlocal variables.  */
	  if (in_ipa_mode)
	    {
	      fi = lookup_vi_for_tree (fn->decl);
	      rhs = get_function_part_constraint (fi, ~0);
	      rhs.type = ADDRESSOF;
	    }
	  else
	    {
	      rhs.var = nonlocal_id;
	      rhs.type = ADDRESSOF;
	      rhs.offset = 0;
	    }
	  FOR_EACH_VEC_ELT (lhsc, i, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));
	  /* va_list is clobbered.  */
	  make_constraint_to (get_call_clobber_vi (t)->id, valist);
	  return true;
	}
      /* va_end doesn't have any effect that matters.  */
      case BUILT_IN_VA_END:
	return true;
      /* Alternate return.  Simply give up for now.  */
      case BUILT_IN_RETURN:
	{
	  fi = NULL;
	  if (!in_ipa_mode
	      || !(fi = get_vi_for_tree (fn->decl)))
	    make_constraint_from (get_varinfo (escaped_id), anything_id);
	  else if (in_ipa_mode
		   && fi != NULL)
	    {
	      struct constraint_expr lhs, rhs;
	      lhs = get_function_part_constraint (fi, fi_result);
	      rhs.var = anything_id;
	      rhs.offset = 0;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	  return true;
	}
      case BUILT_IN_GOMP_PARALLEL:
      case BUILT_IN_GOACC_PARALLEL:
	{
	  if (in_ipa_mode)
	    {
	      unsigned int fnpos, argpos;
	      switch (DECL_FUNCTION_CODE (fndecl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      tree arg = gimple_call_arg (t, argpos);

	      /* Model the outlined region invocation as passing ARG as
		 the single parameter of the outlined function.  */
	      varinfo_t fi = get_vi_for_tree (fndecl);
	      find_func_aliases_for_call_arg (fi, 0, arg);
	      return true;
	    }
	  /* Else fallthru to generic call handling.  */
	  break;
	}
      /* printf-style functions may have hooks to set pointers to
	 point to somewhere into the generated string.  Leave them
	 for a later exercise...  */
      default:
	/* Fallthru to general call handling.  */;
      }

  return false;
}
4941
/* Create constraints for the call T.  FN is the function containing T.
   Builtins get special-cased handling first; otherwise calls are either
   handled conservatively (non-IPA, or unknown/indirect callee) or, in
   IPA mode with a known callee, wired directly to the callee's
   function info.  */

static void
find_func_aliases_for_call (struct function *fn, gcall *t)
{
  tree fndecl = gimple_call_fndecl (t);
  varinfo_t fi;

  if (fndecl != NULL_TREE
      && fndecl_built_in_p (fndecl)
      && find_func_aliases_for_builtin_call (fn, t))
    return;

  /* IFN_DEFERRED_INIT only initializes storage; it has no points-to
     effect.  */
  if (gimple_call_internal_p (t, IFN_DEFERRED_INIT))
    return;

  fi = get_fi_for_callee (t);
  if (!in_ipa_mode
      || (fi->decl && fndecl && !fi->is_fn_info))
    {
      auto_vec<ce_s, 16> rhsc;
      int flags = gimple_call_flags (t);

      /* Const functions can return their arguments and addresses
	 of global memory but not of escaped memory.  */
      if (flags & (ECF_CONST|ECF_NOVOPS))
	{
	  if (gimple_call_lhs (t))
	    handle_rhs_call (t, &rhsc, implicit_const_eaf_flags, false, false);
	}
      /* Pure functions can return addresses in and of memory
	 reachable from their arguments, but they are not an escape
	 point for reachable memory of their arguments.  */
      else if (flags & (ECF_PURE|ECF_LOOPING_CONST_OR_PURE))
	handle_rhs_call (t, &rhsc, implicit_pure_eaf_flags, false, true);
      /* If the call is to a replaceable operator delete and results
	 from a delete expression as opposed to a direct call to
	 such operator, then the effects for PTA (in particular
	 the escaping of the pointer) can be ignored.  */
      else if (fndecl
	       && DECL_IS_OPERATOR_DELETE_P (fndecl)
	       && gimple_call_from_new_or_delete (t))
	;
      else
	handle_rhs_call (t, &rhsc, 0, true, true);
      if (gimple_call_lhs (t))
	handle_lhs_call (t, gimple_call_lhs (t),
			 gimple_call_return_flags (t), rhsc, fndecl);
    }
  else
    {
      /* IPA mode with a known callee.  */
      auto_vec<ce_s, 2> rhsc;
      tree lhsop;
      unsigned j;

      /* Assign all the passed arguments to the appropriate incoming
	 parameters of the function.  */
      for (j = 0; j < gimple_call_num_args (t); j++)
	{
	  tree arg = gimple_call_arg (t, j);
	  find_func_aliases_for_call_arg (fi, j, arg);
	}

      /* If we are returning a value, assign it to the result.  */
      lhsop = gimple_call_lhs (t);
      if (lhsop)
	{
	  auto_vec<ce_s, 2> lhsc;
	  struct constraint_expr rhs;
	  struct constraint_expr *lhsp;
	  bool aggr_p = aggregate_value_p (lhsop, gimple_call_fntype (t));

	  get_constraint_for (lhsop, &lhsc);
	  rhs = get_function_part_constraint (fi, fi_result);
	  /* Aggregates are returned via an invisible reference, so the
	     value flows through a dereference of the result slot.  */
	  if (aggr_p)
	    {
	      auto_vec<ce_s, 2> tem;
	      tem.quick_push (rhs);
	      do_deref (&tem);
	      gcc_checking_assert (tem.length () == 1);
	      rhs = tem[0];
	    }
	  FOR_EACH_VEC_ELT (lhsc, j, lhsp)
	    process_constraint (new_constraint (*lhsp, rhs));

	  /* If we pass the result decl by reference, honor that.  */
	  if (aggr_p)
	    {
	      struct constraint_expr lhs;
	      struct constraint_expr *rhsp;

	      get_constraint_for_address_of (lhsop, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_result);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);
	    }
	}

      /* If we use a static chain, pass it along.  */
      if (gimple_call_chain (t))
	{
	  struct constraint_expr lhs;
	  struct constraint_expr *rhsp;

	  get_constraint_for (gimple_call_chain (t), &rhsc);
	  lhs = get_function_part_constraint (fi, fi_static_chain);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
}
5054
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  FN is the function T belongs to.  Dispatches on
   the statement kind: PHIs, calls, assignments, returns and asms.  */

static void
find_func_aliases (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      /* For a phi node, assign all the arguments to
	 the result.  */
      get_constraint_for (gimple_phi_result (t), &lhsc);
      for (unsigned i = 0; i < gimple_phi_num_args (t); i++)
	{
	  get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
	  process_all_all_constraints (lhsc, rhsc);
	  rhsc.truncate (0);
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    find_func_aliases_for_call (fn, as_a <gcall *> (t));

  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      /* RHSOP is only set for single-operand RHSes; multi-operand
	 codes are handled via gimple_assign_rhs1/2/3 below.  */
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && TREE_CLOBBER_P (rhsop))
	/* Ignore clobbers, they don't actually store anything into
	   the LHS.  */
	;
      else if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  enum tree_code code = gimple_assign_rhs_code (t);

	  get_constraint_for (lhsop, &lhsc);

	  if (code == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if (code == POINTER_DIFF_EXPR)
	    /* The result is not a pointer (part).  */
	    ;
	  else if (code == BIT_AND_EXPR
		   && TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
	    {
	      /* Aligning a pointer via a BIT_AND_EXPR is offsetting
		 the pointer.  Handle it by offsetting it by UNKNOWN.  */
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	    }
	  else if (code == TRUNC_DIV_EXPR
		   || code == CEIL_DIV_EXPR
		   || code == FLOOR_DIV_EXPR
		   || code == ROUND_DIV_EXPR
		   || code == EXACT_DIV_EXPR
		   || code == TRUNC_MOD_EXPR
		   || code == CEIL_MOD_EXPR
		   || code == FLOOR_MOD_EXPR
		   || code == ROUND_MOD_EXPR)
	    /* Division and modulo transfer the pointer from the LHS.  */
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   NULL_TREE, &rhsc);
	  else if (CONVERT_EXPR_CODE_P (code)
		   || gimple_assign_single_p (t))
	    /* See through conversions, single RHS are handled by
	       get_constraint_for_rhs.  */
	    get_constraint_for_rhs (rhsop, &rhsc);
	  else if (code == COND_EXPR)
	    {
	      /* The result is a merge of both COND_EXPR arms.  */
	      auto_vec<ce_s, 2> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i;
	      get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
	      get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
	      FOR_EACH_VEC_ELT (tmp, i, rhsp)
		rhsc.safe_push (*rhsp);
	    }
	  else if (truth_value_p (code))
	    /* Truth value results are not pointer (parts).  Or at least
	       very unreasonable obfuscation of a part.  */
	    ;
	  else
	    {
	      /* All other operations are possibly offsetting merges.  */
	      auto_vec<ce_s, 4> tmp;
	      struct constraint_expr *rhsp;
	      unsigned i, j;
	      get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					     NULL_TREE, &rhsc);
	      for (i = 2; i < gimple_num_ops (t); ++i)
		{
		  get_constraint_for_ptr_offset (gimple_op (t, i),
						 NULL_TREE, &tmp);
		  FOR_EACH_VEC_ELT (tmp, j, rhsp)
		    rhsc.safe_push (*rhsp);
		  tmp.truncate (0);
		}
	    }
	  process_all_all_constraints (lhsc, rhsc);
	}
      /* If there is a store to a global variable the rhs escapes.  */
      if ((lhsop = get_base_address (lhsop)) != NULL_TREE
	  && DECL_P (lhsop))
	{
	  varinfo_t vi = get_vi_for_tree (lhsop);
	  if ((! in_ipa_mode && vi->is_global_var)
	      || vi->is_ipa_escape_point)
	    make_escape_constraint (rhsop);
	}
    }
  /* Handle escapes through return.  */
  else if (gimple_code (t) == GIMPLE_RETURN
	   && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
    {
      greturn *return_stmt = as_a <greturn *> (t);
      fi = NULL;
      if (!in_ipa_mode
	  && SSA_VAR_P (gimple_return_retval (return_stmt)))
	{
	  /* We handle simple returns by post-processing the solutions.  */
	  ;
	}
      if (!(fi = get_vi_for_tree (fn->decl)))
	make_escape_constraint (gimple_return_retval (return_stmt));
      else if (in_ipa_mode)
	{
	  /* Assign the returned value to the function's result slot.  */
	  struct constraint_expr lhs ;
	  struct constraint_expr *rhsp;
	  unsigned i;

	  lhs = get_function_part_constraint (fi, fi_result);
	  get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	}
    }
  /* Handle asms conservatively by adding escape constraints to everything.  */
  else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    {
      unsigned i, noutputs;
      const char **oconstraints;
      const char *constraint;
      bool allows_mem, allows_reg, is_inout;

      noutputs = gimple_asm_noutputs (asm_stmt);
      oconstraints = XALLOCAVEC (const char *, noutputs);

      for (i = 0; i < noutputs; ++i)
	{
	  tree link = gimple_asm_output_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	  oconstraints[i] = constraint;
	  parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
				   &allows_reg, &is_inout);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));

	  /* The asm may read global memory, so outputs may point to
	     any global memory.  */
	  if (op)
	    {
	      auto_vec<ce_s, 2> lhsc;
	      struct constraint_expr rhsc, *lhsp;
	      unsigned j;
	      get_constraint_for (op, &lhsc);
	      rhsc.var = nonlocal_id;
	      rhsc.offset = 0;
	      rhsc.type = SCALAR;
	      FOR_EACH_VEC_ELT (lhsc, j, lhsp)
		process_constraint (new_constraint (*lhsp, rhsc));
	    }
	}
      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	{
	  tree link = gimple_asm_input_op (asm_stmt, i);
	  tree op = TREE_VALUE (link);

	  constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));

	  parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
				  &allows_mem, &allows_reg);

	  /* A memory constraint makes the address of the operand escape.  */
	  if (!allows_reg && allows_mem)
	    make_escape_constraint (build_fold_addr_expr (op));
	  /* Strictly we'd only need the constraint to ESCAPED if
	     the asm clobbers memory, otherwise using something
	     along the lines of per-call clobbers/uses would be enough.  */
	  else if (op)
	    make_escape_constraint (op);
	}
    }
}
5274
5275
5276 /* Create a constraint adding to the clobber set of FI the memory
5277 pointed to by PTR. */
5278
5279 static void
5280 process_ipa_clobber (varinfo_t fi, tree ptr)
5281 {
5282 vec<ce_s> ptrc = vNULL;
5283 struct constraint_expr *c, lhs;
5284 unsigned i;
5285 get_constraint_for_rhs (ptr, &ptrc);
5286 lhs = get_function_part_constraint (fi, fi_clobbers);
5287 FOR_EACH_VEC_ELT (ptrc, i, c)
5288 process_constraint (new_constraint (lhs, *c));
5289 ptrc.release ();
5290 }
5291
/* Walk statement T setting up clobber and use constraints according to the
   references found in T.  This function is a main part of the
   IPA constraint builder.  */

static void
find_func_clobbers (struct function *fn, gimple *origt)
{
  gimple *t = origt;
  auto_vec<ce_s, 16> lhsc;
  auto_vec<ce_s, 16> rhsc;
  varinfo_t fi;

  /* Add constraints for clobbered/used in IPA mode.
     We are not interested in what automatic variables are clobbered
     or used as we only use the information in the caller to which
     they do not escape.  */
  gcc_assert (in_ipa_mode);

  /* If the stmt refers to memory in any way it better had a VUSE.  */
  if (gimple_vuse (t) == NULL_TREE)
    return;

  /* We'd better have function information for the current function.  */
  fi = lookup_vi_for_tree (fn->decl);
  gcc_assert (fi != NULL);

  /* Account for stores in assignments and calls.  */
  if (gimple_vdef (t) != NULL_TREE
      && gimple_has_lhs (t))
    {
      tree lhs = gimple_get_lhs (t);
      tree tem = lhs;
      /* Strip component references to get at the base of the store.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Only record the store if the base is not an automatic variable
	 of this function: a non-automatic decl, a pointer dereference,
	 or a MEM_REF whose base is not the address of an automatic
	 variable.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, fn->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
			(TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
	{
	  struct constraint_expr lhsc, *rhsp;
	  unsigned i;
	  /* The address of the stored-to memory goes into FI's
	     clobber set.  */
	  lhsc = get_function_part_constraint (fi, fi_clobbers);
	  get_constraint_for_address_of (lhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhsc, *rhsp));
	  rhsc.truncate (0);
	}
    }

  /* Account for uses in assigments and returns.  */
  if (gimple_assign_single_p (t)
      || (gimple_code (t) == GIMPLE_RETURN
	  && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
    {
      tree rhs = (gimple_assign_single_p (t)
		  ? gimple_assign_rhs1 (t)
		  : gimple_return_retval (as_a <greturn *> (t)));
      tree tem = rhs;
      /* As above, strip down to the base of the access.  */
      while (handled_component_p (tem))
	tem = TREE_OPERAND (tem, 0);
      /* Same base filtering as for stores: skip reads from automatic
	 variables of this function.  */
      if ((DECL_P (tem)
	   && !auto_var_in_fn_p (tem, fn->decl))
	  || INDIRECT_REF_P (tem)
	  || (TREE_CODE (tem) == MEM_REF
	      && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
		   && auto_var_in_fn_p
			(TREE_OPERAND (TREE_OPERAND (tem, 0), 0), fn->decl))))
	{
	  struct constraint_expr lhs, *rhsp;
	  unsigned i;
	  /* The address of the read memory goes into FI's use set.  */
	  lhs = get_function_part_constraint (fi, fi_uses);
	  get_constraint_for_address_of (rhs, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.truncate (0);
	}
    }

  if (gcall *call_stmt = dyn_cast <gcall *> (t))
    {
      varinfo_t cfi = NULL;
      tree decl = gimple_call_fndecl (t);
      struct constraint_expr lhs, rhs;
      unsigned i, j;

      /* For builtins we do not have separate function info.  For those
	 we do not generate escapes for we have to generate clobbers/uses.  */
      if (gimple_call_builtin_p (t, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (decl))
	  {
	  /* The following functions use and clobber memory pointed to
	     by their arguments.  */
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_BCOPY:
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_STPCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_STRCAT:
	  case BUILT_IN_STRNCAT:
	  case BUILT_IN_STRCPY_CHK:
	  case BUILT_IN_STRNCPY_CHK:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_STPCPY_CHK:
	  case BUILT_IN_STPNCPY_CHK:
	  case BUILT_IN_STRCAT_CHK:
	  case BUILT_IN_STRNCAT_CHK:
	    {
	      /* bcopy has (src, dest) argument order, all others
		 (dest, src).  */
	      tree dest = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					       == BUILT_IN_BCOPY ? 1 : 0));
	      tree src = gimple_call_arg (t, (DECL_FUNCTION_CODE (decl)
					      == BUILT_IN_BCOPY ? 0 : 1));
	      unsigned i;
	      struct constraint_expr *rhsp, *lhsp;
	      /* Destination memory is clobbered ...  */
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      /* ... and source memory is used.  */
	      get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
	      lhs = get_function_part_constraint (fi, fi_uses);
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      return;
	    }
	  /* The following function clobbers memory pointed to by
	     its argument.  */
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_POSIX_MEMALIGN:
	    {
	      tree dest = gimple_call_arg (t, 0);
	      unsigned i;
	      ce_s *lhsp;
	      get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      FOR_EACH_VEC_ELT (lhsc, i, lhsp)
		process_constraint (new_constraint (lhs, *lhsp));
	      return;
	    }
	  /* The following functions clobber their second and third
	     arguments.  */
	  case BUILT_IN_SINCOS:
	  case BUILT_IN_SINCOSF:
	  case BUILT_IN_SINCOSL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions clobber their second argument.  */
	  case BUILT_IN_FREXP:
	  case BUILT_IN_FREXPF:
	  case BUILT_IN_FREXPL:
	  case BUILT_IN_LGAMMA_R:
	  case BUILT_IN_LGAMMAF_R:
	  case BUILT_IN_LGAMMAL_R:
	  case BUILT_IN_GAMMA_R:
	  case BUILT_IN_GAMMAF_R:
	  case BUILT_IN_GAMMAL_R:
	  case BUILT_IN_MODF:
	  case BUILT_IN_MODFF:
	  case BUILT_IN_MODFL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 1));
	      return;
	    }
	  /* The following functions clobber their third argument.  */
	  case BUILT_IN_REMQUO:
	  case BUILT_IN_REMQUOF:
	  case BUILT_IN_REMQUOL:
	    {
	      process_ipa_clobber (fi, gimple_call_arg (t, 2));
	      return;
	    }
	  /* The following functions neither read nor clobber memory.  */
	  case BUILT_IN_ASSUME_ALIGNED:
	  case BUILT_IN_FREE:
	    return;
	  /* Trampolines are of no interest to us.  */
	  case BUILT_IN_INIT_TRAMPOLINE:
	  case BUILT_IN_ADJUST_TRAMPOLINE:
	    return;
	  case BUILT_IN_VA_START:
	  case BUILT_IN_VA_END:
	    return;
	  case BUILT_IN_GOMP_PARALLEL:
	  case BUILT_IN_GOACC_PARALLEL:
	    {
	      unsigned int fnpos, argpos;
	      unsigned int implicit_use_args[2];
	      unsigned int num_implicit_use_args = 0;
	      switch (DECL_FUNCTION_CODE (decl))
		{
		case BUILT_IN_GOMP_PARALLEL:
		  /* __builtin_GOMP_parallel (fn, data, num_threads, flags).  */
		  fnpos = 0;
		  argpos = 1;
		  break;
		case BUILT_IN_GOACC_PARALLEL:
		  /* __builtin_GOACC_parallel (flags_m, fn, mapnum, hostaddrs,
					       sizes, kinds, ...).  */
		  fnpos = 1;
		  argpos = 3;
		  implicit_use_args[num_implicit_use_args++] = 4;
		  implicit_use_args[num_implicit_use_args++] = 5;
		  break;
		default:
		  gcc_unreachable ();
		}

	      tree fnarg = gimple_call_arg (t, fnpos);
	      gcc_assert (TREE_CODE (fnarg) == ADDR_EXPR);
	      tree fndecl = TREE_OPERAND (fnarg, 0);
	      if (fndecl_maybe_in_other_partition (fndecl))
		/* Fallthru to general call handling.  */
		break;

	      /* NOTE: this CFI intentionally shadows the outer one;
		 it refers to the outlined region, not the builtin.  */
	      varinfo_t cfi = get_vi_for_tree (fndecl);

	      tree arg = gimple_call_arg (t, argpos);

	      /* Parameter passed by value is used.  */
	      lhs = get_function_part_constraint (fi, fi_uses);
	      struct constraint_expr *rhsp;
	      get_constraint_for (arg, &rhsc);
	      FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      rhsc.truncate (0);

	      /* Handle parameters used by the call, but not used in cfi, as
		 implicitly used by cfi.  */
	      lhs = get_function_part_constraint (cfi, fi_uses);
	      for (unsigned i = 0; i < num_implicit_use_args; ++i)
		{
		  tree arg = gimple_call_arg (t, implicit_use_args[i]);
		  get_constraint_for (arg, &rhsc);
		  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		  rhsc.truncate (0);
		}

	      /* The caller clobbers what the callee does.  */
	      lhs = get_function_part_constraint (fi, fi_clobbers);
	      rhs = get_function_part_constraint (cfi, fi_clobbers);
	      process_constraint (new_constraint (lhs, rhs));

	      /* The caller uses what the callee does.  */
	      lhs = get_function_part_constraint (fi, fi_uses);
	      rhs = get_function_part_constraint (cfi, fi_uses);
	      process_constraint (new_constraint (lhs, rhs));

	      return;
	    }
	  /* printf-style functions may have hooks to set pointers to
	     point to somewhere into the generated string.  Leave them
	     for a later exercise...  */
	  default:
	    /* Fallthru to general call handling.  */;
	  }

      /* Parameters passed by value are used.  */
      lhs = get_function_part_constraint (fi, fi_uses);
      for (i = 0; i < gimple_call_num_args (t); i++)
	{
	  struct constraint_expr *rhsp;
	  tree arg = gimple_call_arg (t, i);

	  /* SSA names and invariants do not reside in memory.  */
	  if (TREE_CODE (arg) == SSA_NAME
	      || is_gimple_min_invariant (arg))
	    continue;

	  get_constraint_for_address_of (arg, &rhsc);
	  FOR_EACH_VEC_ELT (rhsc, j, rhsp)
	    process_constraint (new_constraint (lhs, *rhsp));
	  rhsc.truncate (0);
	}

      /* Build constraints for propagating clobbers/uses along the
	 callgraph edges.  */
      cfi = get_fi_for_callee (call_stmt);
      if (cfi->id == anything_id)
	{
	  /* Unknown callee: assume it clobbers and uses anything.  */
	  if (gimple_vdef (t))
	    make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
				  anything_id);
	  make_constraint_from (first_vi_for_offset (fi, fi_uses),
				anything_id);
	  return;
	}

      /* For callees without function info (that's external functions),
	 ESCAPED is clobbered and used.  */
      if (cfi->decl
	  && TREE_CODE (cfi->decl) == FUNCTION_DECL
	  && !cfi->is_fn_info)
	{
	  varinfo_t vi;

	  if (gimple_vdef (t))
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  escaped_id);
	  make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);

	  /* Also honor the call statement use/clobber info.  */
	  if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
				  vi->id);
	  if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
	    make_copy_constraint (first_vi_for_offset (fi, fi_uses),
				  vi->id);
	  return;
	}

      /* Otherwise the caller clobbers and uses what the callee does.
	 ??? This should use a new complex constraint that filters
	 local variables of the callee.  */
      if (gimple_vdef (t))
	{
	  lhs = get_function_part_constraint (fi, fi_clobbers);
	  rhs = get_function_part_constraint (cfi, fi_clobbers);
	  process_constraint (new_constraint (lhs, rhs));
	}
      lhs = get_function_part_constraint (fi, fi_uses);
      rhs = get_function_part_constraint (cfi, fi_uses);
      process_constraint (new_constraint (lhs, rhs));
    }
  else if (gimple_code (t) == GIMPLE_ASM)
    {
      /* ??? Ick.  We can do better.  */
      if (gimple_vdef (t))
	make_constraint_from (first_vi_for_offset (fi, fi_clobbers),
			      anything_id);
      make_constraint_from (first_vi_for_offset (fi, fi_uses),
			    anything_id);
    }
}
5635
5636
5637 /* Find the first varinfo in the same variable as START that overlaps with
5638 OFFSET. Return NULL if we can't find one. */
5639
5640 static varinfo_t
5641 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
5642 {
5643 /* If the offset is outside of the variable, bail out. */
5644 if (offset >= start->fullsize)
5645 return NULL;
5646
5647 /* If we cannot reach offset from start, lookup the first field
5648 and start from there. */
5649 if (start->offset > offset)
5650 start = get_varinfo (start->head);
5651
5652 while (start)
5653 {
5654 /* We may not find a variable in the field list with the actual
5655 offset when we have glommed a structure to a variable.
5656 In that case, however, offset should still be within the size
5657 of the variable. */
5658 if (offset >= start->offset
5659 && (offset - start->offset) < start->size)
5660 return start;
5661
5662 start = vi_next (start);
5663 }
5664
5665 return NULL;
5666 }
5667
5668 /* Find the first varinfo in the same variable as START that overlaps with
5669 OFFSET. If there is no such varinfo the varinfo directly preceding
5670 OFFSET is returned. */
5671
5672 static varinfo_t
5673 first_or_preceding_vi_for_offset (varinfo_t start,
5674 unsigned HOST_WIDE_INT offset)
5675 {
5676 /* If we cannot reach offset from start, lookup the first field
5677 and start from there. */
5678 if (start->offset > offset)
5679 start = get_varinfo (start->head);
5680
5681 /* We may not find a variable in the field list with the actual
5682 offset when we have glommed a structure to a variable.
5683 In that case, however, offset should still be within the size
5684 of the variable.
5685 If we got beyond the offset we look for return the field
5686 directly preceding offset which may be the last field. */
5687 while (start->next
5688 && offset >= start->offset
5689 && !((offset - start->offset) < start->size))
5690 start = vi_next (start);
5691
5692 return start;
5693 }
5694
5695
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if DECL_SIZE of the field is absent or does not fit an
     unsigned HOST_WIDE_INT; SIZE is then meaningless (set to -1).  */
  unsigned has_unknown_size : 1;

  /* True if the field's type is known to contain pointers
     (see type_must_have_pointers).  */
  unsigned must_have_pointers : 1;

  /* True if the field might contain pointers.  */
  unsigned may_have_pointers : 1;

  /* True if the field is a restrict-qualified pointer.  */
  unsigned only_restrict_pointers : 1;

  /* When ONLY_RESTRICT_POINTERS, the type the restrict pointer
     points to; NULL_TREE otherwise.  */
  tree restrict_pointed_type;
};
typedef struct fieldoff fieldoff_s;
5720
5721
5722 /* qsort comparison function for two fieldoff's PA and PB */
5723
5724 static int
5725 fieldoff_compare (const void *pa, const void *pb)
5726 {
5727 const fieldoff_s *foa = (const fieldoff_s *)pa;
5728 const fieldoff_s *fob = (const fieldoff_s *)pb;
5729 unsigned HOST_WIDE_INT foasize, fobsize;
5730
5731 if (foa->offset < fob->offset)
5732 return -1;
5733 else if (foa->offset > fob->offset)
5734 return 1;
5735
5736 foasize = foa->size;
5737 fobsize = fob->size;
5738 if (foasize < fobsize)
5739 return -1;
5740 else if (foasize > fobsize)
5741 return 1;
5742 return 0;
5743 }
5744
/* Sort a fieldstack according to the field offset and sizes.  */
static void
sort_fieldstack (vec<fieldoff_s> &fieldstack)
{
  /* fieldoff_compare orders entries by offset, then by size.  */
  fieldstack.qsort (fieldoff_compare);
}
5751
/* Return true if T is a type that can have subvars.  */

static inline bool
type_can_have_subvars (const_tree t)
{
  /* Aggregates without overlapping fields can have subvars;
     unions (whose members overlap) are deliberately excluded.  */
  return TREE_CODE (t) == RECORD_TYPE;
}
5760
5761 /* Return true if V is a tree that we can have subvars for.
5762 Normally, this is any aggregate type. Also complex
5763 types which are not gimple registers can have subvars. */
5764
5765 static inline bool
5766 var_can_have_subvars (const_tree v)
5767 {
5768 /* Volatile variables should never have subvars. */
5769 if (TREE_THIS_VOLATILE (v))
5770 return false;
5771
5772 /* Non decls or memory tags can never have subvars. */
5773 if (!DECL_P (v))
5774 return false;
5775
5776 return type_can_have_subvars (TREE_TYPE (v));
5777 }
5778
5779 /* Return true if T is a type that does contain pointers. */
5780
5781 static bool
5782 type_must_have_pointers (tree type)
5783 {
5784 if (POINTER_TYPE_P (type))
5785 return true;
5786
5787 if (TREE_CODE (type) == ARRAY_TYPE)
5788 return type_must_have_pointers (TREE_TYPE (type));
5789
5790 /* A function or method can have pointers as arguments, so track
5791 those separately. */
5792 if (TREE_CODE (type) == FUNCTION_TYPE
5793 || TREE_CODE (type) == METHOD_TYPE)
5794 return true;
5795
5796 return false;
5797 }
5798
/* Return true if the type of field decl T must contain pointers.  */

static bool
field_must_have_pointers (tree t)
{
  return type_must_have_pointers (TREE_TYPE (t));
}
5804
/* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
   the fields of TYPE onto fieldstack, recording their offsets along
   the way.

   OFFSET is used to keep track of the offset in this entire
   structure, rather than just the immediately containing structure.
   Returns false if the caller is supposed to handle the field we
   recursed for.  */

static bool
push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
			     HOST_WIDE_INT offset)
{
  tree field;
  bool empty_p = true;

  /* Only RECORD_TYPEs are flattened into fields; anything else is
     handled by the caller as a single field.  */
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  /* If the vector of fields is growing too big, bail out early.
     Callers check for vec::length <= param_max_fields_for_field_sensitive, make
     sure this fails.  */
  if (fieldstack->length () > (unsigned)param_max_fields_for_field_sensitive)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      {
	bool push = false;
	/* FOFF is the bit offset of FIELD within TYPE; OFFSET + FOFF
	   is its offset within the outermost object.  */
	HOST_WIDE_INT foff = bitpos_of_field (field);
	tree field_type = TREE_TYPE (field);

	/* Fields we cannot or should not recurse into are pushed
	   as a single entry.  */
	if (!var_can_have_subvars (field)
	    || TREE_CODE (field_type) == QUAL_UNION_TYPE
	    || TREE_CODE (field_type) == UNION_TYPE)
	  push = true;
	else if (!push_fields_onto_fieldstack
		    (field_type, fieldstack, offset + foff)
		 && (DECL_SIZE (field)
		     && !integer_zerop (DECL_SIZE (field))))
	  /* Empty structures may have actual size, like in C++.  So
	     see if we didn't push any subfields and the size is
	     nonzero, push the field onto the stack.  */
	  push = true;

	if (push)
	  {
	    fieldoff_s *pair = NULL;
	    bool has_unknown_size = false;
	    bool must_have_pointers_p;

	    if (!fieldstack->is_empty ())
	      pair = &fieldstack->last ();

	    /* If there isn't anything at offset zero, create sth.  */
	    if (!pair
		&& offset + foff != 0)
	      {
		/* A pad entry at offset 0 of size OFFSET + FOFF that
		   may (but need not) have pointers.  */
		fieldoff_s e
		  = {0, offset + foff, false, false, true, false, NULL_TREE};
		pair = fieldstack->safe_push (e);
	      }

	    if (!DECL_SIZE (field)
		|| !tree_fits_uhwi_p (DECL_SIZE (field)))
	      has_unknown_size = true;

	    /* If adjacent fields do not contain pointers merge them.  */
	    must_have_pointers_p = field_must_have_pointers (field);
	    if (pair
		&& !has_unknown_size
		&& !must_have_pointers_p
		&& !pair->must_have_pointers
		&& !pair->has_unknown_size
		&& pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
	      {
		/* Extend the previous entry to also cover this field.  */
		pair->size += tree_to_uhwi (DECL_SIZE (field));
	      }
	    else
	      {
		fieldoff_s e;
		e.offset = offset + foff;
		e.has_unknown_size = has_unknown_size;
		if (!has_unknown_size)
		  e.size = tree_to_uhwi (DECL_SIZE (field));
		else
		  e.size = -1;
		e.must_have_pointers = must_have_pointers_p;
		e.may_have_pointers = true;
		e.only_restrict_pointers
		  = (!has_unknown_size
		     && POINTER_TYPE_P (field_type)
		     && TYPE_RESTRICT (field_type));
		if (e.only_restrict_pointers)
		  e.restrict_pointed_type = TREE_TYPE (field_type);
		fieldstack->safe_push (e);
	      }
	  }

	empty_p = false;
      }

  return !empty_p;
}
5909
5910 /* Count the number of arguments DECL has, and set IS_VARARGS to true
5911 if it is a varargs function. */
5912
5913 static unsigned int
5914 count_num_arguments (tree decl, bool *is_varargs)
5915 {
5916 unsigned int num = 0;
5917 tree t;
5918
5919 /* Capture named arguments for K&R functions. They do not
5920 have a prototype and thus no TYPE_ARG_TYPES. */
5921 for (t = DECL_ARGUMENTS (decl); t; t = DECL_CHAIN (t))
5922 ++num;
5923
5924 /* Check if the function has variadic arguments. */
5925 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl)); t; t = TREE_CHAIN (t))
5926 if (TREE_VALUE (t) == void_type_node)
5927 break;
5928 if (!t)
5929 *is_varargs = true;
5930
5931 return num;
5932 }
5933
/* Create the function-info varinfo chain for DECL, using NAME, and
   return the variable we've created for the function.  If NONLOCAL_P,
   create initial constraints.  The chain consists of sub-variables at
   fixed offsets: fi_clobbers, fi_uses, fi_static_chain (optional),
   fi_result (optional) and one per parameter from fi_parm_base on.  */

static varinfo_t
create_function_info_for (tree decl, const char *name, bool add_id,
			  bool nonlocal_p)
{
  struct function *fn = DECL_STRUCT_FUNCTION (decl);
  varinfo_t vi, prev_vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;
  unsigned int num_args = count_num_arguments (decl, &is_varargs);

  /* Create the variable info.  */

  vi = new_var_info (decl, name, add_id);
  vi->offset = 0;
  vi->size = 1;
  /* The function "variable" spans all its parts.  */
  vi->fullsize = fi_parm_base + num_args;
  vi->is_fn_info = 1;
  vi->may_have_pointers = false;
  if (is_varargs)
    /* Unknown number of parts for varargs functions.  */
    vi->fullsize = ~0;
  insert_vi_for_tree (vi->decl, vi);

  /* PREV_VI tracks the tail of the chain as parts are appended;
     offsets must be strictly increasing along the chain.  */
  prev_vi = vi;

  /* Create a variable for things the function clobbers and one for
     things the function uses.  */
  {
    varinfo_t clobbervi, usevi;
    const char *newname;
    char *tempname;

    tempname = xasprintf ("%s.clobber", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    clobbervi = new_var_info (NULL, newname, false);
    clobbervi->offset = fi_clobbers;
    clobbervi->size = 1;
    clobbervi->fullsize = vi->fullsize;
    clobbervi->is_full_var = true;
    clobbervi->is_global_var = false;
    clobbervi->is_reg_var = true;

    gcc_assert (prev_vi->offset < clobbervi->offset);
    prev_vi->next = clobbervi->id;
    prev_vi = clobbervi;

    tempname = xasprintf ("%s.use", name);
    newname = ggc_strdup (tempname);
    free (tempname);

    usevi = new_var_info (NULL, newname, false);
    usevi->offset = fi_uses;
    usevi->size = 1;
    usevi->fullsize = vi->fullsize;
    usevi->is_full_var = true;
    usevi->is_global_var = false;
    usevi->is_reg_var = true;

    gcc_assert (prev_vi->offset < usevi->offset);
    prev_vi->next = usevi->id;
    prev_vi = usevi;
  }

  /* And one for the static chain.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t chainvi;
      const char *newname;
      char *tempname;

      tempname = xasprintf ("%s.chain", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      chainvi = new_var_info (fn->static_chain_decl, newname, false);
      chainvi->offset = fi_static_chain;
      chainvi->size = 1;
      chainvi->fullsize = vi->fullsize;
      chainvi->is_full_var = true;
      chainvi->is_global_var = false;

      insert_vi_for_tree (fn->static_chain_decl, chainvi);

      if (nonlocal_p
	  && chainvi->may_have_pointers)
	make_constraint_from (chainvi, nonlocal_id);

      gcc_assert (prev_vi->offset < chainvi->offset);
      prev_vi->next = chainvi->id;
      prev_vi = chainvi;
    }

  /* Create a variable for the return var.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      tree resultdecl = decl;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      tempname = xasprintf ("%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newname, false);
      resultvi->offset = fi_result;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      if (DECL_RESULT (decl))
	resultvi->may_have_pointers = true;

      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);

      /* A result passed by invisible reference receives NONLOCAL.  */
      if (nonlocal_p
	  && DECL_RESULT (decl)
	  && DECL_BY_REFERENCE (DECL_RESULT (decl)))
	make_constraint_from (resultvi, nonlocal_id);

      gcc_assert (prev_vi->offset < resultvi->offset);
      prev_vi->next = resultvi->id;
      prev_vi = resultvi;
    }

  /* We also need to make function return values escape.  Nothing
     escapes by returning from main though.  */
  if (nonlocal_p
      && !MAIN_NAME_P (DECL_NAME (decl)))
    {
      varinfo_t fi, rvi;
      fi = lookup_vi_for_tree (decl);
      rvi = first_vi_for_offset (fi, fi_result);
      if (rvi && rvi->offset == fi_result)
	make_copy_constraint (get_varinfo (escaped_id), rvi->id);
    }

  /* Set up variables for each argument.  */
  arg = DECL_ARGUMENTS (decl);
  for (i = 0; i < num_args; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      tempname = xasprintf ("%s.arg%d", name, i);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newname, false);
      argvi->offset = fi_parm_base + i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      if (arg)
	argvi->may_have_pointers = true;

      if (arg)
	insert_vi_for_tree (arg, argvi);

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
      prev_vi = argvi;
      if (arg)
	arg = DECL_CHAIN (arg);
    }

  /* Add one representative for all further args.  */
  if (is_varargs)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      tree decl;

      tempname = xasprintf ("%s.varargs", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      /* We need sth that can be pointed to for va_start.  */
      decl = build_fake_var_decl (ptr_type_node);

      argvi = new_var_info (decl, newname, false);
      argvi->offset = fi_parm_base + num_args;
      argvi->size = ~0;
      argvi->is_full_var = true;
      argvi->is_heap_var = true;
      argvi->fullsize = vi->fullsize;

      if (nonlocal_p
	  && argvi->may_have_pointers)
	make_constraint_from (argvi, nonlocal_id);

      gcc_assert (prev_vi->offset < argvi->offset);
      prev_vi->next = argvi->id;
    }

  return vi;
}
6151
6152
6153 /* Return true if FIELDSTACK contains fields that overlap.
6154 FIELDSTACK is assumed to be sorted by offset. */
6155
6156 static bool
6157 check_for_overlaps (const vec<fieldoff_s> &fieldstack)
6158 {
6159 fieldoff_s *fo = NULL;
6160 unsigned int i;
6161 HOST_WIDE_INT lastoffset = -1;
6162
6163 FOR_EACH_VEC_ELT (fieldstack, i, fo)
6164 {
6165 if (fo->offset == lastoffset)
6166 return true;
6167 lastoffset = fo->offset;
6168 }
6169 return false;
6170 }
6171
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  DECL is a function parameter if HANDLE_PARAM is set.
   HANDLED_STRUCT_TYPE is used to register struct types reached by following
   restrict pointers.  This is needed to prevent infinite recursion.
   If ADD_RESTRICT, pretend that the pointer NAME is restrict even if DECL
   does not advertise it.  */

static varinfo_t
create_variable_info_for_1 (tree decl, const char *name, bool add_id,
			    bool handle_param, bitmap handled_struct_type,
			    bool add_restrict = false)
{
  varinfo_t vi, newvi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  auto_vec<fieldoff_s> fieldstack;
  fieldoff_s *fo;
  unsigned int i;

  /* Variables of unknown or non-constant size get a single varinfo
     covering everything.  */
  if (!declsize
      || !tree_fits_uhwi_p (declsize))
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->size = ~0;
      vi->fullsize = ~0;
      vi->is_unknown_size_var = true;
      vi->is_full_var = true;
      vi->may_have_pointers = true;
      return vi;
    }

  /* Collect field information.  */
  if (use_field_sensitive
      && var_can_have_subvars (decl)
      /* ??? Force us to not use subfields for globals in IPA mode.
	 Else we'd have to parse arbitrary initializers.  */
      && !(in_ipa_mode
	   && is_global_var (decl)))
    {
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

      /* Reject field-sensitivity if any field has unknown size or a
	 negative offset.  */
      for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
	if (fo->has_unknown_size
	    || fo->offset < 0)
	  {
	    notokay = true;
	    break;
	  }

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}

      if (notokay)
	fieldstack.release ();
    }

  /* If we didn't end up collecting sub-variables create a full
     variable for the decl.  */
  if (fieldstack.length () == 0
      || fieldstack.length () > (unsigned)param_max_fields_for_field_sensitive)
    {
      vi = new_var_info (decl, name, add_id);
      vi->offset = 0;
      vi->may_have_pointers = true;
      vi->fullsize = tree_to_uhwi (declsize);
      vi->size = vi->fullsize;
      vi->is_full_var = true;
      if (POINTER_TYPE_P (decl_type)
	  && (TYPE_RESTRICT (decl_type) || add_restrict))
	vi->only_restrict_pointers = 1;
      /* For restrict-qualified parameters point at a fake heap
	 variable representing the pointed-to memory.  The
	 HANDLED_STRUCT_TYPE bitmap guards against unbounded recursion
	 through self-referential struct types.  */
      if (vi->only_restrict_pointers
	  && !type_contains_placeholder_p (TREE_TYPE (decl_type))
	  && handle_param
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type))))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (TREE_TYPE (decl_type));
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (TREE_TYPE (decl_type)));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (TREE_TYPE (decl_type)));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (vi, rvi->id);
	  make_param_constraints (rvi);
	}
      fieldstack.release ();
      return vi;
    }

  /* Otherwise create one varinfo per field, chained together via
     vi->next in fieldstack (offset-sorted) order.  */
  vi = new_var_info (decl, name, add_id);
  vi->fullsize = tree_to_uhwi (declsize);
  if (fieldstack.length () == 1)
    vi->is_full_var = true;
  for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
       ++i, newvi = vi_next (newvi))
    {
      const char *newname = NULL;
      char *tempname;

      /* Field names like "NAME.offset+size" are only of interest
	 when dumping.  */
      if (dump_file)
	{
	  if (fieldstack.length () != 1)
	    {
	      tempname
		= xasprintf ("%s." HOST_WIDE_INT_PRINT_DEC
			     "+" HOST_WIDE_INT_PRINT_DEC, name,
			     fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	}
      else
	newname = "NULL";

      if (newname)
	newvi->name = newname;
      newvi->offset = fo->offset;
      newvi->size = fo->size;
      newvi->fullsize = vi->fullsize;
      newvi->may_have_pointers = fo->may_have_pointers;
      newvi->only_restrict_pointers = fo->only_restrict_pointers;
      /* Same fake-heap-variable treatment as above, but per
	 restrict-qualified field.  */
      if (handle_param
	  && newvi->only_restrict_pointers
	  && !type_contains_placeholder_p (fo->restrict_pointed_type)
	  && !bitmap_bit_p (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type)))
	{
	  varinfo_t rvi;
	  tree heapvar = build_fake_var_decl (fo->restrict_pointed_type);
	  DECL_EXTERNAL (heapvar) = 1;
	  if (var_can_have_subvars (heapvar))
	    bitmap_set_bit (handled_struct_type,
			    TYPE_UID (fo->restrict_pointed_type));
	  rvi = create_variable_info_for_1 (heapvar, "PARM_NOALIAS", true,
					    true, handled_struct_type);
	  if (var_can_have_subvars (heapvar))
	    bitmap_clear_bit (handled_struct_type,
			      TYPE_UID (fo->restrict_pointed_type));
	  rvi->is_restrict_var = 1;
	  insert_vi_for_tree (heapvar, rvi);
	  make_constraint_from (newvi, rvi->id);
	  make_param_constraints (rvi);
	}
      if (i + 1 < fieldstack.length ())
	{
	  varinfo_t tem = new_var_info (decl, name, false);
	  newvi->next = tem->id;
	  tem->head = vi->id;
	}
    }

  return vi;
}
6350
/* Create varinfo structures for all of the (sub-)variables of DECL, name
   them NAME and register the DECL -> varinfo mapping.  If ADD_ID is true
   append the variable id to NAME for dumping purposes.  For global
   variables also generate the initial points-to constraints.  Returns
   the id of the head varinfo.  */

static unsigned int
create_variable_info_for (tree decl, const char *name, bool add_id)
{
  /* First see if we are dealing with an ifunc resolver call and
     associate that with a call to the resolver function result.  */
  cgraph_node *node;
  if (in_ipa_mode
      && TREE_CODE (decl) == FUNCTION_DECL
      && (node = cgraph_node::get (decl))
      && node->ifunc_resolver)
    {
      /* Model the ifunc symbol as a fresh register variable whose
	 solution is the result of the actual resolver function.  */
      varinfo_t fi = get_vi_for_tree (node->get_alias_target ()->decl);
      constraint_expr rhs
	= get_function_part_constraint (fi, fi_result);
      fi = new_var_info (NULL_TREE, "ifuncres", true);
      fi->is_reg_var = true;
      constraint_expr lhs;
      lhs.type = SCALAR;
      lhs.var = fi->id;
      lhs.offset = 0;
      process_constraint (new_constraint (lhs, rhs));
      insert_vi_for_tree (decl, fi);
      return fi->id;
    }

  varinfo_t vi = create_variable_info_for_1 (decl, name, add_id, false, NULL);
  unsigned int id = vi->id;

  insert_vi_for_tree (decl, vi);

  /* Only VAR_DECLs get the global-initializer treatment below.  */
  if (!VAR_P (decl))
    return id;

  /* Create initial constraints for globals.  Walk all sub-fields of
     the just created varinfo chain.  */
  for (; vi; vi = vi_next (vi))
    {
      if (!vi->may_have_pointers
	  || !vi->is_global_var)
	continue;

      /* Mark global restrict qualified pointers.  */
      if ((POINTER_TYPE_P (TREE_TYPE (decl))
	   && TYPE_RESTRICT (TREE_TYPE (decl)))
	  || vi->only_restrict_pointers)
	{
	  varinfo_t rvi
	    = make_constraint_from_global_restrict (vi, "GLOBAL_RESTRICT",
						    true);
	  /* ??? For now exclude reads from globals as restrict sources
	     if those are not (indirectly) from incoming parameters.  */
	  rvi->is_restrict_var = false;
	  continue;
	}

      /* In non-IPA mode the initializer from nonlocal is all we need.  */
      if (!in_ipa_mode
	  || DECL_HARD_REGISTER (decl))
	make_copy_constraint (vi, nonlocal_id);

      /* In IPA mode parse the initializer and generate proper constraints
	 for it.  */
      else
	{
	  varpool_node *vnode = varpool_node::get (decl);

	  /* For escaped variables initialize them from nonlocal.  */
	  if (!vnode->all_refs_explicit_p ())
	    make_copy_constraint (vi, nonlocal_id);

	  /* If this is a global variable with an initializer and we are in
	     IPA mode generate constraints for it.  Each reference in the
	     initializer becomes an address-of constraint.  */
	  ipa_ref *ref;
	  for (unsigned idx = 0; vnode->iterate_reference (idx, ref); ++idx)
	    {
	      auto_vec<ce_s> rhsc;
	      struct constraint_expr lhs, *rhsp;
	      unsigned i;
	      get_constraint_for_address_of (ref->referred->decl, &rhsc);
	      lhs.var = vi->id;
	      lhs.offset = 0;
	      lhs.type = SCALAR;
	      FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		process_constraint (new_constraint (lhs, *rhsp));
	      /* If this is a variable that escapes from the unit
		 the initializer escapes as well.  */
	      if (!vnode->all_refs_explicit_p ())
		{
		  lhs.var = escaped_id;
		  lhs.offset = 0;
		  lhs.type = SCALAR;
		  FOR_EACH_VEC_ELT (rhsc, i, rhsp)
		    process_constraint (new_constraint (lhs, *rhsp));
		}
	    }
	}
    }

  return id;
}
6450
6451 /* Print out the points-to solution for VAR to FILE. */
6452
6453 static void
6454 dump_solution_for_var (FILE *file, unsigned int var)
6455 {
6456 varinfo_t vi = get_varinfo (var);
6457 unsigned int i;
6458 bitmap_iterator bi;
6459
6460 /* Dump the solution for unified vars anyway, this avoids difficulties
6461 in scanning dumps in the testsuite. */
6462 fprintf (file, "%s = { ", vi->name);
6463 vi = get_varinfo (find (var));
6464 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
6465 fprintf (file, "%s ", get_varinfo (i)->name);
6466 fprintf (file, "}");
6467
6468 /* But note when the variable was unified. */
6469 if (vi->id != var)
6470 fprintf (file, " same as %s", vi->name);
6471
6472 fprintf (file, "\n");
6473 }
6474
6475 /* Print the points-to solution for VAR to stderr. */
6476
DEBUG_FUNCTION void
debug_solution_for_var (unsigned int var)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_solution_for_var (stderr, var);
}
6482
6483 /* Register the constraints for function parameter related VI. */
6484
6485 static void
6486 make_param_constraints (varinfo_t vi)
6487 {
6488 for (; vi; vi = vi_next (vi))
6489 {
6490 if (vi->only_restrict_pointers)
6491 ;
6492 else if (vi->may_have_pointers)
6493 make_constraint_from (vi, nonlocal_id);
6494
6495 if (vi->is_full_var)
6496 break;
6497 }
6498 }
6499
6500 /* Create varinfo structures for all of the variables in the
6501 function for intraprocedural mode. */
6502
static void
intra_create_variable_infos (struct function *fn)
{
  tree t;
  /* Lazily allocated set of restrict-pointed struct types we already
     created a PARM_NOALIAS variable for, to avoid unbounded recursion.  */
  bitmap handled_struct_type = NULL;
  bool this_parm_in_ctor = DECL_CXX_CONSTRUCTOR_P (fn->decl);

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if it is a restrict qualified
     passed-by-reference argument.  */
  for (t = DECL_ARGUMENTS (fn->decl); t; t = DECL_CHAIN (t))
    {
      if (handled_struct_type == NULL)
	handled_struct_type = BITMAP_ALLOC (NULL);

      varinfo_t p
	= create_variable_info_for_1 (t, alias_get_name (t), false, true,
				      handled_struct_type, this_parm_in_ctor);
      insert_vi_for_tree (t, p);

      make_param_constraints (p);

      /* Only the first parameter can be the 'this' of a constructor.  */
      this_parm_in_ctor = false;
    }

  if (handled_struct_type != NULL)
    BITMAP_FREE (handled_struct_type);

  /* Add a constraint for a result decl that is passed by reference.  */
  if (DECL_RESULT (fn->decl)
      && DECL_BY_REFERENCE (DECL_RESULT (fn->decl)))
    {
      varinfo_t p, result_vi = get_vi_for_tree (DECL_RESULT (fn->decl));

      for (p = result_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (fn->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (fn->static_chain_decl);

      for (p = chain_vi; p; p = vi_next (p))
	make_constraint_from (p, nonlocal_id);
    }
}
6550
6551 /* Structure used to put solution bitmaps in a hashtable so they can
6552 be shared among variables with the same points-to set. */
6553
typedef struct shared_bitmap_info
{
  bitmap pt_vars;	/* The shared points-to variable bitmap.  */
  hashval_t hashcode;	/* Cached hash of PT_VARS, computed once.  */
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;
6560
6561 /* Shared_bitmap hashtable helpers. */
6562
/* Hasher whose entries own their shared_bitmap_info (freed via
   free_ptr_hash).  Hashing uses the cached hashcode; equality is full
   bitmap comparison.  */
struct shared_bitmap_hasher : free_ptr_hash <shared_bitmap_info>
{
  static inline hashval_t hash (const shared_bitmap_info *);
  static inline bool equal (const shared_bitmap_info *,
			    const shared_bitmap_info *);
};
6569
6570 /* Hash function for a shared_bitmap_info_t */
6571
inline hashval_t
shared_bitmap_hasher::hash (const shared_bitmap_info *bi)
{
  /* The hash was computed via bitmap_hash at insertion/lookup time.  */
  return bi->hashcode;
}
6577
6578 /* Equality function for two shared_bitmap_info_t's. */
6579
inline bool
shared_bitmap_hasher::equal (const shared_bitmap_info *sbi1,
			     const shared_bitmap_info *sbi2)
{
  /* Hash collisions are resolved by comparing the full bitmaps.  */
  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
}
6586
/* Shared_bitmap hashtable, allocated in init_alias_vars.  Used to
   de-duplicate identical points-to bitmaps across variables.  */

static hash_table<shared_bitmap_hasher> *shared_bitmap_table;
6590
6591 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
6592 existing instance if there is one, NULL otherwise. */
6593
6594 static bitmap
6595 shared_bitmap_lookup (bitmap pt_vars)
6596 {
6597 shared_bitmap_info **slot;
6598 struct shared_bitmap_info sbi;
6599
6600 sbi.pt_vars = pt_vars;
6601 sbi.hashcode = bitmap_hash (pt_vars);
6602
6603 slot = shared_bitmap_table->find_slot (&sbi, NO_INSERT);
6604 if (!slot)
6605 return NULL;
6606 else
6607 return (*slot)->pt_vars;
6608 }
6609
6610
6611 /* Add a bitmap to the shared bitmap hashtable. */
6612
6613 static void
6614 shared_bitmap_add (bitmap pt_vars)
6615 {
6616 shared_bitmap_info **slot;
6617 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
6618
6619 sbi->pt_vars = pt_vars;
6620 sbi->hashcode = bitmap_hash (pt_vars);
6621
6622 slot = shared_bitmap_table->find_slot (sbi, INSERT);
6623 gcc_assert (!*slot);
6624 *slot = sbi;
6625 }
6626
6627
6628 /* Set bits in INTO corresponding to the variable uids in solution set FROM. */
6629
static void
set_uids_in_ptset (bitmap into, bitmap from, struct pt_solution *pt,
		   tree fndecl)
{
  unsigned int i;
  bitmap_iterator bi;
  varinfo_t escaped_vi = get_varinfo (find (escaped_id));
  /* If ESCAPED contains ANYTHING then every var is considered escaped.  */
  bool everything_escaped
    = escaped_vi->solution && bitmap_bit_p (escaped_vi->solution, anything_id);

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* Artificial variables (ESCAPED, NONLOCAL, ...) are translated to
	 pt_solution flags by the caller, not recorded as vars.  */
      if (vi->is_artificial_var)
	continue;

      if (everything_escaped
	  || (escaped_vi->solution
	      && bitmap_bit_p (escaped_vi->solution, i)))
	{
	  pt->vars_contains_escaped = true;
	  pt->vars_contains_escaped_heap |= vi->is_heap_var;
	}

      if (vi->is_restrict_var)
	pt->vars_contains_restrict = true;

      if (VAR_P (vi->decl)
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* If we are in IPA mode we will not recompute points-to
	     sets after inlining so make sure they stay valid.  */
	  if (in_ipa_mode
	      && !DECL_PT_UID_SET_P (vi->decl))
	    SET_DECL_PT_UID (vi->decl, DECL_UID (vi->decl));

	  /* Add the decl to the points-to set.  Note that the points-to
	     set contains global variables.  */
	  bitmap_set_bit (into, DECL_PT_UID (vi->decl));
	  if (vi->is_global_var
	      /* In IPA mode the escaped_heap trick doesn't work as
		 ESCAPED is escaped from the unit but
		 pt_solution_includes_global needs to answer true for
		 all variables not automatic within a function.
		 For the same reason is_global_var is not the
		 correct flag to track - local variables from other
		 functions also need to be considered global.
		 Conveniently all HEAP vars are not put in function
		 scope.  */
	      || (in_ipa_mode
		  && fndecl
		  && ! auto_var_in_fn_p (vi->decl, fndecl)))
	    pt->vars_contains_nonlocal = true;

	  /* If we have a variable that is interposable record that fact
	     for pointer comparison simplification.  */
	  if (VAR_P (vi->decl)
	      && (TREE_STATIC (vi->decl) || DECL_EXTERNAL (vi->decl))
	      && ! decl_binds_to_current_def_p (vi->decl))
	    pt->vars_contains_interposable = true;

	  /* If this is a local variable we can have overlapping lifetime
	     of different function invocations through recursion duplicate
	     it with its shadow variable.  */
	  if (in_ipa_mode
	      && vi->shadow_var_uid != 0)
	    {
	      bitmap_set_bit (into, vi->shadow_var_uid);
	      pt->vars_contains_nonlocal = true;
	    }
	}

      else if (TREE_CODE (vi->decl) == FUNCTION_DECL
	       || TREE_CODE (vi->decl) == LABEL_DECL)
	{
	  /* Nothing should read/write from/to code so we can
	     save bits by not including them in the points-to bitmaps.
	     Still mark the points-to set as containing global memory
	     to make code-patching possible - see PR70128.  */
	  pt->vars_contains_nonlocal = true;
	}
    }
}
6715
6716
6717 /* Compute the points-to solution *PT for the variable VI. */
6718
static struct pt_solution
find_what_var_points_to (tree fndecl, varinfo_t orig_vi)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap finished_solution;
  bitmap result;
  varinfo_t vi;
  struct pt_solution *pt;

  /* This variable may have been collapsed, let's get the real
     variable.  */
  vi = get_varinfo (find (orig_vi->id));

  /* See if we have already computed the solution and return it.
     Solutions are cached per representative variable.  */
  pt_solution **slot = &final_solutions->get_or_insert (vi);
  if (*slot != NULL)
    return **slot;

  /* Allocate and zero-initialize the cached solution on the
     final-solutions obstack.  */
  *slot = pt = XOBNEW (&final_solutions_obstack, struct pt_solution);
  memset (pt, 0, sizeof (struct pt_solution));

  /* Translate artificial variables into SSA_NAME_PTR_INFO
     attributes.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  if (vi->id == nothing_id)
	    pt->null = 1;
	  else if (vi->id == escaped_id)
	    {
	      if (in_ipa_mode)
		pt->ipa_escaped = 1;
	      else
		pt->escaped = 1;
	      /* Expand some special vars of ESCAPED in-place here.  */
	      varinfo_t evi = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (evi->solution, nonlocal_id))
		pt->nonlocal = 1;
	    }
	  else if (vi->id == nonlocal_id)
	    pt->nonlocal = 1;
	  else if (vi->id == string_id)
	    /* Nobody cares - STRING_CSTs are read-only entities.  */
	    ;
	  else if (vi->id == anything_id
		   || vi->id == integer_id)
	    pt->anything = 1;
	}
    }

  /* Instead of doing extra work, simply do not create
     elaborate points-to information for pt_anything pointers.  */
  if (pt->anything)
    return *pt;

  /* Share the final set of variables when possible.  */
  finished_solution = BITMAP_GGC_ALLOC ();
  stats.points_to_sets_created++;

  set_uids_in_ptset (finished_solution, vi->solution, pt, fndecl);
  result = shared_bitmap_lookup (finished_solution);
  if (!result)
    {
      shared_bitmap_add (finished_solution);
      pt->vars = finished_solution;
    }
  else
    {
      /* An identical bitmap already exists - reuse it and discard
	 the freshly built one.  */
      pt->vars = result;
      bitmap_clear (finished_solution);
    }

  return *pt;
}
6797
6798 /* Given a pointer variable P, fill in its points-to set. */
6799
static void
find_what_p_points_to (tree fndecl, tree p)
{
  struct ptr_info_def *pi;
  tree lookup_p = p;
  varinfo_t vi;
  /* Query the range machinery for a global nonnull property of P
     before we overwrite pi->pt below.  */
  value_range vr;
  get_range_query (DECL_STRUCT_FUNCTION (fndecl))->range_of_expr (vr, p);
  bool nonnull = vr.nonzero_p ();

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (p)
      && (TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (p)) == RESULT_DECL))
    lookup_p = SSA_NAME_VAR (p);

  /* No varinfo means no constraints were generated for P.  */
  vi = lookup_vi_for_tree (lookup_p);
  if (!vi)
    return;

  pi = get_ptr_info (p);
  pi->pt = find_what_var_points_to (fndecl, vi);
  /* Conservatively set to NULL from PTA (to true).  */
  pi->pt.null = 1;
  /* Preserve pointer nonnull globally computed.  */
  if (nonnull)
    set_ptr_nonnull (p);
}
6830
6831
6832 /* Query statistics for points-to solutions. */
6833
/* Counters for alias-oracle queries answered via the points-to
   solutions; "no_alias" counts successful disambiguations.  */
static struct {
  unsigned HOST_WIDE_INT pt_solution_includes_may_alias;
  unsigned HOST_WIDE_INT pt_solution_includes_no_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_may_alias;
  unsigned HOST_WIDE_INT pt_solutions_intersect_no_alias;
} pta_stats;
6840
/* Dump the accumulated PTA query statistics to stream S.  */

void
dump_pta_stats (FILE *s)
{
  fprintf (s, "\nPTA query stats:\n");
  fprintf (s, "  pt_solution_includes: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solution_includes_no_alias,
	   pta_stats.pt_solution_includes_no_alias
	   + pta_stats.pt_solution_includes_may_alias);
  fprintf (s, "  pt_solutions_intersect: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   pta_stats.pt_solutions_intersect_no_alias,
	   pta_stats.pt_solutions_intersect_no_alias
	   + pta_stats.pt_solutions_intersect_may_alias);
}
6858
6859
6860 /* Reset the points-to solution *PT to a conservative default
6861 (point to anything). */
6862
void
pt_solution_reset (struct pt_solution *pt)
{
  memset (pt, 0, sizeof (struct pt_solution));
  /* Pointing to anything also conservatively includes NULL.  */
  pt->anything = true;
  pt->null = true;
}
6870
/* Set the points-to solution *PT to point only to the variables
   in VARS.  VARS_CONTAINS_NONLOCAL specifies whether that contains
   global variables.  */
6875
void
pt_solution_set (struct pt_solution *pt, bitmap vars,
		 bool vars_contains_nonlocal)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = vars;
  pt->vars_contains_nonlocal = vars_contains_nonlocal;
  /* Derive the escaped flag from the current function's escape
     solution.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_intersect_p (cfun->gimple_df->escaped.vars, vars));
}
6887
6888 /* Set the points-to solution *PT to point only to the variable VAR. */
6889
void
pt_solution_set_var (struct pt_solution *pt, tree var)
{
  memset (pt, 0, sizeof (struct pt_solution));
  pt->vars = BITMAP_GGC_ALLOC ();
  /* Points-to bitmaps are keyed on DECL_PT_UID, not DECL_UID.  */
  bitmap_set_bit (pt->vars, DECL_PT_UID (var));
  pt->vars_contains_nonlocal = is_global_var (var);
  /* Derive the escaped flag from the current function's escape
     solution.  */
  pt->vars_contains_escaped
    = (cfun->gimple_df->escaped.anything
       || bitmap_bit_p (cfun->gimple_df->escaped.vars, DECL_PT_UID (var)));
}
6901
6902 /* Computes the union of the points-to solutions *DEST and *SRC and
6903 stores the result in *DEST. This changes the points-to bitmap
6904 of *DEST and thus may not be used if that might be shared.
6905 The points-to bitmap of *SRC and *DEST will not be shared after
6906 this function if they were not before. */
6907
6908 static void
6909 pt_solution_ior_into (struct pt_solution *dest, struct pt_solution *src)
6910 {
6911 dest->anything |= src->anything;
6912 if (dest->anything)
6913 {
6914 pt_solution_reset (dest);
6915 return;
6916 }
6917
6918 dest->nonlocal |= src->nonlocal;
6919 dest->escaped |= src->escaped;
6920 dest->ipa_escaped |= src->ipa_escaped;
6921 dest->null |= src->null;
6922 dest->vars_contains_nonlocal |= src->vars_contains_nonlocal;
6923 dest->vars_contains_escaped |= src->vars_contains_escaped;
6924 dest->vars_contains_escaped_heap |= src->vars_contains_escaped_heap;
6925 if (!src->vars)
6926 return;
6927
6928 if (!dest->vars)
6929 dest->vars = BITMAP_GGC_ALLOC ();
6930 bitmap_ior_into (dest->vars, src->vars);
6931 }
6932
6933 /* Return true if the points-to solution *PT is empty. */
6934
6935 bool
6936 pt_solution_empty_p (const pt_solution *pt)
6937 {
6938 if (pt->anything
6939 || pt->nonlocal)
6940 return false;
6941
6942 if (pt->vars
6943 && !bitmap_empty_p (pt->vars))
6944 return false;
6945
6946 /* If the solution includes ESCAPED, check if that is empty. */
6947 if (pt->escaped
6948 && !pt_solution_empty_p (&cfun->gimple_df->escaped))
6949 return false;
6950
6951 /* If the solution includes ESCAPED, check if that is empty. */
6952 if (pt->ipa_escaped
6953 && !pt_solution_empty_p (&ipa_escaped_pt))
6954 return false;
6955
6956 return true;
6957 }
6958
6959 /* Return true if the points-to solution *PT only point to a single var, and
6960 return the var uid in *UID. */
6961
6962 bool
6963 pt_solution_singleton_or_null_p (struct pt_solution *pt, unsigned *uid)
6964 {
6965 if (pt->anything || pt->nonlocal || pt->escaped || pt->ipa_escaped
6966 || pt->vars == NULL
6967 || !bitmap_single_bit_set_p (pt->vars))
6968 return false;
6969
6970 *uid = bitmap_first_set_bit (pt->vars);
6971 return true;
6972 }
6973
6974 /* Return true if the points-to solution *PT includes global memory.
6975 If ESCAPED_LOCAL_P is true then escaped local variables are also
6976 considered global. */
6977
bool
pt_solution_includes_global (struct pt_solution *pt, bool escaped_local_p)
{
  if (pt->anything
      || pt->nonlocal
      || pt->vars_contains_nonlocal
      /* The following is a hack to make the malloc escape hack work.
	 In reality we'd need different sets for escaped-through-return
	 and escaped-to-callees and passes would need to be updated.  */
      || pt->vars_contains_escaped_heap)
    return true;

  /* Callers may ask for escaped locals to count as global as well.  */
  if (escaped_local_p && pt->vars_contains_escaped)
    return true;

  /* 'escaped' is also a placeholder so we have to look into it.  */
  if (pt->escaped)
    return pt_solution_includes_global (&cfun->gimple_df->escaped,
					escaped_local_p);

  /* Likewise for the IPA escape solution.  */
  if (pt->ipa_escaped)
    return pt_solution_includes_global (&ipa_escaped_pt,
					escaped_local_p);

  return false;
}
7004
7005 /* Return true if the points-to solution *PT includes the variable
7006 declaration DECL. */
7007
static bool
pt_solution_includes_1 (struct pt_solution *pt, const_tree decl)
{
  if (pt->anything)
    return true;

  /* NONLOCAL covers every global variable.  */
  if (pt->nonlocal
      && is_global_var (decl))
    return true;

  if (pt->vars
      && bitmap_bit_p (pt->vars, DECL_PT_UID (decl)))
    return true;

  /* If the solution includes ESCAPED, check it.  */
  if (pt->escaped
      && pt_solution_includes_1 (&cfun->gimple_df->escaped, decl))
    return true;

  /* If the solution includes the IPA ESCAPED solution, check it.  */
  if (pt->ipa_escaped
      && pt_solution_includes_1 (&ipa_escaped_pt, decl))
    return true;

  return false;
}
7034
7035 bool
7036 pt_solution_includes (struct pt_solution *pt, const_tree decl)
7037 {
7038 bool res = pt_solution_includes_1 (pt, decl);
7039 if (res)
7040 ++pta_stats.pt_solution_includes_may_alias;
7041 else
7042 ++pta_stats.pt_solution_includes_no_alias;
7043 return res;
7044 }
7045
7046 /* Return true if both points-to solutions PT1 and PT2 have a non-empty
7047 intersection. */
7048
static bool
pt_solutions_intersect_1 (struct pt_solution *pt1, struct pt_solution *pt2)
{
  if (pt1->anything || pt2->anything)
    return true;

  /* If either points to unknown global memory and the other points to
     any global memory they alias.  */
  if ((pt1->nonlocal
       && (pt2->nonlocal
	   || pt2->vars_contains_nonlocal))
      || (pt2->nonlocal
	  && pt1->vars_contains_nonlocal))
    return true;

  /* If either points to all escaped memory and the other points to
     any escaped memory they alias.  */
  if ((pt1->escaped
       && (pt2->escaped
	   || pt2->vars_contains_escaped))
      || (pt2->escaped
	  && pt1->vars_contains_escaped))
    return true;

  /* Check the escaped solution if required.
     ??? Do we need to check the local against the IPA escaped sets?  */
  if ((pt1->ipa_escaped || pt2->ipa_escaped)
      && !pt_solution_empty_p (&ipa_escaped_pt))
    {
      /* If both point to escaped memory and that solution
	 is not empty they alias.  */
      if (pt1->ipa_escaped && pt2->ipa_escaped)
	return true;

      /* If either points to escaped memory see if the escaped solution
	 intersects with the other.  */
      if ((pt1->ipa_escaped
	   && pt_solutions_intersect_1 (&ipa_escaped_pt, pt2))
	  || (pt2->ipa_escaped
	      && pt_solutions_intersect_1 (&ipa_escaped_pt, pt1)))
	return true;
    }

  /* Now both pointers alias if their points-to solution intersects.  */
  return (pt1->vars
	  && pt2->vars
	  && bitmap_intersect_p (pt1->vars, pt2->vars));
}
7097
7098 bool
7099 pt_solutions_intersect (struct pt_solution *pt1, struct pt_solution *pt2)
7100 {
7101 bool res = pt_solutions_intersect_1 (pt1, pt2);
7102 if (res)
7103 ++pta_stats.pt_solutions_intersect_may_alias;
7104 else
7105 ++pta_stats.pt_solutions_intersect_no_alias;
7106 return res;
7107 }
7108
7109
7110 /* Dump points-to information to OUTFILE. */
7111
static void
dump_sa_points_to_info (FILE *outfile)
{
  unsigned int i;

  fprintf (outfile, "\nPoints-to sets\n\n");

  /* Solver statistics are only printed when -fdump-...-stats is on.  */
  if (dump_flags & TDF_STATS)
    {
      fprintf (outfile, "Stats:\n");
      fprintf (outfile, "Total vars:               %d\n", stats.total_vars);
      fprintf (outfile, "Non-pointer vars:          %d\n",
	       stats.nonpointer_vars);
      fprintf (outfile, "Statically unified vars:  %d\n",
	       stats.unified_vars_static);
      fprintf (outfile, "Dynamically unified vars: %d\n",
	       stats.unified_vars_dynamic);
      fprintf (outfile, "Iterations:               %d\n", stats.iterations);
      fprintf (outfile, "Number of edges:          %d\n", stats.num_edges);
      fprintf (outfile, "Number of implicit edges: %d\n",
	       stats.num_implicit_edges);
    }

  /* Index 0 is the reserved NULL slot, so start at 1.  */
  for (i = 1; i < varmap.length (); i++)
    {
      varinfo_t vi = get_varinfo (i);
      if (!vi->may_have_pointers)
	continue;
      dump_solution_for_var (outfile, i);
    }
}
7143
7144
7145 /* Debug points-to information to stderr. */
7146
7147 DEBUG_FUNCTION void
7148 debug_sa_points_to_info (void)
7149 {
7150 dump_sa_points_to_info (stderr);
7151 }
7152
7153
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   STRING, ESCAPED, NONLOCAL, STOREDANYTHING and INTEGER, together with
   their base constraints.  */
7156
static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;
  varinfo_t var_anything;
  varinfo_t var_nothing;
  varinfo_t var_string;
  varinfo_t var_escaped;
  varinfo_t var_nonlocal;
  varinfo_t var_storedanything;
  varinfo_t var_integer;

  /* Variable ID zero is reserved and should be NULL.  */
  varmap.safe_push (NULL);

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  Note the creation order below must match the fixed
     nothing_id/anything_id/... constants (asserted below).  */
  var_nothing = new_var_info (NULL_TREE, "NULL", false);
  gcc_assert (var_nothing->id == nothing_id);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  var_nothing->may_have_pointers = 0;
  var_nothing->is_global_var = 0;

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  var_anything = new_var_info (NULL_TREE, "ANYTHING", false);
  gcc_assert (var_anything->id == anything_id);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  constraints.safe_push (new_constraint (lhs, rhs));

  /* Create the STRING variable, used to represent that a variable
     points to a string literal.  String literals don't contain
     pointers so STRING doesn't point to anything.  */
  var_string = new_var_info (NULL_TREE, "STRING", false);
  gcc_assert (var_string->id == string_id);
  var_string->is_artificial_var = 1;
  var_string->offset = 0;
  var_string->size = ~0;
  var_string->fullsize = ~0;
  var_string->is_special_var = 1;
  var_string->may_have_pointers = 0;

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  var_escaped = new_var_info (NULL_TREE, "ESCAPED", false);
  gcc_assert (var_escaped->id == escaped_id);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  /* ESCAPED is not special: it participates in the solver like a
     regular variable.  */
  var_escaped->is_special_var = 0;

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  var_nonlocal = new_var_info (NULL_TREE, "NONLOCAL", false);
  gcc_assert (var_nonlocal->id == nonlocal_id);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* ESCAPED = ESCAPED + UNKNOWN_OFFSET, because if a sub-field escapes the
     whole variable escapes.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = escaped_id;
  rhs.offset = UNKNOWN_OFFSET;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped points to what global memory can
     point to.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = SCALAR;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* NONLOCAL = &NONLOCAL, NONLOCAL = &ESCAPED.  This is true because
     global memory may point to global memory and escaped memory.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the STOREDANYTHING variable, used to represent the set of
     variables stored to *ANYTHING.  */
  var_storedanything = new_var_info (NULL_TREE, "STOREDANYTHING", false);
  gcc_assert (var_storedanything->id == storedanything_id);
  var_storedanything->is_artificial_var = 1;
  var_storedanything->offset = 0;
  var_storedanything->size = ~0;
  var_storedanything->fullsize = ~0;
  var_storedanything->is_special_var = 0;

  /* Create the INTEGER variable, used to represent that a variable points
     to what an INTEGER "points to".  */
  var_integer = new_var_info (NULL_TREE, "INTEGER", false);
  gcc_assert (var_integer->id == integer_id);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->is_special_var = 1;

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
7315
/* Initialize things necessary to perform PTA */

static void
init_alias_vars (void)
{
  /* Field-sensitive analysis only pays off when a variable may be
     split into more than one field.  */
  use_field_sensitive = (param_max_fields_for_field_sensitive > 1);

  /* Obstacks backing the points-to bitmaps: current solutions, the
     previous-iteration solutions and the predecessor-graph bitmaps.  */
  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraints.create (8);
  varmap.create (8);
  vi_for_tree = new hash_map<tree, varinfo_t>;
  call_stmt_vars = new hash_map<gimple *, varinfo_t>;

  memset (&stats, 0, sizeof (stats));
  shared_bitmap_table = new hash_table<shared_bitmap_hasher> (511);
  /* Create the artificial variables (ANYTHING, ESCAPED, NONLOCAL, ...)
     and the generic constraints relating them.  */
  init_base_vars ();

  gcc_obstack_init (&fake_var_decl_obstack);

  final_solutions = new hash_map<varinfo_t, pt_solution *>;
  gcc_obstack_init (&final_solutions_obstack);
}
7341
7342 /* Remove the REF and ADDRESS edges from GRAPH, as well as all the
7343 predecessor edges. */
7344
7345 static void
7346 remove_preds_and_fake_succs (constraint_graph_t graph)
7347 {
7348 unsigned int i;
7349
7350 /* Clear the implicit ref and address nodes from the successor
7351 lists. */
7352 for (i = 1; i < FIRST_REF_NODE; i++)
7353 {
7354 if (graph->succs[i])
7355 bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
7356 FIRST_REF_NODE * 2);
7357 }
7358
7359 /* Free the successor list for the non-ref nodes. */
7360 for (i = FIRST_REF_NODE + 1; i < graph->size; i++)
7361 {
7362 if (graph->succs[i])
7363 BITMAP_FREE (graph->succs[i]);
7364 }
7365
7366 /* Now reallocate the size of the successor list as, and blow away
7367 the predecessor bitmaps. */
7368 graph->size = varmap.length ();
7369 graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
7370
7371 free (graph->implicit_preds);
7372 graph->implicit_preds = NULL;
7373 free (graph->preds);
7374 graph->preds = NULL;
7375 bitmap_obstack_release (&predbitmap_obstack);
7376 }
7377
/* Solve the constraint set.  */

static void
solve_constraints (void)
{
  class scc_info *si;

  /* Sort varinfos so that ones that cannot be pointed to are last.
     This makes bitmaps more efficient.  */
  unsigned int *map = XNEWVEC (unsigned int, varmap.length ());
  /* The special variables up to and including INTEGER keep their ids.  */
  for (unsigned i = 0; i < integer_id + 1; ++i)
    map[i] = i;
  /* Start with address-taken vars, followed by not address-taken vars
     to move vars never appearing in the points-to solution bitmaps last.  */
  unsigned j = integer_id + 1;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    if (! varmap[varmap[i]->head]->address_taken)
      map[i] = j++;
  /* Shuffle varmap according to map.  */
  for (unsigned i = integer_id + 1; i < varmap.length (); ++i)
    {
      /* Cycle-chase swaps until slot I holds the variable destined for
	 it, then fix up its id and the intra-variable field links.  */
      while (map[varmap[i]->id] != i)
	std::swap (varmap[i], varmap[map[varmap[i]->id]]);
      gcc_assert (bitmap_empty_p (varmap[i]->solution));
      varmap[i]->id = i;
      varmap[i]->next = map[varmap[i]->next];
      varmap[i]->head = map[varmap[i]->head];
    }
  /* Finally rewrite constraints.  */
  for (unsigned i = 0; i < constraints.length (); ++i)
    {
      constraints[i]->lhs.var = map[constraints[i]->lhs.var];
      constraints[i]->rhs.var = map[constraints[i]->rhs.var];
    }
  free (map);

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the variable count: the second half holds the fake REF
     nodes used by the offline phases.  */
  init_graph (varmap.length () * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);

  build_succ_graph ();

  free_var_substitution_info (si);

  /* Attach complex constraints to graph nodes.  */
  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph before solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  /* Propagate the solutions to a fixed point.  */
  solve_graph (graph);

  if (dump_file && (dump_flags & TDF_GRAPH))
    {
      fprintf (dump_file, "\n\n// The constraint graph after solve-graph "
	       "in dot format:\n");
      dump_constraint_graph (dump_file);
      fprintf (dump_file, "\n\n");
    }
}
7479
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  */

static void
compute_points_to_sets (void)
{
  basic_block bb;
  varinfo_t vi;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();

  intra_create_variable_infos (cfun);

  /* Now walk all statements and build the constraint set.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();

	  /* Virtual PHIs carry no pointer values, so skip them.  */
	  if (! virtual_operand_p (gimple_phi_result (phi)))
	    find_func_aliases (cfun, phi);
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  find_func_aliases (cfun, stmt);
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file, 0);
    }

  /* From the constraints compute the points-to sets.  */
  solve_constraints ();

  /* Post-process solutions for escapes through returns.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src)))
      {
	tree val = gimple_return_retval (ret);
	/* ??? Easy to handle simple indirections with some work.
	   Arbitrary references like foo.bar.baz are more difficult
	   (but conservatively easy enough with just looking at the base).
	   Mind to fixup find_func_aliases as well.  */
	if (!val || !SSA_VAR_P (val))
	  continue;
	/* returns happen last in non-IPA so they only influence
	   the ESCAPED solution and we can filter local variables.  */
	varinfo_t escaped_vi = get_varinfo (find (escaped_id));
	varinfo_t vi = lookup_vi_for_tree (val);
	bitmap delta = BITMAP_ALLOC (&pta_obstack);
	bitmap_iterator bi;
	unsigned i;
	/* Seed DELTA with global/heap variables pointed to by any
	   sub-field of the returned value that are not yet in the
	   ESCAPED solution.  */
	for (; vi; vi = vi_next (vi))
	  {
	    varinfo_t part_vi = get_varinfo (find (vi->id));
	    EXECUTE_IF_AND_COMPL_IN_BITMAP (part_vi->solution,
					    escaped_vi->solution, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		if (pointed_to_vi->is_global_var
		    /* We delay marking of heap memory as global.  */
		    || pointed_to_vi->is_heap_var)
		  bitmap_set_bit (delta, i);
	      }
	  }

	/* Now compute the transitive closure.  Iterate until no new
	   global/heap variables are added to ESCAPED.  */
	bitmap_ior_into (escaped_vi->solution, delta);
	bitmap new_delta = BITMAP_ALLOC (&pta_obstack);
	while (!bitmap_empty_p (delta))
	  {
	    EXECUTE_IF_SET_IN_BITMAP (delta, 0, i, bi)
	      {
		varinfo_t pointed_to_vi = get_varinfo (i);
		pointed_to_vi = get_varinfo (find (pointed_to_vi->id));
		unsigned j;
		bitmap_iterator bi2;
		EXECUTE_IF_AND_COMPL_IN_BITMAP (pointed_to_vi->solution,
						escaped_vi->solution,
						0, j, bi2)
		  {
		    varinfo_t pointed_to_vi2 = get_varinfo (j);
		    if (pointed_to_vi2->is_global_var
			/* We delay marking of heap memory as global.  */
			|| pointed_to_vi2->is_heap_var)
		      bitmap_set_bit (new_delta, j);
		  }
	      }
	    bitmap_ior_into (escaped_vi->solution, new_delta);
	    bitmap_clear (delta);
	    std::swap (delta, new_delta);
	  }
	BITMAP_FREE (delta);
	BITMAP_FREE (new_delta);
      }

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Compute the points-to set for ESCAPED used for call-clobber analysis.  */
  cfun->gimple_df->escaped = find_what_var_points_to (cfun->decl,
						      get_varinfo (escaped_id));

  /* Make sure the ESCAPED solution (which is used as placeholder in
     other solutions) does not reference itself.  This simplifies
     points-to solution queries.  */
  cfun->gimple_df->escaped.escaped = 0;

  /* Compute the points-to sets for pointer SSA_NAMEs.  */
  unsigned i;
  tree ptr;

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	find_what_p_points_to (cfun->decl, ptr);
    }

  /* Compute the call-used/clobbered sets.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  struct pt_solution *pt;

	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;

	  /* First the use set: memory the call may read.  Const calls
	     read nothing.  */
	  pt = gimple_call_use_set (stmt);
	  if (gimple_call_flags (stmt) & ECF_CONST)
	    memset (pt, 0, sizeof (struct pt_solution));
	  else
	    {
	      bool uses_global_memory = true;
	      bool reads_global_memory = true;

	      /* Refine the conservative defaults using modref/fnspec
		 information.  */
	      determine_global_memory_access (stmt, NULL,
					      &reads_global_memory,
					      &uses_global_memory);
	      if ((vi = lookup_call_use_vi (stmt)) != NULL)
		{
		  *pt = find_what_var_points_to (cfun->decl, vi);
		  /* Escaped (and thus nonlocal) variables are always
		     implicitly used by calls.  */
		  /* ??? ESCAPED can be empty even though NONLOCAL
		     always escaped.  */
		  if (uses_global_memory)
		    {
		      pt->nonlocal = 1;
		      pt->escaped = 1;
		    }
		}
	      else if (uses_global_memory)
		{
		  /* If there is nothing special about this call then
		     we have made everything that is used also escape.  */
		  *pt = cfun->gimple_df->escaped;
		  pt->nonlocal = 1;
		}
	      else
		memset (pt, 0, sizeof (struct pt_solution));
	    }

	  /* Then the clobber set: memory the call may write.  Const,
	     pure and no-vops calls write nothing.  */
	  pt = gimple_call_clobber_set (stmt);
	  if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
	    memset (pt, 0, sizeof (struct pt_solution));
	  else
	    {
	      bool writes_global_memory = true;

	      determine_global_memory_access (stmt, &writes_global_memory,
					      NULL, NULL);

	      if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
		{
		  *pt = find_what_var_points_to (cfun->decl, vi);
		  /* Escaped (and thus nonlocal) variables are always
		     implicitly clobbered by calls.  */
		  /* ??? ESCAPED can be empty even though NONLOCAL
		     always escaped.  */
		  if (writes_global_memory)
		    {
		      pt->nonlocal = 1;
		      pt->escaped = 1;
		    }
		}
	      else if (writes_global_memory)
		{
		  /* If there is nothing special about this call then
		     we have made everything that is used also escape.  */
		  *pt = cfun->gimple_df->escaped;
		  pt->nonlocal = 1;
		}
	      else
		memset (pt, 0, sizeof (struct pt_solution));
	    }
	}
    }

  timevar_pop (TV_TREE_PTA);
}
7698
7699
/* Delete created points-to sets.  */

static void
delete_points_to_sets (void)
{
  unsigned int i;

  delete shared_bitmap_table;
  shared_bitmap_table = NULL;
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  delete vi_for_tree;
  delete call_stmt_vars;
  bitmap_obstack_release (&pta_obstack);
  constraints.release ();

  /* Release the per-node complex constraint vectors before the array
     holding them.  */
  for (i = 0; i < graph->size; i++)
    graph->complex[i].release ();
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  varmap.release ();
  variable_info_pool.release ();
  constraint_pool.release ();

  obstack_free (&fake_var_decl_obstack, NULL);

  delete final_solutions;
  obstack_free (&final_solutions_obstack, NULL);
}
7738
/* Data passed to the visit_loadstore callback.  */

struct vls_data
{
  unsigned short clique;	/* Clique to assign to non-restrict accesses.  */
  bool escaped_p;		/* Whether some restrict variable escaped.  */
  bitmap rvars;			/* Set of restrict pointed-to variables.  */
};
7745
/* Mark "other" loads and stores as belonging to CLIQUE and with
   base zero.  Callback for walk_stmt_load_store_ops; DATA is a
   vls_data.  Always returns false so the walk continues.  */

static bool
visit_loadstore (gimple *, tree base, tree ref, void *data)
{
  unsigned short clique = ((vls_data *) data)->clique;
  bitmap rvars = ((vls_data *) data)->rvars;
  bool escaped_p = ((vls_data *) data)->escaped_p;
  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    {
      tree ptr = TREE_OPERAND (base, 0);
      if (TREE_CODE (ptr) == SSA_NAME)
	{
	  /* For parameters, get at the points-to set for the actual parm
	     decl.  */
	  if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
		  || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	    ptr = SSA_NAME_VAR (ptr);

	  /* We need to make sure 'ptr' doesn't include any of
	     the restrict tags we added bases for in its points-to set.  */
	  varinfo_t vi = lookup_vi_for_tree (ptr);
	  if (! vi)
	    return false;

	  vi = get_varinfo (find (vi->id));
	  if (bitmap_intersect_p (rvars, vi->solution)
	      || (escaped_p && bitmap_bit_p (vi->solution, escaped_id)))
	    return false;
	}

      /* Do not overwrite existing cliques (that includes clique, base
	 pairs we just set).  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = 0;
	}
    }

  /* For plain decl accesses see whether they are accesses to globals
     and rewrite them to MEM_REFs with { clique, 0 }.  */
  if (VAR_P (base)
      && is_global_var (base)
      /* ??? We can't rewrite a plain decl with the walk_stmt_load_store
	 ops callback.  */
      && base != ref)
    {
      /* Strip handled components to find the innermost base decl of
	 REF and replace it with a MEM_REF of its address carrying the
	 clique.  */
      tree *basep = &ref;
      while (handled_component_p (*basep))
	basep = &TREE_OPERAND (*basep, 0);
      gcc_assert (VAR_P (*basep));
      tree ptr = build_fold_addr_expr (*basep);
      tree zero = build_int_cst (TREE_TYPE (ptr), 0);
      *basep = build2 (MEM_REF, TREE_TYPE (*basep), ptr, zero);
      MR_DEPENDENCE_CLIQUE (*basep) = clique;
      MR_DEPENDENCE_BASE (*basep) = 0;
    }

  return false;
}
7810
/* Data passed to the maybe_set_dependence_info callback.  */

struct msdi_data {
  tree ptr;			/* Pointer whose dereferences are visited.  */
  unsigned short *clique;	/* In/out: the function-local clique.  */
  unsigned short *last_ruid;	/* In/out: last assigned restrict uid.  */
  varinfo_t restrict_var;	/* The restrict variable PTR points to.  */
};
7817
/* If BASE is a MEM_REF then assign a clique, base pair to it, updating
   CLIQUE, *RESTRICT_VAR and LAST_RUID as passed via DATA.
   Return whether dependence info was assigned to BASE.  */

static bool
maybe_set_dependence_info (gimple *, tree base, tree, void *data)
{
  tree ptr = ((msdi_data *)data)->ptr;
  unsigned short &clique = *((msdi_data *)data)->clique;
  unsigned short &last_ruid = *((msdi_data *)data)->last_ruid;
  varinfo_t restrict_var = ((msdi_data *)data)->restrict_var;
  /* Only memory references made directly through PTR are relevant.  */
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_OPERAND (base, 0) == ptr)
    {
      /* Do not overwrite existing cliques.  This avoids overwriting dependence
	 info inlined from a function with restrict parameters inlined
	 into a function with restrict parameters.  This usually means we
	 prefer to be precise in innermost loops.  */
      if (MR_DEPENDENCE_CLIQUE (base) == 0)
	{
	  /* Lazily allocate clique number 1 on first assignment.  */
	  if (clique == 0)
	    {
	      if (cfun->last_clique == 0)
		cfun->last_clique = 1;
	      clique = 1;
	    }
	  /* Lazily assign a restrict uid to the pointed-to variable.  */
	  if (restrict_var->ruid == 0)
	    restrict_var->ruid = ++last_ruid;
	  MR_DEPENDENCE_CLIQUE (base) = clique;
	  MR_DEPENDENCE_BASE (base) = restrict_var->ruid;
	  return true;
	}
    }
  return false;
}
7854
7855 /* Clear dependence info for the clique DATA. */
7856
7857 static bool
7858 clear_dependence_clique (gimple *, tree base, tree, void *data)
7859 {
7860 unsigned short clique = (uintptr_t)data;
7861 if ((TREE_CODE (base) == MEM_REF
7862 || TREE_CODE (base) == TARGET_MEM_REF)
7863 && MR_DEPENDENCE_CLIQUE (base) == clique)
7864 {
7865 MR_DEPENDENCE_CLIQUE (base) = 0;
7866 MR_DEPENDENCE_BASE (base) = 0;
7867 }
7868
7869 return false;
7870 }
7871
/* Compute the set of independent memory references based on restrict
   tags and their conservative propagation to the points-to sets.  */

static void
compute_dependence_clique (void)
{
  /* First clear the special "local" clique.  */
  basic_block bb;
  if (cfun->last_clique != 0)
    FOR_EACH_BB_FN (bb, cfun)
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  walk_stmt_load_store_ops (stmt, (void *)(uintptr_t) 1,
				    clear_dependence_clique,
				    clear_dependence_clique);
	}

  unsigned short clique = 0;
  unsigned short last_ruid = 0;
  bitmap rvars = BITMAP_ALLOC (NULL);
  bool escaped_p = false;
  /* Look at each pointer SSA name and check whether its points-to set
     consists of exactly one restrict variable (plus possibly NULL).  */
  for (unsigned i = 0; i < num_ssa_names; ++i)
    {
      tree ptr = ssa_name (i);
      if (!ptr || !POINTER_TYPE_P (TREE_TYPE (ptr)))
	continue;

      /* Avoid all this when ptr is not dereferenced?  */
      tree p = ptr;
      if (SSA_NAME_IS_DEFAULT_DEF (ptr)
	  && (TREE_CODE (SSA_NAME_VAR (ptr)) == PARM_DECL
	      || TREE_CODE (SSA_NAME_VAR (ptr)) == RESULT_DECL))
	p = SSA_NAME_VAR (ptr);
      varinfo_t vi = lookup_vi_for_tree (p);
      if (!vi)
	continue;
      vi = get_varinfo (find (vi->id));
      bitmap_iterator bi;
      unsigned j;
      varinfo_t restrict_var = NULL;
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
	{
	  varinfo_t oi = get_varinfo (j);
	  /* Sub-fields count as their containing variable.  */
	  if (oi->head != j)
	    oi = get_varinfo (oi->head);
	  if (oi->is_restrict_var)
	    {
	      /* Two distinct restrict variables means no exclusive
		 pointed-to, give up on this pointer.  */
	      if (restrict_var
		  && restrict_var != oi)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "found restrict pointed-to "
			       "for ");
		      print_generic_expr (dump_file, ptr);
		      fprintf (dump_file, " but not exclusively\n");
		    }
		  restrict_var = NULL;
		  break;
		}
	      restrict_var = oi;
	    }
	  /* NULL is the only other valid points-to entry.  */
	  else if (oi->id != nothing_id)
	    {
	      restrict_var = NULL;
	      break;
	    }
	}
      /* Ok, found that ptr must(!) point to a single(!) restrict
	 variable.  */
      /* ??? PTA isn't really a proper propagation engine to compute
	 this property.
	 ??? We could handle merging of two restricts by unifying them.  */
      if (restrict_var)
	{
	  /* Now look at possible dereferences of ptr.  */
	  imm_use_iterator ui;
	  gimple *use_stmt;
	  bool used = false;
	  msdi_data data = { ptr, &clique, &last_ruid, restrict_var };
	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
	    used |= walk_stmt_load_store_ops (use_stmt, &data,
					      maybe_set_dependence_info,
					      maybe_set_dependence_info);
	  if (used)
	    {
	      /* Add all subvars to the set of restrict pointed-to set.  */
	      for (unsigned sv = restrict_var->head; sv != 0;
		   sv = get_varinfo (sv)->next)
		bitmap_set_bit (rvars, sv);
	      varinfo_t escaped = get_varinfo (find (escaped_id));
	      if (bitmap_bit_p (escaped->solution, restrict_var->id))
		escaped_p = true;
	    }
	}
    }

  if (clique != 0)
    {
      /* Assign the BASE id zero to all accesses not based on a restrict
	 pointer.  That way they get disambiguated against restrict
	 accesses but not against each other.  */
      /* ??? For restricts derived from globals (thus not incoming
	 parameters) we can't restrict scoping properly thus the following
	 is too aggressive there.  For now we have excluded those globals from
	 getting into the MR_DEPENDENCE machinery.  */
      vls_data data = { clique, escaped_p, rvars };
      basic_block bb;
      FOR_EACH_BB_FN (bb, cfun)
	for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	     !gsi_end_p (gsi); gsi_next (&gsi))
	  {
	    gimple *stmt = gsi_stmt (gsi);
	    walk_stmt_load_store_ops (stmt, &data,
				      visit_loadstore, visit_loadstore);
	  }
    }

  BITMAP_FREE (rvars);
}
7995
/* Compute points-to information for every SSA_NAME pointer in the
   current function and compute the transitive closure of escaped
   variables to re-initialize the call-clobber states of local variables.  */

unsigned int
compute_may_aliases (void)
{
  /* IPA points-to information (computed by -fipa-pta) is more precise;
     do not overwrite it with local analysis.  */
  if (cfun->gimple_df->ipa_pta)
    {
      if (dump_file)
	{
	  fprintf (dump_file, "\nNot re-computing points-to information "
		   "because IPA points-to information is available.\n\n");

	  /* But still dump the alias information we have remaining.  */
	  dump_alias_info (dump_file);
	}

      return 0;
    }

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  Compute the reachability set of escaped and call-used
     variables.  */
  compute_points_to_sets ();

  /* Debugging dumps.  */
  if (dump_file)
    dump_alias_info (dump_file);

  /* Compute restrict-based memory disambiguations.  */
  compute_dependence_clique ();

  /* Deallocate memory used by aliasing data structures and the internal
     points-to solution.  */
  delete_points_to_sets ();

  gcc_assert (!need_ssa_update_p (cfun));

  return 0;
}
8037
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_alias =
{
  GIMPLE_PASS, /* type */
  "alias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_alias : public gimple_opt_pass
{
public:
  pass_build_alias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_alias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when tree points-to analysis is enabled (-ftree-pta).  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_alias

} // anon namespace
8069
/* Pass factory; the pass manager takes ownership of the returned
   object.  */

gimple_opt_pass *
make_pass_build_alias (gcc::context *ctxt)
{
  return new pass_build_alias (ctxt);
}
8075
/* A dummy pass to cause points-to information to be computed via
   TODO_rebuild_alias.  */

namespace {

const pass_data pass_data_build_ealias =
{
  GIMPLE_PASS, /* type */
  "ealias", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_rebuild_alias, /* todo_flags_finish */
};

class pass_build_ealias : public gimple_opt_pass
{
public:
  pass_build_ealias (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_ealias, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when tree points-to analysis is enabled (-ftree-pta).  */
  virtual bool gate (function *) { return flag_tree_pta; }

}; // class pass_build_ealias

} // anon namespace
8107
/* Pass factory; the pass manager takes ownership of the returned
   object.  */

gimple_opt_pass *
make_pass_build_ealias (gcc::context *ctxt)
{
  return new pass_build_ealias (ctxt);
}
8113
8114
/* IPA PTA solutions for ESCAPED.  */
/* NOTE(review): positional aggregate initialization of pt_solution's
   flag members; only the first flag is set true -- confirm against the
   member order of struct pt_solution before reordering its fields.  */
struct pt_solution ipa_escaped_pt
  = { true, false, false, false, false,
      false, false, false, false, false, NULL };
8119
8120 /* Associate node with varinfo DATA. Worker for
8121 cgraph_for_symbol_thunks_and_aliases. */
8122 static bool
8123 associate_varinfo_to_alias (struct cgraph_node *node, void *data)
8124 {
8125 if ((node->alias
8126 || (node->thunk
8127 && ! node->inlined_to))
8128 && node->analyzed
8129 && !node->ifunc_resolver)
8130 insert_vi_for_tree (node->decl, (varinfo_t)data);
8131 return false;
8132 }
8133
/* Dump varinfo VI to FILE.  Prints the id and name, a line of flags
   and sizes, and the (old)solution bitmaps when non-empty.  */

static void
dump_varinfo (FILE *file, varinfo_t vi)
{
  if (vi == NULL)
    return;

  fprintf (file, "%u: %s\n", vi->id, vi->name);

  const char *sep = " ";
  if (vi->is_artificial_var)
    fprintf (file, "%sartificial", sep);
  if (vi->is_special_var)
    fprintf (file, "%sspecial", sep);
  if (vi->is_unknown_size_var)
    fprintf (file, "%sunknown-size", sep);
  if (vi->is_full_var)
    fprintf (file, "%sfull", sep);
  if (vi->is_heap_var)
    fprintf (file, "%sheap", sep);
  if (vi->may_have_pointers)
    fprintf (file, "%smay-have-pointers", sep);
  if (vi->only_restrict_pointers)
    fprintf (file, "%sonly-restrict-pointers", sep);
  if (vi->is_restrict_var)
    fprintf (file, "%sis-restrict-var", sep);
  if (vi->is_global_var)
    fprintf (file, "%sglobal", sep);
  if (vi->is_ipa_escape_point)
    fprintf (file, "%sipa-escape-point", sep);
  if (vi->is_fn_info)
    fprintf (file, "%sfn-info", sep);
  if (vi->ruid)
    fprintf (file, "%srestrict-uid:%u", sep, vi->ruid);
  if (vi->next)
    fprintf (file, "%snext:%u", sep, vi->next);
  if (vi->head != vi->id)
    fprintf (file, "%shead:%u", sep, vi->head);
  if (vi->offset)
    fprintf (file, "%soffset:" HOST_WIDE_INT_PRINT_DEC, sep, vi->offset);
  /* Sizes of ~0 mean "unknown" and are not printed.  */
  if (vi->size != ~(unsigned HOST_WIDE_INT)0)
    fprintf (file, "%ssize:" HOST_WIDE_INT_PRINT_DEC, sep, vi->size);
  if (vi->fullsize != ~(unsigned HOST_WIDE_INT)0
      && vi->fullsize != vi->size)
    fprintf (file, "%sfullsize:" HOST_WIDE_INT_PRINT_DEC, sep,
	     vi->fullsize);
  fprintf (file, "\n");

  if (vi->solution && !bitmap_empty_p (vi->solution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " solution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }

  /* Only print the old solution when it differs from the current one.  */
  if (vi->oldsolution && !bitmap_empty_p (vi->oldsolution)
      && !bitmap_equal_p (vi->solution, vi->oldsolution))
    {
      bitmap_iterator bi;
      unsigned i;
      fprintf (file, " oldsolution: {");
      EXECUTE_IF_SET_IN_BITMAP (vi->oldsolution, 0, i, bi)
	fprintf (file, " %u", i);
      fprintf (file, " }\n");
    }
}
8204
/* Dump varinfo VI to stderr.  For use from the debugger.  */

DEBUG_FUNCTION void
debug_varinfo (varinfo_t vi)
{
  dump_varinfo (stderr, vi);
}
8212
8213 /* Dump varmap to FILE. */
8214
8215 static void
8216 dump_varmap (FILE *file)
8217 {
8218 if (varmap.length () == 0)
8219 return;
8220
8221 fprintf (file, "variables:\n");
8222
8223 for (unsigned int i = 0; i < varmap.length (); ++i)
8224 {
8225 varinfo_t vi = get_varinfo (i);
8226 dump_varinfo (file, vi);
8227 }
8228
8229 fprintf (file, "\n");
8230 }
8231
/* Dump varmap to stderr.  For use from the debugger.  */

DEBUG_FUNCTION void
debug_varmap (void)
{
  dump_varmap (stderr);
}
8239
/* Compute whether NODE is referred to non-locally and OR the result
   into *DATA (a bool).  Worker for
   cgraph_for_symbol_thunks_and_aliases; always returns false so the
   walk continues.  */
static bool
refered_from_nonlocal_fn (struct cgraph_node *node, void *data)
{
  bool *nonlocal_p = (bool *)data;
  *nonlocal_p |= (node->used_from_other_partition
		  || DECL_EXTERNAL (node->decl)
		  || TREE_PUBLIC (node->decl)
		  || node->force_output
		  /* "noipa" makes the function an IPA analysis barrier.  */
		  || lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)));
  return false;
}
8253
8254 /* Same for varpool nodes. */
8255 static bool
8256 refered_from_nonlocal_var (struct varpool_node *node, void *data)
8257 {
8258 bool *nonlocal_p = (bool *)data;
8259 *nonlocal_p |= (node->used_from_other_partition
8260 || DECL_EXTERNAL (node->decl)
8261 || TREE_PUBLIC (node->decl)
8262 || node->force_output);
8263 return false;
8264 }
8265
8266 /* Execute the driver for IPA PTA. */
8267 static unsigned int
8268 ipa_pta_execute (void)
8269 {
8270 struct cgraph_node *node;
8271 varpool_node *var;
8272 unsigned int from = 0;
8273
8274 in_ipa_mode = 1;
8275
8276 init_alias_vars ();
8277
8278 if (dump_file && (dump_flags & TDF_DETAILS))
8279 {
8280 symtab->dump (dump_file);
8281 fprintf (dump_file, "\n");
8282 }
8283
8284 if (dump_file)
8285 {
8286 fprintf (dump_file, "Generating generic constraints\n\n");
8287 dump_constraints (dump_file, from);
8288 fprintf (dump_file, "\n");
8289 from = constraints.length ();
8290 }
8291
8292 /* Build the constraints. */
8293 FOR_EACH_DEFINED_FUNCTION (node)
8294 {
8295 varinfo_t vi;
8296 /* Nodes without a body in this partition are not interesting.
8297 Especially do not visit clones at this point for now - we
8298 get duplicate decls there for inline clones at least. */
8299 if (!node->has_gimple_body_p ()
8300 || node->in_other_partition
8301 || node->inlined_to)
8302 continue;
8303 node->get_body ();
8304
8305 gcc_assert (!node->clone_of);
8306
8307 /* For externally visible or attribute used annotated functions use
8308 local constraints for their arguments.
8309 For local functions we see all callers and thus do not need initial
8310 constraints for parameters. */
8311 bool nonlocal_p = (node->used_from_other_partition
8312 || DECL_EXTERNAL (node->decl)
8313 || TREE_PUBLIC (node->decl)
8314 || node->force_output
8315 || lookup_attribute ("noipa",
8316 DECL_ATTRIBUTES (node->decl)));
8317 node->call_for_symbol_thunks_and_aliases (refered_from_nonlocal_fn,
8318 &nonlocal_p, true);
8319
8320 vi = create_function_info_for (node->decl,
8321 alias_get_name (node->decl), false,
8322 nonlocal_p);
8323 if (dump_file
8324 && from != constraints.length ())
8325 {
8326 fprintf (dump_file,
8327 "Generating initial constraints for %s",
8328 node->dump_name ());
8329 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8330 fprintf (dump_file, " (%s)",
8331 IDENTIFIER_POINTER
8332 (DECL_ASSEMBLER_NAME (node->decl)));
8333 fprintf (dump_file, "\n\n");
8334 dump_constraints (dump_file, from);
8335 fprintf (dump_file, "\n");
8336
8337 from = constraints.length ();
8338 }
8339
8340 node->call_for_symbol_thunks_and_aliases
8341 (associate_varinfo_to_alias, vi, true);
8342 }
8343
8344 /* Create constraints for global variables and their initializers. */
8345 FOR_EACH_VARIABLE (var)
8346 {
8347 if (var->alias && var->analyzed)
8348 continue;
8349
8350 varinfo_t vi = get_vi_for_tree (var->decl);
8351
8352 /* For the purpose of IPA PTA unit-local globals are not
8353 escape points. */
8354 bool nonlocal_p = (DECL_EXTERNAL (var->decl)
8355 || TREE_PUBLIC (var->decl)
8356 || var->used_from_other_partition
8357 || var->force_output);
8358 var->call_for_symbol_and_aliases (refered_from_nonlocal_var,
8359 &nonlocal_p, true);
8360 if (nonlocal_p)
8361 vi->is_ipa_escape_point = true;
8362 }
8363
8364 if (dump_file
8365 && from != constraints.length ())
8366 {
8367 fprintf (dump_file,
8368 "Generating constraints for global initializers\n\n");
8369 dump_constraints (dump_file, from);
8370 fprintf (dump_file, "\n");
8371 from = constraints.length ();
8372 }
8373
8374 FOR_EACH_DEFINED_FUNCTION (node)
8375 {
8376 struct function *func;
8377 basic_block bb;
8378
8379 /* Nodes without a body in this partition are not interesting. */
8380 if (!node->has_gimple_body_p ()
8381 || node->in_other_partition
8382 || node->clone_of)
8383 continue;
8384
8385 if (dump_file)
8386 {
8387 fprintf (dump_file,
8388 "Generating constraints for %s", node->dump_name ());
8389 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
8390 fprintf (dump_file, " (%s)",
8391 IDENTIFIER_POINTER
8392 (DECL_ASSEMBLER_NAME (node->decl)));
8393 fprintf (dump_file, "\n");
8394 }
8395
8396 func = DECL_STRUCT_FUNCTION (node->decl);
8397 gcc_assert (cfun == NULL);
8398
8399       /* Build constraints for the function body.  */
8400 FOR_EACH_BB_FN (bb, func)
8401 {
8402 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
8403 gsi_next (&gsi))
8404 {
8405 gphi *phi = gsi.phi ();
8406
8407 if (! virtual_operand_p (gimple_phi_result (phi)))
8408 find_func_aliases (func, phi);
8409 }
8410
8411 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
8412 gsi_next (&gsi))
8413 {
8414 gimple *stmt = gsi_stmt (gsi);
8415
8416 find_func_aliases (func, stmt);
8417 find_func_clobbers (func, stmt);
8418 }
8419 }
8420
8421 if (dump_file)
8422 {
8423 fprintf (dump_file, "\n");
8424 dump_constraints (dump_file, from);
8425 fprintf (dump_file, "\n");
8426 from = constraints.length ();
8427 }
8428 }
8429
8430 /* From the constraints compute the points-to sets. */
8431 solve_constraints ();
8432
8433 if (dump_file)
8434 dump_sa_points_to_info (dump_file);
8435
8436 /* Now post-process solutions to handle locals from different
8437 runtime instantiations coming in through recursive invocations. */
8438 unsigned shadow_var_cnt = 0;
8439 for (unsigned i = 1; i < varmap.length (); ++i)
8440 {
8441 varinfo_t fi = get_varinfo (i);
8442 if (fi->is_fn_info
8443 && fi->decl)
8444 /* Automatic variables pointed to by their containing functions
8445 parameters need this treatment. */
8446 for (varinfo_t ai = first_vi_for_offset (fi, fi_parm_base);
8447 ai; ai = vi_next (ai))
8448 {
8449 varinfo_t vi = get_varinfo (find (ai->id));
8450 bitmap_iterator bi;
8451 unsigned j;
8452 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8453 {
8454 varinfo_t pt = get_varinfo (j);
8455 if (pt->shadow_var_uid == 0
8456 && pt->decl
8457 && auto_var_in_fn_p (pt->decl, fi->decl))
8458 {
8459 pt->shadow_var_uid = allocate_decl_uid ();
8460 shadow_var_cnt++;
8461 }
8462 }
8463 }
8464 /* As well as global variables which are another way of passing
8465 arguments to recursive invocations. */
8466 else if (fi->is_global_var)
8467 {
8468 for (varinfo_t ai = fi; ai; ai = vi_next (ai))
8469 {
8470 varinfo_t vi = get_varinfo (find (ai->id));
8471 bitmap_iterator bi;
8472 unsigned j;
8473 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, j, bi)
8474 {
8475 varinfo_t pt = get_varinfo (j);
8476 if (pt->shadow_var_uid == 0
8477 && pt->decl
8478 && auto_var_p (pt->decl))
8479 {
8480 pt->shadow_var_uid = allocate_decl_uid ();
8481 shadow_var_cnt++;
8482 }
8483 }
8484 }
8485 }
8486 }
8487 if (shadow_var_cnt && dump_file && (dump_flags & TDF_DETAILS))
8488 fprintf (dump_file, "Allocated %u shadow variables for locals "
8489 "maybe leaking into recursive invocations of their containing "
8490 "functions\n", shadow_var_cnt);
8491
8492 /* Compute the global points-to sets for ESCAPED.
8493 ??? Note that the computed escape set is not correct
8494 for the whole unit as we fail to consider graph edges to
8495 externally visible functions. */
8496 ipa_escaped_pt = find_what_var_points_to (NULL, get_varinfo (escaped_id));
8497
8498 /* Make sure the ESCAPED solution (which is used as placeholder in
8499 other solutions) does not reference itself. This simplifies
8500 points-to solution queries. */
8501 ipa_escaped_pt.ipa_escaped = 0;
8502
8503 /* Assign the points-to sets to the SSA names in the unit. */
8504 FOR_EACH_DEFINED_FUNCTION (node)
8505 {
8506 tree ptr;
8507 struct function *fn;
8508 unsigned i;
8509 basic_block bb;
8510
8511 /* Nodes without a body in this partition are not interesting. */
8512 if (!node->has_gimple_body_p ()
8513 || node->in_other_partition
8514 || node->clone_of)
8515 continue;
8516
8517 fn = DECL_STRUCT_FUNCTION (node->decl);
8518
8519 /* Compute the points-to sets for pointer SSA_NAMEs. */
8520 FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
8521 {
8522 if (ptr
8523 && POINTER_TYPE_P (TREE_TYPE (ptr)))
8524 find_what_p_points_to (node->decl, ptr);
8525 }
8526
8527 /* Compute the call-use and call-clobber sets for indirect calls
8528 and calls to external functions. */
8529 FOR_EACH_BB_FN (bb, fn)
8530 {
8531 gimple_stmt_iterator gsi;
8532
8533 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8534 {
8535 gcall *stmt;
8536 struct pt_solution *pt;
8537 varinfo_t vi, fi;
8538 tree decl;
8539
8540 stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
8541 if (!stmt)
8542 continue;
8543
8544 /* Handle direct calls to functions with body. */
8545 decl = gimple_call_fndecl (stmt);
8546
8547 {
8548 tree called_decl = NULL_TREE;
8549 if (gimple_call_builtin_p (stmt, BUILT_IN_GOMP_PARALLEL))
8550 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
8551 else if (gimple_call_builtin_p (stmt, BUILT_IN_GOACC_PARALLEL))
8552 called_decl = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
8553
8554 if (called_decl != NULL_TREE
8555 && !fndecl_maybe_in_other_partition (called_decl))
8556 decl = called_decl;
8557 }
8558
8559 if (decl
8560 && (fi = lookup_vi_for_tree (decl))
8561 && fi->is_fn_info)
8562 {
8563 *gimple_call_clobber_set (stmt)
8564 = find_what_var_points_to
8565 (node->decl, first_vi_for_offset (fi, fi_clobbers));
8566 *gimple_call_use_set (stmt)
8567 = find_what_var_points_to
8568 (node->decl, first_vi_for_offset (fi, fi_uses));
8569 }
8570 /* Handle direct calls to external functions. */
8571 else if (decl && (!fi || fi->decl))
8572 {
8573 pt = gimple_call_use_set (stmt);
8574 if (gimple_call_flags (stmt) & ECF_CONST)
8575 memset (pt, 0, sizeof (struct pt_solution));
8576 else if ((vi = lookup_call_use_vi (stmt)) != NULL)
8577 {
8578 *pt = find_what_var_points_to (node->decl, vi);
8579 /* Escaped (and thus nonlocal) variables are always
8580 implicitly used by calls. */
8581 /* ??? ESCAPED can be empty even though NONLOCAL
8582 always escaped. */
8583 pt->nonlocal = 1;
8584 pt->ipa_escaped = 1;
8585 }
8586 else
8587 {
8588 /* If there is nothing special about this call then
8589 we have made everything that is used also escape. */
8590 *pt = ipa_escaped_pt;
8591 pt->nonlocal = 1;
8592 }
8593
8594 pt = gimple_call_clobber_set (stmt);
8595 if (gimple_call_flags (stmt) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
8596 memset (pt, 0, sizeof (struct pt_solution));
8597 else if ((vi = lookup_call_clobber_vi (stmt)) != NULL)
8598 {
8599 *pt = find_what_var_points_to (node->decl, vi);
8600 /* Escaped (and thus nonlocal) variables are always
8601 implicitly clobbered by calls. */
8602 /* ??? ESCAPED can be empty even though NONLOCAL
8603 always escaped. */
8604 pt->nonlocal = 1;
8605 pt->ipa_escaped = 1;
8606 }
8607 else
8608 {
8609 /* If there is nothing special about this call then
8610 we have made everything that is used also escape. */
8611 *pt = ipa_escaped_pt;
8612 pt->nonlocal = 1;
8613 }
8614 }
8615 /* Handle indirect calls. */
8616 else if ((fi = get_fi_for_callee (stmt)))
8617 {
8618 /* We need to accumulate all clobbers/uses of all possible
8619 callees. */
8620 fi = get_varinfo (find (fi->id));
8621 /* If we cannot constrain the set of functions we'll end up
8622 calling we end up using/clobbering everything. */
8623 if (bitmap_bit_p (fi->solution, anything_id)
8624 || bitmap_bit_p (fi->solution, nonlocal_id)
8625 || bitmap_bit_p (fi->solution, escaped_id))
8626 {
8627 pt_solution_reset (gimple_call_clobber_set (stmt));
8628 pt_solution_reset (gimple_call_use_set (stmt));
8629 }
8630 else
8631 {
8632 bitmap_iterator bi;
8633 unsigned i;
8634 struct pt_solution *uses, *clobbers;
8635
8636 uses = gimple_call_use_set (stmt);
8637 clobbers = gimple_call_clobber_set (stmt);
8638 memset (uses, 0, sizeof (struct pt_solution));
8639 memset (clobbers, 0, sizeof (struct pt_solution));
8640 EXECUTE_IF_SET_IN_BITMAP (fi->solution, 0, i, bi)
8641 {
8642 struct pt_solution sol;
8643
8644 vi = get_varinfo (i);
8645 if (!vi->is_fn_info)
8646 {
8647 /* ??? We could be more precise here? */
8648 uses->nonlocal = 1;
8649 uses->ipa_escaped = 1;
8650 clobbers->nonlocal = 1;
8651 clobbers->ipa_escaped = 1;
8652 continue;
8653 }
8654
8655 if (!uses->anything)
8656 {
8657 sol = find_what_var_points_to
8658 (node->decl,
8659 first_vi_for_offset (vi, fi_uses));
8660 pt_solution_ior_into (uses, &sol);
8661 }
8662 if (!clobbers->anything)
8663 {
8664 sol = find_what_var_points_to
8665 (node->decl,
8666 first_vi_for_offset (vi, fi_clobbers));
8667 pt_solution_ior_into (clobbers, &sol);
8668 }
8669 }
8670 }
8671 }
8672 else
8673 gcc_unreachable ();
8674 }
8675 }
8676
8677 fn->gimple_df->ipa_pta = true;
8678
8679 /* We have to re-set the final-solution cache after each function
8680 because what is a "global" is dependent on function context. */
8681 final_solutions->empty ();
8682 obstack_free (&final_solutions_obstack, NULL);
8683 gcc_obstack_init (&final_solutions_obstack);
8684 }
8685
8686 delete_points_to_sets ();
8687
8688 in_ipa_mode = 0;
8689
8690 return 0;
8691 }
8692
8693 namespace {
8694
8695 const pass_data pass_data_ipa_pta =
8696 {
8697 SIMPLE_IPA_PASS, /* type */
8698 "pta", /* name */
8699 OPTGROUP_NONE, /* optinfo_flags */
8700 TV_IPA_PTA, /* tv_id */
8701 0, /* properties_required */
8702 0, /* properties_provided */
8703 0, /* properties_destroyed */
8704 0, /* todo_flags_start */
8705 0, /* todo_flags_finish */
8706 };
8707
8708 class pass_ipa_pta : public simple_ipa_opt_pass
8709 {
8710 public:
8711 pass_ipa_pta (gcc::context *ctxt)
8712 : simple_ipa_opt_pass (pass_data_ipa_pta, ctxt)
8713 {}
8714
8715 /* opt_pass methods: */
8716 virtual bool gate (function *)
8717 {
8718 return (optimize
8719 && flag_ipa_pta
8720 /* Don't bother doing anything if the program has errors. */
8721 && !seen_error ());
8722 }
8723
8724 opt_pass * clone () { return new pass_ipa_pta (m_ctxt); }
8725
8726 virtual unsigned int execute (function *) { return ipa_pta_execute (); }
8727
8728 }; // class pass_ipa_pta
8729
8730 } // anon namespace
8731
8732 simple_ipa_opt_pass *
8733 make_pass_ipa_pta (gcc::context *ctxt)
8734 {
8735 return new pass_ipa_pta (ctxt);
8736 }
8737