/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2022 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc (at) google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "output.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "gimple-fold.h"
#include "ubsan.h"
#include "builtins.h"
#include "fnmatch.h"
#include "tree-inline.h"
#include "tree-ssa.h"
#include "tree-eh.h"
#include "diagnostic-core.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN
   functions.  A call to __asan_init_vN() is inserted into the list of module
   CTORs.  N is the version number of the AddressSanitizer API.  The changes
   between the API versions are listed in
   libsanitizer/asan/asan_interface_internal.h.
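
   As a concrete illustration (not from the sources, just working the
   arithmetic above with the default x86_64 shadow offset 0x7fff8000):
   for a 4-byte load from X = 0x7fffb780, ShadowAddr is
   (0x7fffb780 >> 3) + 0x7fff8000 = 0x8fff76f0.  A shadow value of 0 there
   means the whole 8-byte granule is addressable, so the check passes.  A
   shadow value of 4 means only the first 4 bytes of the granule are
   addressable: the load from X itself still passes, since
   (0 + 4 - 1 >= 4) is false, but the same load from X + 1 reports an
   error, since (1 + 4 - 1 >= 4) holds.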

   The run-time library redefines malloc (so that redzones are inserted
   around the allocated memory) and free (so that reuse of freed memory is
   delayed), and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[24] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	    the next slot be 32-byte aligned; this one is called Partial
	    Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	    'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

   1/ The first 8 bytes contain a magical asan number that is always
      0x41B58AB3.

   2/ The following 8 bytes contain a pointer to a string (to be parsed at
      runtime by the runtime asan library), whose format is the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

      where '(...){n}' means the content inside the parentheses occurs 'n'
      times, with 'n' being the number of variables on the stack.

   3/ The following 8 bytes contain the PC of the current function which
      will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.
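
      For instance, for the foo () example above and the format in point
      2/, the frame description string would look something like
      "foo 2 32 24 96 8" (illustrative only): two variables, 'a' at offset
      32 from the bottom of the frame with length 24, and 'b' at offset 96
      with length 8 -- each offset 32-byte aligned, as required by the
      layout above.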

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.cc.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The size of the red zones is chosen so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32-byte aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */

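/* As a rough sketch (illustrative numbers and names, not the exact trees
   this file builds), for a global `int g[10];' in module.c the effect is
   as if the compiler emitted:

     static struct __asan_global __g_desc
       = { &g, 40, 96, "g", "module.c", 0, NULL };
     static void __asan_ctor (void)
     { __asan_register_globals (&__g_desc, 1); }
     static void __asan_dtor (void)
     { __asan_unregister_globals (&__g_desc, 1); }

   with __asan_ctor and __asan_dtor placed in the module's constructor and
   destructor lists; 40 is the size of `g' and 96 a plausible
   32-byte-aligned size including the trailing red zone.  */
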
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;
static tree last_alloca_addr;

/* Set of variable declarations that are going to be guarded by
   use-after-scope sanitizer.  */

hash_set<tree> *asan_handled_variables = NULL;

hash_set <tree> *asan_used_labels = NULL;

/* Global variables for HWASAN stack tagging.  */
/* hwasan_frame_tag_offset records the offset from the frame base tag that the
   next object should have.  */
static uint8_t hwasan_frame_tag_offset = 0;
/* hwasan_frame_base_ptr is a pointer with the same address as
   `virtual_stack_vars_rtx` for the current frame, and with the frame base tag
   stored in it.  N.b. this global RTX does not strictly need to be marked
   GTY: all uses are in just one pass (cfgexpand) and there are no calls to
   ggc_collect between them.  We mark it GTY(()) anyway so that the variable
   can be used later on if needed by future features.  */
static GTY(()) rtx hwasan_frame_base_ptr = NULL_RTX;
/* hwasan_frame_base_init_seq is the sequence of RTL insns that will
   initialize the hwasan_frame_base_ptr.  When the hwasan_frame_base_ptr is
   requested, we generate this sequence but do not emit it.  If the sequence
   was created it is emitted once the function body has been expanded.

   This delay is because the frame base pointer may be needed anywhere in the
   function body, or needed by the expand_used_vars function.  Emitting it
   once in a known place is simpler than requiring the emission logic to know
   where the sequence should go depending on the first place the hwasan frame
   base is needed.  */
static GTY(()) rtx_insn *hwasan_frame_base_init_seq = NULL;

/* Structure defining the extent of one object on the stack that HWASAN needs
   to tag in the corresponding shadow stack space.

   The range this object spans on the stack is between `untagged_base +
   nearest_offset` and `untagged_base + farthest_offset`.
   `tagged_base` is an rtx containing the same value as `untagged_base` but
   with a random tag stored in the top byte.  We record both `untagged_base`
   and `tagged_base` so that `hwasan_emit_prologue` can use both without
   having to emit RTL into the instruction stream to re-calculate one from
   the other.  (`hwasan_emit_prologue` needs to use both bases since the
   __hwasan_tag_memory call it emits uses an untagged value, and it
   calculates the tag to store in shadow memory based on the tag_offset plus
   the tag in tagged_base).  */
struct hwasan_stack_var
{
  rtx untagged_base;
  rtx tagged_base;
  poly_int64 nearest_offset;
  poly_int64 farthest_offset;
  uint8_t tag_offset;
};

/* Variable recording all stack variables that HWASAN needs to tag.
   Does not need to be marked as GTY(()) since every use is in the cfgexpand
   pass and ggc_collect is not called in the middle of that pass.  */
static vec<hwasan_stack_var> hwasan_tagged_stack_vars;


/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}
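
/* For example, a value given on the command line as
   -fasan-shadow-offset=0x7fff8000 reaches this function as the string
   "0x7fff8000"; a string that does not parse completely as a number
   (say "0x7fff8000x") makes it return false.  */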

/* Set list of user-defined sections that need to be sanitized.  */

void
set_sanitized_sections (const char *sections)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
}
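
/* For example, -fsanitize-sections=.data.*,.mysec turns into the two
   patterns ".data.*" and ".mysec" here; section_sanitized_p below then
   matches section names against those patterns with fnmatch.  */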

/* Return true if STMT is an ASAN_MARK internal call with flag FLAG.  */

bool
asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
{
  return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
}

/* Return true if stack variables should be protected.  */

bool
asan_sanitize_stack_p (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
}

/* Return true if alloca calls should be protected.  */

bool
asan_sanitize_allocas_p (void)
{
  return (asan_sanitize_stack_p () && param_asan_protect_allocas);
}

/* Return true if memory reads should be instrumented.  */

bool
asan_instrument_reads (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_reads);
}

/* Return true if memory writes should be instrumented.  */

bool
asan_instrument_writes (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_instrument_writes);
}

/* Return true if builtin memory intrinsics should be instrumented.  */

bool
asan_memintrin (void)
{
  return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_memintrin);
}


/* Checks whether section SEC should be sanitized.  */

static bool
section_sanitized_p (const char *sec)
{
  char *pat;
  unsigned i;
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
  return false;
}

/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}
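
/* For instance, on x86_64 GNU/Linux targetm.asan_shadow_offset () returns
   0x7fff8000, so (absent a -fasan-shadow-offset= override) the shadow byte
   for address X lives at (X >> 3) + 0x7fff8000, matching the check
   pseudocode at the top of this file.  */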

/* Returns whether the Asan shadow offset has been set.  */
bool
asan_shadow_offset_set_p ()
{
  return asan_shadow_offset_computed;
}

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[3];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
{
  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref_pool.release ();
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
	 && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return the address of the last dynamically allocated alloca.  */

static tree
get_last_alloca_addr ()
{
  if (last_alloca_addr)
    return last_alloca_addr;

  last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
  gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_on_edge_immediate (e, g);
  return last_alloca_addr;
}

/* Insert __asan_allocas_unpoison (top, bottom) call before
   __builtin_stack_restore (new_sp) call.
   The pseudocode of this routine should look like this:
     top = last_alloca_addr;
     bot = new_sp;
     __asan_allocas_unpoison (top, bot);
     last_alloca_addr = new_sp;
     __builtin_stack_restore (new_sp);
   In general, we can't use new_sp as bot parameter because on some
   architectures SP has a non-zero offset from the dynamic stack area.
   Moreover, on some architectures this offset (STACK_DYNAMIC_OFFSET)
   becomes known for each particular function only after all callees have
   been expanded to RTL.  The most noticeable example is PowerPC{,64}, see
   http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
   To overcome the issue we use the following trick: pass new_sp as a second
   parameter to __asan_allocas_unpoison and rewrite it during expansion with
   new_sp + (virtual_dynamic_stack_rtx - sp) later in the
   expand_asan_emit_allocas_unpoison function.

   HWASAN needs to do something very similar; the eventual pseudocode should
   be:
     __hwasan_tag_memory (virtual_stack_dynamic_rtx,
			  0,
			  new_sp - sp);
     __builtin_stack_restore (new_sp)

   We need to use the same trick to handle STACK_DYNAMIC_OFFSET as described
   above.  */

static void
handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
{
  if (!iter
      || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
    return;

  tree restored_stack = gimple_call_arg (call, 0);

  gimple *g;

  if (hwasan_sanitize_allocas_p ())
    {
      enum internal_fn fn = IFN_HWASAN_ALLOCA_UNPOISON;
      /* There is only one piece of information `expand_HWASAN_ALLOCA_UNPOISON`
	 needs to work.  This is the length of the area that we're
	 deallocating.  Since the stack pointer is known at expand time, the
	 position of the new stack pointer after deallocation is enough
	 information to calculate this length.  */
      g = gimple_build_call_internal (fn, 1, restored_stack);
    }
  else
    {
      tree last_alloca = get_last_alloca_addr ();
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
      g = gimple_build_call (fn, 2, last_alloca, restored_stack);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      g = gimple_build_assign (last_alloca, restored_stack);
    }

  gsi_insert_before (iter, g, GSI_SAME_STMT);
}

/* Deploy and poison redzones around a __builtin_alloca call.  To do this, we
   should replace the call with another one with changed parameters and
   replace all its uses with the new address, so that
     addr = __builtin_alloca (old_size, align);
   is replaced by
     left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
   The following two statements are optimized out if we know that
   old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a
   partial redzone.
     misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
     partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
     right_redzone_size = ASAN_RED_ZONE_SIZE;
     additional_size = left_redzone_size + partial_redzone_size
		       + right_redzone_size;
     new_size = old_size + additional_size;
     new_alloca = __builtin_alloca (new_size, max (align, 32))
     __asan_alloca_poison (new_alloca, old_size)
     addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
     last_alloca_addr = new_alloca;
   ADDITIONAL_SIZE is added to make the new memory allocation contain not
   only the requested memory, but also left, partial and right redzones, as
   well as some additional space required by the alignment.  */
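
/* Worked example (illustrative numbers only, assuming
   ASAN_RED_ZONE_SIZE == 32): for `__builtin_alloca (17)' with the default
   alignment, misalign = 17 & 31 = 17, so partial_redzone_size = 15;
   additional_size = 32 + 15 + 32 = 79 and new_size = 96.  The pointer the
   user sees is new_alloca + 32, leaving a 32-byte left redzone, 17 usable
   bytes, a 15-byte partial redzone and a 32-byte right redzone, all of
   which __asan_alloca_poison marks appropriately in shadow memory.  */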

static void
handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
{
  if (!iter
      || !(asan_sanitize_allocas_p () || hwasan_sanitize_allocas_p ()))
    return;

  gassign *g;
  gcall *gg;
  tree callee = gimple_call_fndecl (call);
  tree lhs = gimple_call_lhs (call);
  tree old_size = gimple_call_arg (call, 0);
  tree ptr_type = lhs ? TREE_TYPE (lhs) : ptr_type_node;
  tree partial_size = NULL_TREE;
  unsigned int align
    = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
      ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));

  bool throws = false;
  edge e = NULL;
  if (stmt_can_throw_internal (cfun, call))
    {
      if (!lhs)
	return;
      throws = true;
      e = find_fallthru_edge (gsi_bb (*iter)->succs);
    }

  if (hwasan_sanitize_allocas_p ())
    {
      gimple_seq stmts = NULL;
      location_t loc = gimple_location (gsi_stmt (*iter));
      /* HWASAN needs a different expansion.

	   addr = __builtin_alloca (size, align);

	 should be replaced by

	   new_size = size rounded up to HWASAN_TAG_GRANULE_SIZE byte alignment;
	   untagged_addr = __builtin_alloca (new_size, align);
	   tag = __hwasan_choose_alloca_tag ();
	   addr = ifn_HWASAN_SET_TAG (untagged_addr, tag);
	   __hwasan_tag_memory (untagged_addr, tag, new_size);  */
      /* Ensure alignment of at least HWASAN_TAG_GRANULE_SIZE bytes so we
	 start on a tag granule.  */
      align = align > HWASAN_TAG_GRANULE_SIZE ? align : HWASAN_TAG_GRANULE_SIZE;

      tree old_size = gimple_call_arg (call, 0);
      tree new_size = gimple_build_round_up (&stmts, loc, size_type_node,
					     old_size,
					     HWASAN_TAG_GRANULE_SIZE);

      /* Make the alloca call.  */
      tree untagged_addr
	= gimple_build (&stmts, loc,
			as_combined_fn (BUILT_IN_ALLOCA_WITH_ALIGN), ptr_type,
			new_size, build_int_cst (size_type_node, align));

      /* Choose the tag.
	 Here we use an internal function so we can choose the tag at expand
	 time.  We need the decision to be made after stack variables have
	 been assigned their tags (i.e. once the hwasan_frame_tag_offset
	 variable has been set to one past the last stack variable's tag).  */
      tree tag = gimple_build (&stmts, loc, CFN_HWASAN_CHOOSE_TAG,
			       unsigned_char_type_node);

      /* Add the tag to the pointer.  */
      tree addr
	= gimple_build (&stmts, loc, CFN_HWASAN_SET_TAG, ptr_type,
			untagged_addr, tag);

      /* Tag shadow memory.
	 NOTE: the libhwasan API requires using `untagged_addr` here.  */
      gimple_build (&stmts, loc, as_combined_fn (BUILT_IN_HWASAN_TAG_MEM),
		    void_type_node, untagged_addr, tag, new_size);

      /* Insert the built-up code sequence into the original instruction
	 stream the iterator points to.  */
      gsi_insert_seq_before (iter, stmts, GSI_SAME_STMT);

      /* Finally, replace the old alloca pointer with the newly built tagged
	 address.  */
      replace_call_with_value (iter, addr);
      return;
    }

  tree last_alloca = get_last_alloca_addr ();
  const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;

  /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
     bytes of allocated space.  Otherwise, align alloca to ASAN_RED_ZONE_SIZE
     manually.  */
  align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);

  tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
  tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);

  /* Extract lower bits from old_size.  */
  wide_int size_nonzero_bits = get_nonzero_bits (old_size);
  wide_int rz_mask
    = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
  wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);

  /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
     redzone.  Otherwise, compute its size here.  */
  if (wi::ne_p (old_size_lower_bits, 0))
    {
      /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
	 partial_size = ASAN_RED_ZONE_SIZE - misalign.  */
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
			       BIT_AND_EXPR, old_size, alloca_rz_mask);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree misalign = gimple_assign_lhs (g);
      g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
			       redzone_size, misalign);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      partial_size = gimple_assign_lhs (g);
    }

  /* additional_size = align + ASAN_RED_ZONE_SIZE.  */
  tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
					+ ASAN_RED_ZONE_SIZE);
  /* If alloca has partial redzone, include it to additional_size too.  */
  if (partial_size)
    {
      /* additional_size += partial_size.  */
      g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
			       partial_size, additional_size);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      additional_size = gimple_assign_lhs (g);
    }

  /* new_size = old_size + additional_size.  */
  g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
			   additional_size);
  gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_size = gimple_assign_lhs (g);

  /* Build new __builtin_alloca call:
       new_alloca_with_rz = __builtin_alloca (new_size, align).  */
  tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  gg = gimple_build_call (fn, 2, new_size,
			  build_int_cst (size_type_node, align));
  tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
  gimple_call_set_lhs (gg, new_alloca_with_rz);
  if (throws)
    {
      gimple_call_set_lhs (call, NULL);
      gsi_replace (iter, gg, true);
    }
  else
    gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* new_alloca = new_alloca_with_rz + align.  */
  g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
			   new_alloca_with_rz,
			   build_int_cst (size_type_node,
					  align / BITS_PER_UNIT));
  gimple_stmt_iterator gsi = gsi_none ();
  if (throws)
    {
      gsi_insert_on_edge_immediate (e, g);
      gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  tree new_alloca = gimple_assign_lhs (g);

  /* Poison newly created alloca redzones:
       __asan_alloca_poison (new_alloca, old_size).  */
  fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
  gg = gimple_build_call (fn, 2, new_alloca, old_size);
  if (throws)
    gsi_insert_after (&gsi, gg, GSI_NEW_STMT);
  else
    gsi_insert_before (iter, gg, GSI_SAME_STMT);

  /* Save new_alloca_with_rz value into last_alloca to use it during
     allocas unpoisoning.  */
  g = gimple_build_assign (last_alloca, new_alloca_with_rz);
  if (throws)
    gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  else
    gsi_insert_before (iter, g, GSI_SAME_STMT);

  /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
  if (throws)
    {
      g = gimple_build_assign (lhs, new_alloca);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
    }
  else
    replace_call_with_value (iter, new_alloca);
}

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p,
			      gimple_stmt_iterator *iter = NULL)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      /* Special case strlen here since its length is taken from its return
	 value.

	 The approach taken by the sanitizers is to check a memory access
	 before it's taken.  For ASAN strlen is intercepted by libasan, so no
	 check is inserted by the compiler.

	 This function still returns `true` and provides a length to the rest
	 of the ASAN pass in order to record what areas have been checked,
	 avoiding superfluous checks later on.

	 HWASAN does not intercept any of these internal functions.
	 This means that checks for memory accesses must be inserted by the
	 compiler.
	 strlen is a special case, because we can tell the length from the
	 return of the function, but that is not known until after the
	 function has returned.

	 Hence we can't check the memory access before it happens.
	 We could check the memory access after it has already happened, but
	 for now we choose to just ignore `strlen` calls.
	 This decision was simply made because that means the special case is
	 limited to this one case of this one function.  */
      if (hwasan_sanitize_p ())
	return false;
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

    case BUILT_IN_STACK_RESTORE:
      handle_builtin_stack_restore (call, iter);
      break;

    CASE_BUILT_IN_ALLOCA:
      handle_builtin_alloca (call, iter);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_1:
      access_size = 1;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_2:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
      access_size = 2;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_4:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
      access_size = 4;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_8:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
      access_size = 8;
      goto do_atomic;

    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* FALLTHRU */
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
    case BUILT_IN_SYNC_FETCH_AND_NAND_16:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
    case BUILT_IN_SYNC_NAND_AND_FETCH_16:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
    case BUILT_IN_ATOMIC_STORE_16:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      access_size = 16;
      /* FALLTHRU */
    do_atomic:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so let's
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	tree type = build_nonstandard_integer_type (access_size
						    * BITS_PER_UNIT, 1);
	dest = build2 (MEM_REF, type, dest,
		       build_int_cst (build_pointer_type (char_type_node), 0));
	break;
      }

    default:
      /* The other builtins' memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
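
/* For example (illustrative, not an exhaustive list): for
   memcpy (d, s, n) the function fills DST with d, SRC0 with s and both
   length slots with n; for __atomic_load_4 (p, order) it fills DST with
   the 4-byte dereference of p and sets *DEST_IS_DEREF, so the caller
   instruments that access itself rather than a (pointer, length) range.  */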

/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple *stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	{
	  if (!has_mem_ref_been_instrumented (&r))
	    return false;
	  if (r_is_store && gimple_assign_load_p (stmt))
	    {
	      asan_mem_ref src;
	      asan_mem_ref_init (&src, NULL, 1);
	      src.start = gimple_assign_rhs1 (stmt);
	      src.access_size = int_size_in_bytes (TREE_TYPE (src.start));
	      if (!has_mem_ref_been_instrumented (&src))
		return false;
	    }
	  return true;
	}
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }

  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}
1362 1.1 mrg
1363 1.1 mrg /* Initialize shadow_ptr_types array. */
1364 1.1 mrg
1365 1.1 mrg static void
1366 1.1 mrg asan_init_shadow_ptr_types (void)
1367 1.1 mrg {
1368 1.1 mrg asan_shadow_set = new_alias_set ();
1369 1.1 mrg tree types[3] = { signed_char_type_node, short_integer_type_node,
1370 1.1 mrg integer_type_node };
1371 1.1 mrg
1372 1.1 mrg for (unsigned i = 0; i < 3; i++)
1373 1.1 mrg {
1374 1.1 mrg shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1375 1.1 mrg TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1376 1.1 mrg shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1377 1.1 mrg }
1378 1.1 mrg
1379 1.1 mrg initialize_sanitizer_builtins ();
1380 1.1 mrg }
1381 1.1 mrg
1382 1.1 mrg /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text. */
1383 1.1 mrg
1384 1.1 mrg static tree
1385 1.1 mrg asan_pp_string (pretty_printer *pp)
1386 1.1 mrg {
1387 1.1 mrg const char *buf = pp_formatted_text (pp);
1388 1.1 mrg size_t len = strlen (buf);
1389 1.1 mrg tree ret = build_string (len + 1, buf);
1390 1.1 mrg TREE_TYPE (ret)
1391 1.1 mrg = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1392 1.1 mrg build_index_type (size_int (len)));
1393 1.1 mrg TREE_READONLY (ret) = 1;
1394 1.1 mrg TREE_STATIC (ret) = 1;
1395 1.1 mrg return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1396 1.1 mrg }
1397 1.1 mrg
1398 1.1 mrg /* Clear LEN bytes of shadow memory at SHADOW_MEM. We can't emit a library
1399 1.1 mrg call here, though. */
1400 1.1 mrg
1401 1.1 mrg static void
1402 1.1 mrg asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1403 1.1 mrg {
1404 1.1 mrg rtx_insn *insn, *insns, *jump;
1405 1.1 mrg rtx_code_label *top_label;
1406 1.1 mrg rtx end, addr, tmp;
1407 1.1 mrg
1408 1.1 mrg gcc_assert ((len & 3) == 0);
1409 1.1 mrg start_sequence ();
1410 1.1 mrg clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1411 1.1 mrg insns = get_insns ();
1412 1.1 mrg end_sequence ();
1413 1.1 mrg for (insn = insns; insn; insn = NEXT_INSN (insn))
1414 1.1 mrg if (CALL_P (insn))
1415 1.1 mrg break;
1416 1.1 mrg if (insn == NULL_RTX)
1417 1.1 mrg {
1418 1.1 mrg emit_insn (insns);
1419 1.1 mrg return;
1420 1.1 mrg }
1421 1.1 mrg
1422 1.1 mrg top_label = gen_label_rtx ();
1423 1.1 mrg addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1424 1.1 mrg shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1425 1.1 mrg end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1426 1.1 mrg emit_label (top_label);
1427 1.1 mrg
1428 1.1 mrg emit_move_insn (shadow_mem, const0_rtx);
1429 1.1 mrg tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1430 1.1 mrg true, OPTAB_LIB_WIDEN);
1431 1.1 mrg if (tmp != addr)
1432 1.1 mrg emit_move_insn (addr, tmp);
1433 1.1 mrg emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1434 1.1 mrg jump = get_last_insn ();
1435 1.1 mrg gcc_assert (JUMP_P (jump));
1436 1.1 mrg add_reg_br_prob_note (jump,
1437 1.1 mrg profile_probability::guessed_always ()
1438 1.1 mrg .apply_scale (80, 100));
1439 1.1 mrg }
1440 1.1 mrg
1441 1.1 mrg void
1442 1.1 mrg asan_function_start (void)
1443 1.1 mrg {
1444 1.1 mrg section *fnsec = function_section (current_function_decl);
1445 1.1 mrg switch_to_section (fnsec);
1446 1.1 mrg ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1447 1.1 mrg current_function_funcdef_no);
1448 1.1 mrg }
1449 1.1 mrg
1450 1.1 mrg /* Return the number of shadow bytes occupied by a local variable
1451 1.1 mrg of SIZE bytes. */
1452 1.1 mrg
1453 1.1 mrg static unsigned HOST_WIDE_INT
1454 1.1 mrg shadow_mem_size (unsigned HOST_WIDE_INT size)
1455 1.1 mrg {
1456 1.1 mrg /* It must be possible to align stack variables to granularity
1457 1.1 mrg of shadow memory. */
1458 1.1 mrg gcc_assert (BITS_PER_UNIT
1459 1.1 mrg * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
1460 1.1 mrg
1461 1.1 mrg return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1462 1.1 mrg }
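/* For illustration (a sketch, not part of the compiled code): assuming the
   usual ASAN_SHADOW_GRANULARITY of 8, the mapping above gives

     shadow_mem_size (1)  == 1
     shadow_mem_size (8)  == 1
     shadow_mem_size (9)  == 2
     shadow_mem_size (24) == 3

   i.e. every started granule of the variable occupies one shadow byte.  */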
1463 1.1 mrg
1464 1.1 mrg /* Always emit 4 bytes at a time. */
1465 1.1 mrg #define RZ_BUFFER_SIZE 4
1466 1.1 mrg
1467 1.1 mrg /* ASAN redzone buffer container that handles emission of shadow bytes. */
1468 1.1 mrg class asan_redzone_buffer
1469 1.1 mrg {
1470 1.1 mrg public:
1471 1.1 mrg /* Constructor. */
1472 1.1 mrg asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
1473 1.1 mrg m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
1474 1.1 mrg m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
1475 1.1 mrg {}
1476 1.1 mrg
1477 1.1 mrg /* Emit VALUE shadow byte at a given OFFSET. */
1478 1.1 mrg void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);
1479 1.1 mrg
1480 1.1 mrg /* Emit the content of the buffer as RTL. */
1481 1.1 mrg void flush_redzone_payload (void);
1482 1.1 mrg
1483 1.1 mrg private:
1484 1.1 mrg /* Flush the buffer if it is full
1485 1.1 mrg (length equal to RZ_BUFFER_SIZE). */
1486 1.1 mrg void flush_if_full (void);
1487 1.1 mrg
1488 1.1 mrg /* Memory where we last emitted a redzone payload. */
1489 1.1 mrg rtx m_shadow_mem;
1490 1.1 mrg
1491 1.1 mrg /* Relative offset where we last emitted a redzone payload. */
1492 1.1 mrg HOST_WIDE_INT m_prev_offset;
1493 1.1 mrg
1494 1.1 mrg /* Relative original offset. Used for checking only. */
1495 1.1 mrg HOST_WIDE_INT m_original_offset;
1496 1.1 mrg
1497 1.1 mrg public:
1498 1.1 mrg /* Buffer with redzone payload. */
1499 1.1 mrg auto_vec<unsigned char> m_shadow_bytes;
1500 1.1 mrg };
1501 1.1 mrg
1502 1.1 mrg /* Emit VALUE shadow byte at a given OFFSET. */
1503 1.1 mrg
1504 1.1 mrg void
1505 1.1 mrg asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
1506 1.1 mrg unsigned char value)
1507 1.1 mrg {
1508 1.1 mrg gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
1509 1.1 mrg gcc_assert (offset >= m_prev_offset);
1510 1.1 mrg
1511 1.1 mrg HOST_WIDE_INT off
1512 1.1 mrg = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
1513 1.1 mrg if (off == offset)
1514 1.1 mrg /* Consecutive shadow memory byte. */;
1515 1.1 mrg else if (offset < m_prev_offset + (HOST_WIDE_INT) (ASAN_SHADOW_GRANULARITY
1516 1.1 mrg * RZ_BUFFER_SIZE)
1517 1.1 mrg && !m_shadow_bytes.is_empty ())
1518 1.1 mrg {
1519 1.1 mrg /* Shadow memory byte with a small gap. */
1520 1.1 mrg for (; off < offset; off += ASAN_SHADOW_GRANULARITY)
1521 1.1 mrg m_shadow_bytes.safe_push (0);
1522 1.1 mrg }
1523 1.1 mrg else
1524 1.1 mrg {
1525 1.1 mrg if (!m_shadow_bytes.is_empty ())
1526 1.1 mrg flush_redzone_payload ();
1527 1.1 mrg
1528 1.1 mrg /* Maybe start earlier in order to use an aligned store. */
1529 1.1 mrg HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
1530 1.1 mrg if (align)
1531 1.1 mrg {
1532 1.1 mrg offset -= align;
1533 1.1 mrg for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
1534 1.1 mrg m_shadow_bytes.safe_push (0);
1535 1.1 mrg }
1536 1.1 mrg
1537 1.1 mrg /* Adjust m_prev_offset and m_shadow_mem. */
1538 1.1 mrg HOST_WIDE_INT diff = offset - m_prev_offset;
1539 1.1 mrg m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
1540 1.1 mrg diff >> ASAN_SHADOW_SHIFT);
1541 1.1 mrg m_prev_offset = offset;
1542 1.1 mrg }
1543 1.1 mrg m_shadow_bytes.safe_push (value);
1544 1.1 mrg flush_if_full ();
1545 1.1 mrg }
1546 1.1 mrg
1547 1.1 mrg /* Emit the content of the buffer as RTL. */
1548 1.1 mrg
1549 1.1 mrg void
1550 1.1 mrg asan_redzone_buffer::flush_redzone_payload (void)
1551 1.1 mrg {
1552 1.1 mrg gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1553 1.1 mrg
1554 1.1 mrg if (m_shadow_bytes.is_empty ())
1555 1.1 mrg return;
1556 1.1 mrg
1557 1.1 mrg /* Be sure we always emit to an aligned address. */
1558 1.1 mrg gcc_assert (((m_prev_offset - m_original_offset)
1559 1.1 mrg & (ASAN_RED_ZONE_SIZE - 1)) == 0);
1560 1.1 mrg
1561 1.1 mrg /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed. */
1562 1.1 mrg unsigned l = m_shadow_bytes.length ();
1563 1.1 mrg for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
1564 1.1 mrg m_shadow_bytes.safe_push (0);
1565 1.1 mrg
1566 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS))
1567 1.1 mrg fprintf (dump_file,
1568 1.1 mrg "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);
1569 1.1 mrg
1570 1.1 mrg unsigned HOST_WIDE_INT val = 0;
1571 1.1 mrg for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
1572 1.1 mrg {
1573 1.1 mrg unsigned char v
1574 1.1 mrg = m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
1575 1.1 mrg val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
1576 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS))
1577 1.1 mrg fprintf (dump_file, "%02x ", v);
1578 1.1 mrg }
1579 1.1 mrg
1580 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS))
1581 1.1 mrg fprintf (dump_file, "\n");
1582 1.1 mrg
1583 1.1 mrg rtx c = gen_int_mode (val, SImode);
1584 1.1 mrg m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
1585 1.1 mrg emit_move_insn (m_shadow_mem, c);
1586 1.1 mrg m_shadow_bytes.truncate (0);
1587 1.1 mrg }
1588 1.1 mrg
1589 1.1 mrg /* Flush the buffer if it is full
1590 1.1 mrg (length equal to RZ_BUFFER_SIZE). */
1591 1.1 mrg
1592 1.1 mrg void
1593 1.1 mrg asan_redzone_buffer::flush_if_full (void)
1594 1.1 mrg {
1595 1.1 mrg if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
1596 1.1 mrg flush_redzone_payload ();
1597 1.1 mrg }
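/* A worked example of the buffering above (illustrative only, assuming
   ASAN_SHADOW_GRANULARITY == 8, RZ_BUFFER_SIZE == 4 and a buffer
   constructed with a previous offset of 0):

     buf.emit_redzone_byte (0,  0xf1);
     buf.emit_redzone_byte (8,  0xf1);
     buf.emit_redzone_byte (16, 0xf1);
     buf.emit_redzone_byte (24, 0xf1);

   The first three calls only push bytes; the fourth fills the buffer, so
   flush_if_full emits a single SImode store of the four packed bytes
   (0xf1f1f1f1 here) and empties the buffer again.  */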
1598 1.1 mrg
1599 1.1 mrg
1600 1.1 mrg /* HWAddressSanitizer (hwasan) is a probabilistic method for detecting
1601 1.1 mrg out-of-bounds and use-after-free bugs.
1602 1.1 mrg Read more:
1603 1.1 mrg http://code.google.com/p/address-sanitizer/
1604 1.1 mrg
1605 1.1 mrg Similar to AddressSanitizer (asan), it consists of two parts: the
1606 1.1 mrg instrumentation module in this file, and a run-time library.
1607 1.1 mrg
1608 1.1 mrg The instrumentation module adds a run-time check before every memory insn in
1609 1.1 mrg the same manner as asan (see the block comment for AddressSanitizer above).
1610 1.1 mrg Currently, hwasan only adds out-of-line instrumentation, where each check is
1611 1.1 mrg implemented as a function call to the run-time library. Hence a check for a
1612 1.1 mrg load of N bytes from address X would be implemented with a function call to
1613 1.1 mrg __hwasan_loadN(X), and checking a store of N bytes from address X would be
1614 1.1 mrg implemented with a function call to __hwasan_storeN(X).
1615 1.1 mrg
1616 1.1 mrg The main difference between hwasan and asan is in the information stored to
1617 1.1 mrg help this checking. Both sanitizers use a shadow memory area which stores
1618 1.1 mrg data recording the state of main memory at a corresponding address.
1619 1.1 mrg
1620 1.1 mrg For hwasan, each 16 byte granule in main memory has a corresponding 1 byte
1621 1.1 mrg in shadow memory. This shadow address can be calculated with the equation:
1622 1.1 mrg (addr >> log_2(HWASAN_TAG_GRANULE_SIZE))
1623 1.1 mrg + __hwasan_shadow_memory_dynamic_address;
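   For example (an illustrative calculation only): with the 16 byte
   granules above, the shadow byte for the untagged address 0x1230 lives
   at (0x1230 >> 4) + __hwasan_shadow_memory_dynamic_address, i.e. at
   offset 0x123 from the dynamic shadow base.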
1624 1.1 mrg The conversion between real and shadow memory for asan is described in
1625 1.1 mrg the block comment at the top of this file; here we describe how shadow
1626 1.1 mrg memory is used for hwasan.
1629 1.1 mrg
1630 1.1 mrg For hwasan, each variable is assigned a byte-sized 'tag'. The extent of
1631 1.1 mrg the shadow memory for that variable is filled with the assigned tag, and
1632 1.1 mrg every pointer referencing that variable has its top byte set to the same
1633 1.1 mrg tag. The run-time library redefines malloc so that every allocation returns
1634 1.1 mrg a tagged pointer and tags the corresponding shadow memory with the same tag.
1635 1.1 mrg
1636 1.1 mrg On each pointer dereference the tag found in the pointer is compared to the
1637 1.1 mrg tag found in the shadow memory corresponding to the accessed memory address.
1638 1.1 mrg If these tags are found to differ then this memory access is judged to be
1639 1.1 mrg invalid and a report is generated.
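   Schematically, the check performed at each dereference is (a sketch,
   not the exact run-time library code; TAG_SHIFT and ADDR_MASK are
   hypothetical helpers for extracting and clearing the top byte):

     tag = ptr >> TAG_SHIFT;
     shadow_tag = *(uint8_t *) (shadow_base + ((ptr & ADDR_MASK) >> 4));
     if (tag != shadow_tag)
       report_error (ptr);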
1640 1.1 mrg
1641 1.1 mrg This method of bug detection is not perfect -- it cannot catch every bad
1642 1.1 mrg access -- but catches them probabilistically instead. There is always the
1643 1.1 mrg possibility that an invalid memory access will happen to access memory
1644 1.1 mrg tagged with the same tag as the pointer that this access used.
1645 1.1 mrg The chances of this are approx. 0.4% for any two uncorrelated objects.
1646 1.1 mrg
1647 1.1 mrg Random tag generation can mitigate this problem by decreasing the
1648 1.1 mrg probability that an invalid access will be missed in the same manner over
1649 1.1 mrg multiple runs. i.e. if two objects are tagged the same in one run of the
1650 1.1 mrg binary they are unlikely to be tagged the same in the next run.
1651 1.1 mrg Both heap and stack allocated objects have random tags by default.
1652 1.1 mrg
1653 1.1 mrg [16 byte granule implications]
1654 1.1 mrg Since the shadow memory only has a resolution on real memory of 16 bytes,
1655 1.1 mrg invalid accesses that are within the same 16 byte granule as a valid
1656 1.1 mrg address will not be caught.
1657 1.1 mrg
1658 1.1 mrg There is a "short-granule" feature in the runtime library which does catch
1659 1.1 mrg such accesses, but this feature is not implemented for stack objects (since
1660 1.1 mrg stack objects are allocated and tagged by compiler instrumentation, and
1661 1.1 mrg this feature has not yet been implemented in GCC instrumentation).
1662 1.1 mrg
1663 1.1 mrg Another outcome of this 16 byte resolution is that each tagged object must
1664 1.1 mrg be 16 byte aligned. If two objects were to share any 16 byte granule in
1665 1.1 mrg memory, then they both would have to be given the same tag, and invalid
1666 1.1 mrg accesses to one using a pointer to the other would be undetectable.
1667 1.1 mrg
1668 1.1 mrg [Compiler instrumentation]
1669 1.1 mrg Compiler instrumentation ensures that two adjacent buffers on the stack are
1670 1.1 mrg given different tags; this means an access to one buffer using a pointer
1671 1.1 mrg generated from the other (e.g. through buffer overrun) will have mismatched
1672 1.1 mrg tags and be caught by hwasan.
1673 1.1 mrg
1674 1.1 mrg We don't randomly tag every object on the stack, since that would require
1675 1.1 mrg keeping many registers to record each tag. Instead we randomly generate a
1676 1.1 mrg tag for each function frame, and each new stack object uses a tag offset
1677 1.1 mrg from that frame tag.
1678 1.1 mrg i.e. each object is tagged as RFT + offset, where RFT is the "random frame
1679 1.1 mrg tag" generated for this frame.
1680 1.1 mrg This means that randomisation does not perturb the difference between tags
1681 1.1 mrg on tagged stack objects within a frame, but this is mitigated by the fact
1682 1.1 mrg that objects with the same tag within a frame are very far apart
1683 1.1 mrg (approx. 2^HWASAN_TAG_SIZE objects apart).
1684 1.1 mrg
1685 1.1 mrg As a demonstration, using the same example program as in the asan block
1686 1.1 mrg comment above:
1687 1.1 mrg
1688 1.1 mrg int
1689 1.1 mrg foo ()
1690 1.1 mrg {
1691 1.1 mrg char a[24] = {0};
1692 1.1 mrg int b[2] = {0};
1693 1.1 mrg
1694 1.1 mrg a[5] = 1;
1695 1.1 mrg b[1] = 2;
1696 1.1 mrg
1697 1.1 mrg return a[5] + b[1];
1698 1.1 mrg }
1699 1.1 mrg
1700 1.1 mrg On AArch64 the stack will be ordered as follows for the above function:
1701 1.1 mrg
1702 1.1 mrg Slot 1/ [24 bytes for variable 'a']
1703 1.1 mrg Slot 2/ [8 bytes padding for alignment]
1704 1.1 mrg Slot 3/ [8 bytes for variable 'b']
1705 1.1 mrg Slot 4/ [8 bytes padding for alignment]
1706 1.1 mrg
1707 1.1 mrg (The padding is there to ensure 16 byte alignment as described in the 16
1708 1.1 mrg byte granule implications).
1709 1.1 mrg
1710 1.1 mrg While the shadow memory will be ordered as follows:
1711 1.1 mrg
1712 1.1 mrg - 2 bytes (representing 32 bytes in real memory) tagged with RFT + 1.
1713 1.1 mrg - 1 byte (representing 16 bytes in real memory) tagged with RFT + 2.
1714 1.1 mrg
1715 1.1 mrg And any pointer to "a" will have the tag RFT + 1, and any pointer to "b"
1716 1.1 mrg will have the tag RFT + 2.
1717 1.1 mrg
1718 1.1 mrg [Top Byte Ignore requirements]
1719 1.1 mrg Hwasan requires the ability to store an 8 bit tag in every pointer. There
1720 1.1 mrg is no instrumentation done to remove this tag from pointers before
1721 1.1 mrg dereferencing, which means the hardware must ignore this tag during memory
1722 1.1 mrg accesses.
1723 1.1 mrg
1724 1.1 mrg Architectures where this feature is available should indicate this using
1725 1.1 mrg the TARGET_MEMTAG_CAN_TAG_ADDRESSES hook.
1726 1.1 mrg
1727 1.1 mrg [Stack requires cleanup on unwinding]
1728 1.1 mrg During normal operation of a hwasan sanitized program more space in the
1729 1.1 mrg shadow memory becomes tagged as the stack grows. As the stack shrinks this
1730 1.1 mrg shadow memory space must become untagged. If it is not untagged then when
1731 1.1 mrg the stack grows again (during other function calls later on in the program)
1732 1.1 mrg objects on the stack that are usually not tagged (e.g. parameters passed on
1733 1.1 mrg the stack) can be placed in memory whose shadow space is tagged with
1734 1.1 mrg something else, and accesses can cause false positive reports.
1735 1.1 mrg
1736 1.1 mrg Hence we place untagging code in the epilogue of every function which tags
1737 1.1 mrg some stack objects.
1738 1.1 mrg
1739 1.1 mrg Moreover, the run-time library intercepts longjmp & setjmp to untag when
1740 1.1 mrg the stack is unwound this way.
1741 1.1 mrg
1742 1.1 mrg C++ exceptions are not yet handled, which means this sanitizer can not
1743 1.1 mrg handle C++ code that throws exceptions -- it will give false positives
1744 1.1 mrg after an exception has been thrown. The implementation that the hwasan
1745 1.1 mrg library has for handling these relies on the frame pointer being after any
1746 1.1 mrg local variables. This is not generally the case for GCC. */
1747 1.1 mrg
1748 1.1 mrg
1749 1.1 mrg /* Returns whether we are tagging pointers and checking those tags on memory
1750 1.1 mrg access. */
1751 1.1 mrg bool
1752 1.1 mrg hwasan_sanitize_p ()
1753 1.1 mrg {
1754 1.1 mrg return sanitize_flags_p (SANITIZE_HWADDRESS);
1755 1.1 mrg }
1756 1.1 mrg
1757 1.1 mrg /* Are we tagging the stack? */
1758 1.1 mrg bool
1759 1.1 mrg hwasan_sanitize_stack_p ()
1760 1.1 mrg {
1761 1.1 mrg return (hwasan_sanitize_p () && param_hwasan_instrument_stack);
1762 1.1 mrg }
1763 1.1 mrg
1764 1.1 mrg /* Are we tagging alloca objects? */
1765 1.1 mrg bool
1766 1.1 mrg hwasan_sanitize_allocas_p (void)
1767 1.1 mrg {
1768 1.1 mrg return (hwasan_sanitize_stack_p () && param_hwasan_instrument_allocas);
1769 1.1 mrg }
1770 1.1 mrg
1771 1.1 mrg /* Should we instrument reads? */
1772 1.1 mrg bool
1773 1.1 mrg hwasan_instrument_reads (void)
1774 1.1 mrg {
1775 1.1 mrg return (hwasan_sanitize_p () && param_hwasan_instrument_reads);
1776 1.1 mrg }
1777 1.1 mrg
1778 1.1 mrg /* Should we instrument writes? */
1779 1.1 mrg bool
1780 1.1 mrg hwasan_instrument_writes (void)
1781 1.1 mrg {
1782 1.1 mrg return (hwasan_sanitize_p () && param_hwasan_instrument_writes);
1783 1.1 mrg }
1784 1.1 mrg
1785 1.1 mrg /* Should we instrument builtin calls? */
1786 1.1 mrg bool
1787 1.1 mrg hwasan_memintrin (void)
1788 1.1 mrg {
1789 1.1 mrg return (hwasan_sanitize_p () && param_hwasan_instrument_mem_intrinsics);
1790 1.1 mrg }
1791 1.1 mrg
1792 1.1 mrg /* Insert code to protect stack vars. The prologue sequence should be emitted
1793 1.1 mrg directly; the epilogue sequence is returned. BASE is the register holding
1794 1.1 mrg the stack base, relative to which the OFFSETS array offsets are expressed;
1795 1.1 mrg the OFFSETS array contains pairs of offsets in reverse order, always the end
1796 1.1 mrg offset of some gap that needs protection followed by its starting offset,
1797 1.1 mrg and DECLS is an array of representative decls for each var partition.
1798 1.1 mrg LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1799 1.1 mrg elements long (OFFSETS include gap before the first variable as well
1800 1.1 mrg as gaps after each stack variable). PBASE is, if non-NULL, some pseudo
1801 1.1 mrg register which stack vars DECL_RTLs are based on. Either BASE should be
1802 1.1 mrg assigned to PBASE, when not doing use after return protection, or
1803 1.1 mrg corresponding address based on __asan_stack_malloc* return value. */
1804 1.1 mrg
1805 1.1 mrg rtx_insn *
1806 1.1 mrg asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1807 1.1 mrg HOST_WIDE_INT *offsets, tree *decls, int length)
1808 1.1 mrg {
1809 1.1 mrg rtx shadow_base, shadow_mem, ret, mem, orig_base;
1810 1.1 mrg rtx_code_label *lab;
1811 1.1 mrg rtx_insn *insns;
1812 1.1 mrg char buf[32];
1813 1.1 mrg HOST_WIDE_INT base_offset = offsets[length - 1];
1814 1.1 mrg HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1815 1.1 mrg HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1816 1.1 mrg HOST_WIDE_INT last_offset, last_size, last_size_aligned;
1817 1.1 mrg int l;
1818 1.1 mrg unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1819 1.1 mrg tree str_cst, decl, id;
1820 1.1 mrg int use_after_return_class = -1;
1821 1.1 mrg
1822 1.1 mrg /* Don't emit anything when doing error recovery, the assertions
1823 1.1 mrg might fail e.g. if a function had a frame offset overflow. */
1824 1.1 mrg if (seen_error ())
1825 1.1 mrg return NULL;
1826 1.1 mrg
1827 1.1 mrg if (shadow_ptr_types[0] == NULL_TREE)
1828 1.1 mrg asan_init_shadow_ptr_types ();
1829 1.1 mrg
1830 1.1 mrg expanded_location cfun_xloc
1831 1.1 mrg = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1832 1.1 mrg
1833 1.1 mrg /* First of all, prepare the description string. */
1834 1.1 mrg pretty_printer asan_pp;
1835 1.1 mrg
1836 1.1 mrg pp_decimal_int (&asan_pp, length / 2 - 1);
1837 1.1 mrg pp_space (&asan_pp);
1838 1.1 mrg for (l = length - 2; l; l -= 2)
1839 1.1 mrg {
1840 1.1 mrg tree decl = decls[l / 2 - 1];
1841 1.1 mrg pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1842 1.1 mrg pp_space (&asan_pp);
1843 1.1 mrg pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1844 1.1 mrg pp_space (&asan_pp);
1845 1.1 mrg
1846 1.1 mrg expanded_location xloc
1847 1.1 mrg = expand_location (DECL_SOURCE_LOCATION (decl));
1848 1.1 mrg char location[32];
1849 1.1 mrg
1850 1.1 mrg if (xloc.file == cfun_xloc.file)
1851 1.1 mrg sprintf (location, ":%d", xloc.line);
1852 1.1 mrg else
1853 1.1 mrg location[0] = '\0';
1854 1.1 mrg
1855 1.1 mrg if (DECL_P (decl) && DECL_NAME (decl))
1856 1.1 mrg {
1857 1.1 mrg unsigned idlen
1858 1.1 mrg = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
1859 1.1 mrg pp_decimal_int (&asan_pp, idlen);
1860 1.1 mrg pp_space (&asan_pp);
1861 1.1 mrg pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1862 1.1 mrg pp_string (&asan_pp, location);
1863 1.1 mrg }
1864 1.1 mrg else
1865 1.1 mrg pp_string (&asan_pp, "9 <unknown>");
1866 1.1 mrg
1867 1.1 mrg if (l > 2)
1868 1.1 mrg pp_space (&asan_pp);
1869 1.1 mrg }
1870 1.1 mrg str_cst = asan_pp_string (&asan_pp);
1871 1.1 mrg
1872 1.1 mrg gcc_checking_assert (offsets[0] == (crtl->stack_protect_guard
1873 1.1 mrg ? -ASAN_RED_ZONE_SIZE : 0));
1874 1.1 mrg /* Emit the prologue sequence. */
1875 1.1 mrg if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1876 1.1 mrg && param_asan_use_after_return)
1877 1.1 mrg {
1878 1.1 mrg HOST_WIDE_INT adjusted_frame_size = asan_frame_size;
1879 1.1 mrg /* The stack protector guard is allocated at the top of the frame
1880 1.1 mrg and cfgexpand.cc then uses align_frame_offset (ASAN_RED_ZONE_SIZE);
1881 1.1 mrg while in that case we can still use asan_frame_size, we need to take
1882 1.1 mrg that into account when computing base_align_bias. */
1883 1.1 mrg if (alignb > ASAN_RED_ZONE_SIZE && crtl->stack_protect_guard)
1884 1.1 mrg adjusted_frame_size += ASAN_RED_ZONE_SIZE;
1885 1.1 mrg use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1886 1.1 mrg /* __asan_stack_malloc_N guarantees alignment
1887 1.1 mrg N < 6 ? (64 << N) : 4096 bytes. */
1888 1.1 mrg if (alignb > (use_after_return_class < 6
1889 1.1 mrg ? (64U << use_after_return_class) : 4096U))
1890 1.1 mrg use_after_return_class = -1;
1891 1.1 mrg else if (alignb > ASAN_RED_ZONE_SIZE
1892 1.1 mrg && (adjusted_frame_size & (alignb - 1)))
1893 1.1 mrg {
1894 1.1 mrg base_align_bias
1895 1.1 mrg = ((adjusted_frame_size + alignb - 1)
1896 1.1 mrg & ~(alignb - HOST_WIDE_INT_1)) - adjusted_frame_size;
1897 1.1 mrg use_after_return_class
1898 1.1 mrg = floor_log2 (asan_frame_size + base_align_bias - 1) - 5;
1899 1.1 mrg if (use_after_return_class > 10)
1900 1.1 mrg {
1901 1.1 mrg base_align_bias = 0;
1902 1.1 mrg use_after_return_class = -1;
1903 1.1 mrg }
1904 1.1 mrg }
1905 1.1 mrg }
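  /* For example (illustrative only): with asan_frame_size == 96,
     use_after_return_class is floor_log2 (95) - 5 == 6 - 5 == 1, so the
     prologue will call __asan_stack_malloc_1, which guarantees
     64 << 1 == 128 byte alignment.  */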
1906 1.1 mrg
1907 1.1 mrg /* Align base if target is STRICT_ALIGNMENT. */
1908 1.1 mrg if (STRICT_ALIGNMENT)
1909 1.1 mrg {
1910 1.1 mrg const HOST_WIDE_INT align
1911 1.1 mrg = (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
1912 1.1 mrg base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
1913 1.1 mrg NULL_RTX, 1, OPTAB_DIRECT);
1914 1.1 mrg }
1915 1.1 mrg
1916 1.1 mrg if (use_after_return_class == -1 && pbase)
1917 1.1 mrg emit_move_insn (pbase, base);
1918 1.1 mrg
1919 1.1 mrg base = expand_binop (Pmode, add_optab, base,
1920 1.1 mrg gen_int_mode (base_offset - base_align_bias, Pmode),
1921 1.1 mrg NULL_RTX, 1, OPTAB_DIRECT);
1922 1.1 mrg orig_base = NULL_RTX;
1923 1.1 mrg if (use_after_return_class != -1)
1924 1.1 mrg {
1925 1.1 mrg if (asan_detect_stack_use_after_return == NULL_TREE)
1926 1.1 mrg {
1927 1.1 mrg id = get_identifier ("__asan_option_detect_stack_use_after_return");
1928 1.1 mrg decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1929 1.1 mrg integer_type_node);
1930 1.1 mrg SET_DECL_ASSEMBLER_NAME (decl, id);
1931 1.1 mrg TREE_ADDRESSABLE (decl) = 1;
1932 1.1 mrg DECL_ARTIFICIAL (decl) = 1;
1933 1.1 mrg DECL_IGNORED_P (decl) = 1;
1934 1.1 mrg DECL_EXTERNAL (decl) = 1;
1935 1.1 mrg TREE_STATIC (decl) = 1;
1936 1.1 mrg TREE_PUBLIC (decl) = 1;
1937 1.1 mrg TREE_USED (decl) = 1;
1938 1.1 mrg asan_detect_stack_use_after_return = decl;
1939 1.1 mrg }
1940 1.1 mrg orig_base = gen_reg_rtx (Pmode);
1941 1.1 mrg emit_move_insn (orig_base, base);
1942 1.1 mrg ret = expand_normal (asan_detect_stack_use_after_return);
1943 1.1 mrg lab = gen_label_rtx ();
1944 1.1 mrg emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1945 1.1 mrg VOIDmode, 0, lab,
1946 1.1 mrg profile_probability::very_likely ());
1947 1.1 mrg snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1948 1.1 mrg use_after_return_class);
1949 1.1 mrg ret = init_one_libfunc (buf);
1950 1.1 mrg ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1951 1.1 mrg GEN_INT (asan_frame_size
1952 1.1 mrg + base_align_bias),
1953 1.1 mrg TYPE_MODE (pointer_sized_int_node));
1954 1.1 mrg /* __asan_stack_malloc_[n] returns a pointer to the fake stack on success
1955 1.1 mrg and NULL otherwise. Check whether RET is NULL here and jump over the
1956 1.1 mrg BASE reassignment in that case. Otherwise, reassign BASE to RET. */
1957 1.1 mrg emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1958 1.1 mrg VOIDmode, 0, lab,
1959 1.1 mrg profile_probability::very_unlikely ());
1960 1.1 mrg ret = convert_memory_address (Pmode, ret);
1961 1.1 mrg emit_move_insn (base, ret);
1962 1.1 mrg emit_label (lab);
1963 1.1 mrg emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1964 1.1 mrg gen_int_mode (base_align_bias
1965 1.1 mrg - base_offset, Pmode),
1966 1.1 mrg NULL_RTX, 1, OPTAB_DIRECT));
1967 1.1 mrg }
1968 1.1 mrg mem = gen_rtx_MEM (ptr_mode, base);
1969 1.1 mrg mem = adjust_address (mem, VOIDmode, base_align_bias);
1970 1.1 mrg emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1971 1.1 mrg mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1972 1.1 mrg emit_move_insn (mem, expand_normal (str_cst));
1973 1.1 mrg mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1974 1.1 mrg ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1975 1.1 mrg id = get_identifier (buf);
1976 1.1 mrg decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1977 1.1 mrg VAR_DECL, id, char_type_node);
1978 1.1 mrg SET_DECL_ASSEMBLER_NAME (decl, id);
1979 1.1 mrg TREE_ADDRESSABLE (decl) = 1;
1980 1.1 mrg TREE_READONLY (decl) = 1;
1981 1.1 mrg DECL_ARTIFICIAL (decl) = 1;
1982 1.1 mrg DECL_IGNORED_P (decl) = 1;
1983 1.1 mrg TREE_STATIC (decl) = 1;
1984 1.1 mrg TREE_PUBLIC (decl) = 0;
1985 1.1 mrg TREE_USED (decl) = 1;
1986 1.1 mrg DECL_INITIAL (decl) = decl;
1987 1.1 mrg TREE_ASM_WRITTEN (decl) = 1;
1988 1.1 mrg TREE_ASM_WRITTEN (id) = 1;
1989 1.1 mrg emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1990 1.1 mrg shadow_base = expand_binop (Pmode, lshr_optab, base,
1991 1.1 mrg gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
1992 1.1 mrg NULL_RTX, 1, OPTAB_DIRECT);
1993 1.1 mrg shadow_base
1994 1.1 mrg = plus_constant (Pmode, shadow_base,
1995 1.1 mrg asan_shadow_offset ()
1996 1.1 mrg + (base_align_bias >> ASAN_SHADOW_SHIFT));
1997 1.1 mrg gcc_assert (asan_shadow_set != -1
1998 1.1 mrg && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1999 1.1 mrg shadow_mem = gen_rtx_MEM (SImode, shadow_base);
2000 1.1 mrg set_mem_alias_set (shadow_mem, asan_shadow_set);
2001 1.1 mrg if (STRICT_ALIGNMENT)
2002 1.1 mrg set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
2003 1.1 mrg prev_offset = base_offset;
2004 1.1 mrg
2005 1.1 mrg asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
2006 1.1 mrg for (l = length; l; l -= 2)
2007 1.1 mrg {
2008 1.1 mrg if (l == 2)
2009 1.1 mrg cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
2010 1.1 mrg offset = offsets[l - 1];
2011 1.1 mrg
2012 1.1 mrg bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
2013 1.1 mrg /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY, then the
2014 1.1 mrg previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
2015 1.1 mrg In that case we have to emit one extra byte that describes how many
2016 1.1 mrg bytes (out of ASAN_SHADOW_GRANULARITY) of it can be accessed. */
2017 1.1 mrg if (extra_byte)
2018 1.1 mrg {
2019 1.1 mrg HOST_WIDE_INT aoff
2020 1.1 mrg = base_offset + ((offset - base_offset)
2021 1.1 mrg & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
2022 1.1 mrg rz_buffer.emit_redzone_byte (aoff, offset - aoff);
2023 1.1 mrg offset = aoff + ASAN_SHADOW_GRANULARITY;
2024 1.1 mrg }
2025 1.1 mrg
2026 1.1 mrg /* Calculate size of red zone payload. */
2027 1.1 mrg while (offset < offsets[l - 2])
2028 1.1 mrg {
2029 1.1 mrg rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
2030 1.1 mrg offset += ASAN_SHADOW_GRANULARITY;
2031 1.1 mrg }
2032 1.1 mrg
2033 1.1 mrg cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
2034 1.1 mrg }
2035 1.1 mrg
2036 1.1 mrg /* As the automatic variables are aligned to
2037 1.1 mrg ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
2038 1.1 mrg flushed here. */
2039 1.1 mrg gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
2040 1.1 mrg
2041 1.1 mrg do_pending_stack_adjust ();
2042 1.1 mrg
2043 1.1 mrg /* Construct epilogue sequence. */
2044 1.1 mrg start_sequence ();
2045 1.1 mrg
2046 1.1 mrg lab = NULL;
2047 1.1 mrg if (use_after_return_class != -1)
2048 1.1 mrg {
2049 1.1 mrg rtx_code_label *lab2 = gen_label_rtx ();
2050 1.1 mrg char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
2051 1.1 mrg emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
2052 1.1 mrg VOIDmode, 0, lab2,
2053 1.1 mrg profile_probability::very_likely ());
2054 1.1 mrg shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
2055 1.1 mrg set_mem_alias_set (shadow_mem, asan_shadow_set);
2056 1.1 mrg mem = gen_rtx_MEM (ptr_mode, base);
2057 1.1 mrg mem = adjust_address (mem, VOIDmode, base_align_bias);
2058 1.1 mrg emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
2059 1.1 mrg unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
2060 1.1 mrg if (use_after_return_class < 5
2061 1.1 mrg && can_store_by_pieces (sz, builtin_memset_read_str, &c,
2062 1.1 mrg BITS_PER_UNIT, true))
2063 1.1 mrg {
2064 1.1 mrg /* Emit:
2065 1.1 mrg memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
2066 1.1 mrg **SavedFlagPtr(FakeStack, class_id) = 0
2067 1.1 mrg */
2068 1.1 mrg store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
2069 1.1 mrg BITS_PER_UNIT, true, RETURN_BEGIN);
2070 1.1 mrg
2071 1.1 mrg unsigned HOST_WIDE_INT offset
2072 1.1 mrg = (1 << (use_after_return_class + 6));
2073 1.1 mrg offset -= GET_MODE_SIZE (ptr_mode);
2074 1.1 mrg mem = gen_rtx_MEM (ptr_mode, base);
2075 1.1 mrg mem = adjust_address (mem, ptr_mode, offset);
2076 1.1 mrg rtx addr = gen_reg_rtx (ptr_mode);
2077 1.1 mrg emit_move_insn (addr, mem);
2078 1.1 mrg addr = convert_memory_address (Pmode, addr);
2079 1.1 mrg mem = gen_rtx_MEM (QImode, addr);
2080 1.1 mrg emit_move_insn (mem, const0_rtx);
2081 1.1 mrg }
2082 1.1 mrg else if (use_after_return_class >= 5
2083 1.1 mrg || !set_storage_via_setmem (shadow_mem,
2084 1.1 mrg GEN_INT (sz),
2085 1.1 mrg gen_int_mode (c, QImode),
2086 1.1 mrg BITS_PER_UNIT, BITS_PER_UNIT,
2087 1.1 mrg -1, sz, sz, sz))
2088 1.1 mrg {
2089 1.1 mrg snprintf (buf, sizeof buf, "__asan_stack_free_%d",
2090 1.1 mrg use_after_return_class);
2091 1.1 mrg ret = init_one_libfunc (buf);
2092 1.1 mrg rtx addr = convert_memory_address (ptr_mode, base);
2093 1.1 mrg rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
2094 1.1 mrg emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
2095 1.1 mrg GEN_INT (asan_frame_size + base_align_bias),
2096 1.1 mrg TYPE_MODE (pointer_sized_int_node),
2097 1.1 mrg orig_addr, ptr_mode);
2098 1.1 mrg }
2099 1.1 mrg lab = gen_label_rtx ();
2100 1.1 mrg emit_jump (lab);
2101 1.1 mrg emit_label (lab2);
2102 1.1 mrg }
2103 1.1 mrg
2104 1.1 mrg shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
2105 1.1 mrg set_mem_alias_set (shadow_mem, asan_shadow_set);
2106 1.1 mrg
2107 1.1 mrg if (STRICT_ALIGNMENT)
2108 1.1 mrg set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
2109 1.1 mrg
2110 1.1 mrg prev_offset = base_offset;
2111 1.1 mrg last_offset = base_offset;
2112 1.1 mrg last_size = 0;
2113 1.1 mrg last_size_aligned = 0;
2114 1.1 mrg for (l = length; l; l -= 2)
2115 1.1 mrg {
2116 1.1 mrg offset = base_offset + ((offsets[l - 1] - base_offset)
2117 1.1 mrg & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
2118 1.1 mrg if (last_offset + last_size_aligned < offset)
2119 1.1 mrg {
2120 1.1 mrg shadow_mem = adjust_address (shadow_mem, VOIDmode,
2121 1.1 mrg (last_offset - prev_offset)
2122 1.1 mrg >> ASAN_SHADOW_SHIFT);
2123 1.1 mrg prev_offset = last_offset;
2124 1.1 mrg asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
2125 1.1 mrg last_offset = offset;
2126 1.1 mrg last_size = 0;
2127 1.1 mrg }
2128 1.1 mrg else
2129 1.1 mrg last_size = offset - last_offset;
2130 1.1 mrg last_size += base_offset + ((offsets[l - 2] - base_offset)
2131 1.1 mrg & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
2132 1.1 mrg - offset;
2133 1.1 mrg
2134 1.1 mrg /* Unpoison shadow memory that corresponds to a variable that is
2135 1.1 mrg subject to use-after-return sanitization. */
2136 1.1 mrg if (l > 2)
2137 1.1 mrg {
2138 1.1 mrg decl = decls[l / 2 - 2];
2139 1.1 mrg if (asan_handled_variables != NULL
2140 1.1 mrg && asan_handled_variables->contains (decl))
2141 1.1 mrg {
2142 1.1 mrg HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
2143 1.1 mrg if (dump_file && (dump_flags & TDF_DETAILS))
2144 1.1 mrg {
2145 1.1 mrg const char *n = (DECL_NAME (decl)
2146 1.1 mrg ? IDENTIFIER_POINTER (DECL_NAME (decl))
2147 1.1 mrg : "<unknown>");
2148 1.1 mrg fprintf (dump_file, "Unpoisoning shadow stack for variable: "
2149 1.1 mrg "%s (%" PRId64 " B)\n", n, size);
2150 1.1 mrg }
2151 1.1 mrg
2152 1.1 mrg last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
2153 1.1 mrg }
2154 1.1 mrg }
2155 1.1 mrg last_size_aligned
2156 1.1 mrg = ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
2157 1.1 mrg & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
2158 1.1 mrg }
2159 1.1 mrg if (last_size_aligned)
2160 1.1 mrg {
2161 1.1 mrg shadow_mem = adjust_address (shadow_mem, VOIDmode,
2162 1.1 mrg (last_offset - prev_offset)
2163 1.1 mrg >> ASAN_SHADOW_SHIFT);
2164 1.1 mrg asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
2165 1.1 mrg }
2166 1.1 mrg
2167 1.1 mrg /* Clean up the sets of instrumented stack variables and used labels. */
2168 1.1 mrg delete asan_handled_variables;
2169 1.1 mrg asan_handled_variables = NULL;
2170 1.1 mrg delete asan_used_labels;
2171 1.1 mrg asan_used_labels = NULL;
2172 1.1 mrg
2173 1.1 mrg do_pending_stack_adjust ();
2174 1.1 mrg if (lab)
2175 1.1 mrg emit_label (lab);
2176 1.1 mrg
2177 1.1 mrg insns = get_insns ();
2178 1.1 mrg end_sequence ();
2179 1.1 mrg return insns;
2180 1.1 mrg }
2181 1.1 mrg
2182 1.1 mrg /* Emit an __asan_allocas_unpoison (top, bot) call; callers pass
2183 1.1 mrg virtual_stack_dynamic_rtx for TOP. If BEFORE is non-NULL, emit into
2184 1.1 mrg that pending sequence, otherwise start a new sequence. */
2185 1.1 mrg
2186 1.1 mrg rtx_insn *
2187 1.1 mrg asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
2188 1.1 mrg {
2189 1.1 mrg if (before)
2190 1.1 mrg push_to_sequence (before);
2191 1.1 mrg else
2192 1.1 mrg start_sequence ();
2193 1.1 mrg rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
2194 1.1 mrg top = convert_memory_address (ptr_mode, top);
2195 1.1 mrg bot = convert_memory_address (ptr_mode, bot);
2196 1.1 mrg emit_library_call (ret, LCT_NORMAL, ptr_mode,
2197 1.1 mrg top, ptr_mode, bot, ptr_mode);
2198 1.1 mrg
2199 1.1 mrg do_pending_stack_adjust ();
2200 1.1 mrg rtx_insn *insns = get_insns ();
2201 1.1 mrg end_sequence ();
2202 1.1 mrg return insns;
2203 1.1 mrg }
2204 1.1 mrg
2205 1.1 mrg /* Return true if DECL, a global var, might be overridden and therefore
2206 1.1 mrg needs a local alias. */
2207 1.1 mrg
2208 1.1 mrg static bool
2209 1.1 mrg asan_needs_local_alias (tree decl)
2210 1.1 mrg {
2211 1.1 mrg return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
2212 1.1 mrg }
2213 1.1 mrg
2214 1.1 mrg /* Return true if DECL, a global var, is an artificial ODR indicator symbol
2215 1.1 mrg and therefore doesn't need protection. */
2216 1.1 mrg
2217 1.1 mrg static bool
2218 1.1 mrg is_odr_indicator (tree decl)
2219 1.1 mrg {
2220 1.1 mrg return (DECL_ARTIFICIAL (decl)
2221 1.1 mrg && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
2222 1.1 mrg }
2223 1.1 mrg
2224 1.1 mrg /* Return true if DECL is a VAR_DECL that should be protected
2225 1.1 mrg by Address Sanitizer, by appending a red zone with protected
2226 1.1 mrg shadow memory after it and aligning it to at least
2227 1.1 mrg ASAN_RED_ZONE_SIZE bytes. */
2228 1.1 mrg
2229 1.1 mrg bool
2230 1.1 mrg asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
2231 1.1 mrg {
2232 1.1 mrg if (!param_asan_globals)
2233 1.1 mrg return false;
2234 1.1 mrg
2235 1.1 mrg rtx rtl, symbol;
2236 1.1 mrg
2237 1.1 mrg if (TREE_CODE (decl) == STRING_CST)
2238 1.1 mrg {
2239 1.1 mrg /* Instrument all STRING_CSTs except those created
2240 1.1 mrg by asan_pp_string here. */
2241 1.1 mrg if (shadow_ptr_types[0] != NULL_TREE
2242 1.1 mrg && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
2243 1.1 mrg && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
2244 1.1 mrg return false;
2245 1.1 mrg return true;
2246 1.1 mrg }
2247 1.1 mrg if (!VAR_P (decl)
2248 1.1 mrg /* TLS vars aren't statically protectable. */
2249 1.1 mrg || DECL_THREAD_LOCAL_P (decl)
2250 1.1 mrg /* Externs will be protected elsewhere. */
2251 1.1 mrg || DECL_EXTERNAL (decl)
2252 1.1 mrg /* PR sanitizer/81697: For architectures that use section anchors, the
2253 1.1 mrg first call to asan_protect_global may occur before DECL_RTL (decl) is
2254 1.1 mrg set. We should ignore DECL_RTL_SET_P then, because otherwise the first
2255 1.1 mrg call to asan_protect_global will return FALSE and the following calls
2256 1.1 mrg on the same decl after setting DECL_RTL (decl) will return TRUE and
2257 1.1 mrg we'll end up with an inconsistency at runtime.
2258 1.1 mrg || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
2259 1.1 mrg /* Comdat vars pose an ABI problem: we can't know whether
2260 1.1 mrg the var that is selected by the linker will have
2261 1.1 mrg padding or not. */
2262 1.1 mrg || DECL_ONE_ONLY (decl)
2263 1.1 mrg /* Similarly for common vars. People can use -fno-common.
2264 1.1 mrg Note: the Linux kernel is built with -fno-common, so we do instrument
2265 1.1 mrg globals there even if it is C. */
2266 1.1 mrg || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
2267 1.1 mrg /* Don't protect if using a user section: vars placed
2268 1.1 mrg into a user section from multiple TUs are often
2269 1.1 mrg assumed to form an array of such vars, and putting
2270 1.1 mrg padding in there breaks this assumption. */
2271 1.1 mrg || (DECL_SECTION_NAME (decl) != NULL
2272 1.1 mrg && !symtab_node::get (decl)->implicit_section
2273 1.1 mrg && !section_sanitized_p (DECL_SECTION_NAME (decl)))
2274 1.1 mrg /* Don't protect variables in non-generic address-space. */
2275 1.1 mrg || !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)))
2276 1.1 mrg || DECL_SIZE (decl) == 0
2277 1.1 mrg || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
2278 1.1 mrg || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
2279 1.1 mrg || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
2280 1.1 mrg || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
2281 1.1 mrg || TREE_TYPE (decl) == ubsan_get_source_location_type ()
2282 1.1 mrg || is_odr_indicator (decl))
2283 1.1 mrg return false;
2284 1.1 mrg
2285 1.1 mrg if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
2286 1.1 mrg {
2288 1.1 mrg rtl = DECL_RTL (decl);
2289 1.1 mrg if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
2290 1.1 mrg return false;
2291 1.1 mrg symbol = XEXP (rtl, 0);
2292 1.1 mrg
2293 1.1 mrg if (CONSTANT_POOL_ADDRESS_P (symbol)
2294 1.1 mrg || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2295 1.1 mrg return false;
2296 1.1 mrg }
2297 1.1 mrg
2298 1.1 mrg if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
2299 1.1 mrg return false;
2300 1.1 mrg
2301 1.1 mrg if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
2302 1.1 mrg return false;
2303 1.1 mrg
2304 1.1 mrg return true;
2305 1.1 mrg }
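/* For illustration (not compiled; the outcomes below follow from the tests
   above and are subject to the remaining conditions):

     char buf[100];      file-scope definition: may be protected
     __thread int t;     TLS: never protected
     extern int e;       extern: protected where it is defined
     static int s __attribute__ ((section ("mysec")));
                         user section: skipped unless the section is
                         listed as sanitized  */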
2306 1.1 mrg
2307 1.1 mrg /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
2308 1.1 mrg IS_STORE is either 1 (for a store) or 0 (for a load). */
2309 1.1 mrg
2310 1.1 mrg static tree
2311 1.1 mrg report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2312 1.1 mrg int *nargs)
2313 1.1 mrg {
2314 1.1 mrg gcc_assert (!hwasan_sanitize_p ());
2315 1.1 mrg
2316 1.1 mrg static enum built_in_function report[2][2][6]
2317 1.1 mrg = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
2318 1.1 mrg BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
2319 1.1 mrg BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
2320 1.1 mrg { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
2321 1.1 mrg BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
2322 1.1 mrg BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
2323 1.1 mrg { { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
2324 1.1 mrg BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
2325 1.1 mrg BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
2326 1.1 mrg BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
2327 1.1 mrg BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
2328 1.1 mrg BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
2329 1.1 mrg { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
2330 1.1 mrg BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
2331 1.1 mrg BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
2332 1.1 mrg BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
2333 1.1 mrg BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
2334 1.1 mrg BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
2335 1.1 mrg if (size_in_bytes == -1)
2336 1.1 mrg {
2337 1.1 mrg *nargs = 2;
2338 1.1 mrg return builtin_decl_implicit (report[recover_p][is_store][5]);
2339 1.1 mrg }
2340 1.1 mrg *nargs = 1;
2341 1.1 mrg int size_log2 = exact_log2 (size_in_bytes);
2342 1.1 mrg return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
2343 1.1 mrg }
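/* For example (illustrative only): report_error_func (true, false, 4,
   &nargs) yields the decl of __asan_report_store4 with *nargs == 1, while
   SIZE_IN_BYTES == -1 yields __asan_report_store_n with *nargs == 2 (the
   address and the length). With RECOVER_P true the _noabort variants are
   chosen instead.  */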
2344 1.1 mrg
2345 1.1 mrg /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
2346 1.1 mrg IS_STORE is either 1 (for a store) or 0 (for a load). */
2347 1.1 mrg
2348 1.1 mrg static tree
2349 1.1 mrg check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
2350 1.1 mrg int *nargs)
2351 1.1 mrg {
2352 1.1 mrg static enum built_in_function check[2][2][6]
2353 1.1 mrg = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
2354 1.1 mrg BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
2355 1.1 mrg BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
2356 1.1 mrg { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
2357 1.1 mrg BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
2358 1.1 mrg BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
2359 1.1 mrg { { BUILT_IN_ASAN_LOAD1_NOABORT,
2360 1.1 mrg BUILT_IN_ASAN_LOAD2_NOABORT,
2361 1.1 mrg BUILT_IN_ASAN_LOAD4_NOABORT,
2362 1.1 mrg BUILT_IN_ASAN_LOAD8_NOABORT,
2363 1.1 mrg BUILT_IN_ASAN_LOAD16_NOABORT,
2364 1.1 mrg BUILT_IN_ASAN_LOADN_NOABORT },
2365 1.1 mrg { BUILT_IN_ASAN_STORE1_NOABORT,
2366 1.1 mrg BUILT_IN_ASAN_STORE2_NOABORT,
2367 1.1 mrg BUILT_IN_ASAN_STORE4_NOABORT,
2368 1.1 mrg BUILT_IN_ASAN_STORE8_NOABORT,
2369 1.1 mrg BUILT_IN_ASAN_STORE16_NOABORT,
2370 1.1 mrg BUILT_IN_ASAN_STOREN_NOABORT } } };
2371 1.1 mrg if (size_in_bytes == -1)
2372 1.1 mrg {
2373 1.1 mrg *nargs = 2;
2374 1.1 mrg return builtin_decl_implicit (check[recover_p][is_store][5]);
2375 1.1 mrg }
2376 1.1 mrg *nargs = 1;
2377 1.1 mrg int size_log2 = exact_log2 (size_in_bytes);
2378 1.1 mrg return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
2379 1.1 mrg }
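/* Likewise (illustrative only): check_func (false, false, 8, &nargs)
   returns the decl of __asan_load8 with *nargs == 1, and with
   SIZE_IN_BYTES == -1 it returns __asan_loadN with *nargs == 2.  */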
2380 1.1 mrg
2381 1.1 mrg /* Split the current basic block and create a condition statement
2382 1.1 mrg insertion point right before or after the statement pointed to by
2383 1.1 mrg ITER. Return an iterator to the point at which the caller might
2384 1.1 mrg safely insert the condition statement.
2385 1.1 mrg
2386 1.1 mrg THEN_BLOCK must be set to the address of an uninitialized instance
2387 1.1 mrg of basic_block. The function will then set *THEN_BLOCK to the
2388 1.1 mrg 'then block' of the condition statement to be inserted by the
2389 1.1 mrg caller.
2390 1.1 mrg
2391 1.1 mrg If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
2392 1.1 mrg *THEN_BLOCK to *FALLTHROUGH_BLOCK.
2393 1.1 mrg
2394 1.1 mrg Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
2395 1.1 mrg block' of the condition statement to be inserted by the caller.
2396 1.1 mrg
2397 1.1 mrg Note that *FALLTHROUGH_BLOCK is a new block that contains the
2398 1.1 mrg statements starting from *ITER, and *THEN_BLOCK is a new empty
2399 1.1 mrg block.
2400 1.1 mrg
2401 1.1 mrg *ITER is adjusted to always point to the first statement
2402 1.1 mrg of the basic block *FALLTHROUGH_BLOCK. That statement is the
2403 1.1 mrg same as what ITER was pointing to prior to calling this function,
2404 1.1 mrg if BEFORE_P is true; otherwise, it is its following statement. */
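/* Schematically, the CFG that create_cond_insert_point produces looks
   like this (a sketch; the then_bb -> fallthru_bb edge is only created
   when CREATE_THEN_FALLTHRU_EDGE is true):

            cond_bb
        TRUE /   \ FALSE
            v     v
       then_bb -> fallthru_bb

   with then_bb new and empty (*THEN_BLOCK) and fallthru_bb
   (*FALLTHROUGH_BLOCK) holding the statements from the old *ITER on.  */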
2405 1.1 mrg
2406 1.1 mrg gimple_stmt_iterator
2407 1.1 mrg create_cond_insert_point (gimple_stmt_iterator *iter,
2408 1.1 mrg bool before_p,
2409 1.1 mrg bool then_more_likely_p,
2410 1.1 mrg bool create_then_fallthru_edge,
2411 1.1 mrg basic_block *then_block,
2412 1.1 mrg basic_block *fallthrough_block)
2413 1.1 mrg {
2414 1.1 mrg gimple_stmt_iterator gsi = *iter;
2415 1.1 mrg
2416 1.1 mrg if (!gsi_end_p (gsi) && before_p)
2417 1.1 mrg gsi_prev (&gsi);
2418 1.1 mrg
2419 1.1 mrg basic_block cur_bb = gsi_bb (*iter);
2420 1.1 mrg
2421 1.1 mrg edge e = split_block (cur_bb, gsi_stmt (gsi));
2422 1.1 mrg
2423 1.1 mrg /* Get a hold on the 'condition block', the 'then block' and the
2424 1.1 mrg 'else block'. */
2425 1.1 mrg basic_block cond_bb = e->src;
2426 1.1 mrg basic_block fallthru_bb = e->dest;
2427 1.1 mrg basic_block then_bb = create_empty_bb (cond_bb);
2428 1.1 mrg if (current_loops)
2429 1.1 mrg {
2430 1.1 mrg add_bb_to_loop (then_bb, cond_bb->loop_father);
2431 1.1 mrg loops_state_set (LOOPS_NEED_FIXUP);
2432 1.1 mrg }
2433 1.1 mrg
2434 1.1 mrg /* Set up the newly created 'then block'. */
2435 1.1 mrg e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
2436 1.1 mrg profile_probability fallthrough_probability
2437 1.1 mrg = then_more_likely_p
2438 1.1 mrg ? profile_probability::very_unlikely ()
2439 1.1 mrg : profile_probability::very_likely ();
2440 1.1 mrg e->probability = fallthrough_probability.invert ();
2441 1.1 mrg then_bb->count = e->count ();
2442 1.1 mrg if (create_then_fallthru_edge)
2443 1.1 mrg make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
2444 1.1 mrg
2445 1.1 mrg /* Set up the fallthrough basic block. */
2446 1.1 mrg e = find_edge (cond_bb, fallthru_bb);
2447 1.1 mrg e->flags = EDGE_FALSE_VALUE;
2448 1.1 mrg e->probability = fallthrough_probability;
2449 1.1 mrg
2450 1.1 mrg /* Update dominance info for the newly created then_bb; note that
2451 1.1 mrg fallthru_bb's dominance info has already been updated by
2452 1.1 mrg split_block. */
2453 1.1 mrg if (dom_info_available_p (CDI_DOMINATORS))
2454 1.1 mrg set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
2455 1.1 mrg
2456 1.1 mrg *then_block = then_bb;
2457 1.1 mrg *fallthrough_block = fallthru_bb;
2458 1.1 mrg *iter = gsi_start_bb (fallthru_bb);
2459 1.1 mrg
2460 1.1 mrg return gsi_last_bb (cond_bb);
2461 1.1 mrg }
2462 1.1 mrg
2463 1.1 mrg /* Insert an if condition followed by a 'then block' right before the
2464 1.1 mrg statement pointed to by ITER. The fallthrough block -- which is the
2465 1.1 mrg else block of the condition as well as the destination of the
2466 1.1 mrg outgoing edge of the 'then block' -- starts with the statement
2467 1.1 mrg pointed to by ITER.
2468 1.1 mrg
2469 1.1 mrg COND is the condition of the if.
2470 1.1 mrg
2471 1.1 mrg If THEN_MORE_LIKELY_P is true, the probability of the edge to the
2472 1.1 mrg 'then block' is higher than the probability of the edge to the
2473 1.1 mrg fallthrough block.
2474 1.1 mrg
2475 1.1 mrg Upon completion of the function, *THEN_BB is set to the newly
2476 1.1 mrg inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
2477 1.1 mrg fallthrough block.
2478 1.1 mrg
2479 1.1 mrg *ITER is adjusted to still point to the same statement it was
2480 1.1 mrg pointing to initially. */
2481 1.1 mrg
2482 1.1 mrg static void
2483 1.1 mrg insert_if_then_before_iter (gcond *cond,
2484 1.1 mrg gimple_stmt_iterator *iter,
2485 1.1 mrg bool then_more_likely_p,
2486 1.1 mrg basic_block *then_bb,
2487 1.1 mrg basic_block *fallthrough_bb)
2488 1.1 mrg {
2489 1.1 mrg gimple_stmt_iterator cond_insert_point =
2490 1.1 mrg create_cond_insert_point (iter,
2491 1.1 mrg /*before_p=*/true,
2492 1.1 mrg then_more_likely_p,
2493 1.1 mrg /*create_then_fallthru_edge=*/true,
2494 1.1 mrg then_bb,
2495 1.1 mrg fallthrough_bb);
2496 1.1 mrg gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2497 1.1 mrg }
2498 1.1 mrg
2499 1.1 mrg /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
2500 1.1 mrg If RETURN_ADDRESS is set to true, return the memory location instead
2501 1.1 mrg of the value in the shadow memory. */
2502 1.1 mrg
2503 1.1 mrg static tree
2504 1.1 mrg build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
2505 1.1 mrg tree base_addr, tree shadow_ptr_type,
2506 1.1 mrg bool return_address = false)
2507 1.1 mrg {
2508 1.1 mrg tree t, uintptr_type = TREE_TYPE (base_addr);
2509 1.1 mrg tree shadow_type = TREE_TYPE (shadow_ptr_type);
2510 1.1 mrg gimple *g;
2511 1.1 mrg
2512 1.1 mrg t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
2513 1.1 mrg g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
2514 1.1 mrg base_addr, t);
2515 1.1 mrg gimple_set_location (g, location);
2516 1.1 mrg gsi_insert_after (gsi, g, GSI_NEW_STMT);
2517 1.1 mrg
2518 1.1 mrg t = build_int_cst (uintptr_type, asan_shadow_offset ());
2519 1.1 mrg g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
2520 1.1 mrg gimple_assign_lhs (g), t);
2521 1.1 mrg gimple_set_location (g, location);
2522 1.1 mrg gsi_insert_after (gsi, g, GSI_NEW_STMT);
2523 1.1 mrg
2524 1.1 mrg g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
2525 1.1 mrg gimple_assign_lhs (g));
2526 1.1 mrg gimple_set_location (g, location);
2527 1.1 mrg gsi_insert_after (gsi, g, GSI_NEW_STMT);
2528 1.1 mrg
2529 1.1 mrg if (!return_address)
2530 1.1 mrg {
2531 1.1 mrg t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
2532 1.1 mrg build_int_cst (shadow_ptr_type, 0));
2533 1.1 mrg g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
2534 1.1 mrg gimple_set_location (g, location);
2535 1.1 mrg gsi_insert_after (gsi, g, GSI_NEW_STMT);
2536 1.1 mrg }
2537 1.1 mrg
2538 1.1 mrg return gimple_assign_lhs (g);
2539 1.1 mrg }
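/* For example (illustrative GIMPLE only, assuming the common
   ASAN_SHADOW_SHIFT of 3; the SSA names are hypothetical): for a read of
   the shadow value the statements inserted look roughly like

     _1 = base_addr >> 3;
     _2 = _1 + shadow_offset;
     _3 = (shadow_ptr_type) _2;
     _4 = MEM[(shadow_type *) _3];

   and _4 is returned (_3 instead when RETURN_ADDRESS is true).  */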
2540 1.1 mrg
2541 1.1 mrg /* BASE can already be an SSA_NAME; in that case, do not create a
2542 1.1 mrg new SSA_NAME for it. */
2543 1.1 mrg
2544 1.1 mrg static tree
2545 1.1 mrg maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2546 1.1 mrg bool before_p)
2547 1.1 mrg {
2548 1.1 mrg STRIP_USELESS_TYPE_CONVERSION (base);
2549 1.1 mrg if (TREE_CODE (base) == SSA_NAME)
2550 1.1 mrg return base;
2551 1.1 mrg gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)), base);
2552 1.1 mrg gimple_set_location (g, loc);
2553 1.1 mrg if (before_p)
2554 1.1 mrg gsi_insert_before (iter, g, GSI_SAME_STMT);
2555 1.1 mrg else
2556 1.1 mrg gsi_insert_after (iter, g, GSI_NEW_STMT);
2557 1.1 mrg return gimple_assign_lhs (g);
2558 1.1 mrg }
2559 1.1 mrg
2560 1.1 mrg /* LEN can already have the necessary size and precision;
2561 1.1 mrg in that case, do not create a new variable. */
2562 1.1 mrg
2563 1.1 mrg tree
2564 1.1 mrg maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2565 1.1 mrg bool before_p)
2566 1.1 mrg {
2567 1.1 mrg if (ptrofftype_p (len))
2568 1.1 mrg return len;
2569 1.1 mrg gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2570 1.1 mrg NOP_EXPR, len);
2571 1.1 mrg gimple_set_location (g, loc);
2572 1.1 mrg if (before_p)
2573 1.1 mrg gsi_insert_before (iter, g, GSI_SAME_STMT);
2574 1.1 mrg else
2575 1.1 mrg gsi_insert_after (iter, g, GSI_NEW_STMT);
2576 1.1 mrg return gimple_assign_lhs (g);
2577 1.1 mrg }
2578 1.1 mrg
2579 1.1 mrg /* Instrument the memory access instruction BASE. Insert new
2580 1.1 mrg statements before or after ITER.
2581 1.1 mrg
2582 1.1 mrg Note that the memory access represented by BASE can be either an
2583 1.1 mrg SSA_NAME, or a non-SSA expression. LOCATION is the source code
2584 1.1 mrg location. IS_STORE is TRUE for a store, FALSE for a load.
2585 1.1 mrg BEFORE_P is TRUE for inserting the instrumentation code before
2586 1.1 mrg ITER, FALSE for inserting it after ITER. IS_SCALAR_ACCESS is TRUE
2587 1.1 mrg for a scalar memory access and FALSE for memory region access.
2588 1.1 mrg IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
2589 1.1 mrg non-zero length. ALIGN tells the alignment of the accessed memory object.
2593 1.1 mrg
2594 1.1 mrg If BEFORE_P is TRUE, *ITER is arranged to still point to the
2595 1.1 mrg statement it was pointing to prior to calling this function,
2596 1.1 mrg otherwise, it points to the statement logically following it. */
2597 1.1 mrg
2598 1.1 mrg static void
2599 1.1 mrg build_check_stmt (location_t loc, tree base, tree len,
2600 1.1 mrg HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
2601 1.1 mrg bool is_non_zero_len, bool before_p, bool is_store,
2602 1.1 mrg bool is_scalar_access, unsigned int align = 0)
2603 1.1 mrg {
2604 1.1 mrg gimple_stmt_iterator gsi = *iter;
2605 1.1 mrg gimple *g;
2606 1.1 mrg
2607 1.1 mrg gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
2608 1.1 mrg gcc_assert (size_in_bytes == -1 || size_in_bytes >= 1);
2609 1.1 mrg
2612 1.1 mrg base = unshare_expr (base);
2613 1.1 mrg base = maybe_create_ssa_name (loc, base, &gsi, before_p);
2614 1.1 mrg
2615 1.1 mrg if (len)
2616 1.1 mrg {
2617 1.1 mrg len = unshare_expr (len);
2618 1.1 mrg len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
2619 1.1 mrg }
2620 1.1 mrg else
2621 1.1 mrg {
2622 1.1 mrg gcc_assert (size_in_bytes != -1);
2623 1.1 mrg len = build_int_cst (pointer_sized_int_node, size_in_bytes);
2624 1.1 mrg }
2625 1.1 mrg
2626 1.1 mrg if (size_in_bytes > 1)
2627 1.1 mrg {
2628 1.1 mrg if ((size_in_bytes & (size_in_bytes - 1)) != 0
2629 1.1 mrg || size_in_bytes > 16)
2630 1.1 mrg is_scalar_access = false;
2631 1.1 mrg else if (align && align < size_in_bytes * BITS_PER_UNIT)
2632 1.1 mrg {
2633 1.1 mrg /* On non-strict-alignment targets, if a
2634 1.1 mrg 16-byte access is only 8-byte aligned,
2635 1.1 mrg this results in a misaligned 2-byte
2636 1.1 mrg shadow memory load, but the check can
2637 1.1 mrg otherwise be handled with one read. */
2638 1.1 mrg if (size_in_bytes != 16
2639 1.1 mrg || STRICT_ALIGNMENT
2640 1.1 mrg || align < 8 * BITS_PER_UNIT)
2641 1.1 mrg is_scalar_access = false;
2642 1.1 mrg }
2643 1.1 mrg }
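/* E.g. under these rules a 12-byte access is never treated as scalar
   (its size is not a power of two), while a 16-byte access that is
   only 8-byte aligned stays scalar only on non-strict-alignment
   targets, at the cost of a potentially misaligned 2-byte shadow
   load. */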
2644 1.1 mrg
2645 1.1 mrg HOST_WIDE_INT flags = 0;
2646 1.1 mrg if (is_store)
2647 1.1 mrg flags |= ASAN_CHECK_STORE;
2648 1.1 mrg if (is_non_zero_len)
2649 1.1 mrg flags |= ASAN_CHECK_NON_ZERO_LEN;
2650 1.1 mrg if (is_scalar_access)
2651 1.1 mrg flags |= ASAN_CHECK_SCALAR_ACCESS;
2652 1.1 mrg
2653 1.1 mrg enum internal_fn fn = hwasan_sanitize_p ()
2654 1.1 mrg ? IFN_HWASAN_CHECK
2655 1.1 mrg : IFN_ASAN_CHECK;
2656 1.1 mrg
2657 1.1 mrg g = gimple_build_call_internal (fn, 4,
2658 1.1 mrg build_int_cst (integer_type_node, flags),
2659 1.1 mrg base, len,
2660 1.1 mrg build_int_cst (integer_type_node,
2661 1.1 mrg align / BITS_PER_UNIT));
2662 1.1 mrg gimple_set_location (g, loc);
2663 1.1 mrg if (before_p)
2664 1.1 mrg gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2665 1.1 mrg else
2666 1.1 mrg {
2667 1.1 mrg gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2668 1.1 mrg gsi_next (&gsi);
2669 1.1 mrg *iter = gsi;
2670 1.1 mrg }
2671 1.1 mrg }
2672 1.1 mrg
2673 1.1 mrg /* If T represents a memory access, add instrumentation code before ITER.
2674 1.1 mrg LOCATION is source code location.
2675 1.1 mrg IS_STORE is either TRUE (for a store) or FALSE (for a load). */
2676 1.1 mrg
2677 1.1 mrg static void
2678 1.1 mrg instrument_derefs (gimple_stmt_iterator *iter, tree t,
2679 1.1 mrg location_t location, bool is_store)
2680 1.1 mrg {
2681 1.1 mrg if (is_store && !(asan_instrument_writes () || hwasan_instrument_writes ()))
2682 1.1 mrg return;
2683 1.1 mrg if (!is_store && !(asan_instrument_reads () || hwasan_instrument_reads ()))
2684 1.1 mrg return;
2685 1.1 mrg
2686 1.1 mrg tree type, base;
2687 1.1 mrg HOST_WIDE_INT size_in_bytes;
2688 1.1 mrg if (location == UNKNOWN_LOCATION)
2689 1.1 mrg location = EXPR_LOCATION (t);
2690 1.1 mrg
2691 1.1 mrg type = TREE_TYPE (t);
2692 1.1 mrg switch (TREE_CODE (t))
2693 1.1 mrg {
2694 1.1 mrg case ARRAY_REF:
2695 1.1 mrg case COMPONENT_REF:
2696 1.1 mrg case INDIRECT_REF:
2697 1.1 mrg case MEM_REF:
2698 1.1 mrg case VAR_DECL:
2699 1.1 mrg case BIT_FIELD_REF:
2700 1.1 mrg break;
2702 1.1 mrg default:
2703 1.1 mrg return;
2704 1.1 mrg }
2705 1.1 mrg
2706 1.1 mrg size_in_bytes = int_size_in_bytes (type);
2707 1.1 mrg if (size_in_bytes <= 0)
2708 1.1 mrg return;
2709 1.1 mrg
2710 1.1 mrg poly_int64 bitsize, bitpos;
2711 1.1 mrg tree offset;
2712 1.1 mrg machine_mode mode;
2713 1.1 mrg int unsignedp, reversep, volatilep = 0;
2714 1.1 mrg tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2715 1.1 mrg &unsignedp, &reversep, &volatilep);
2716 1.1 mrg
2717 1.1 mrg if (TREE_CODE (t) == COMPONENT_REF
2718 1.1 mrg && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2719 1.1 mrg {
2720 1.1 mrg tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2721 1.1 mrg instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2722 1.1 mrg TREE_OPERAND (t, 0), repr,
2723 1.1 mrg TREE_OPERAND (t, 2)),
2724 1.1 mrg location, is_store);
2725 1.1 mrg return;
2726 1.1 mrg }
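/* E.g. for struct S { int i : 3; } s; a store to s.i is re-instrumented
   through the representative field that covers the bit-field's whole
   storage unit, since the bit-field itself need not be byte-sized or
   byte-aligned. */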
2727 1.1 mrg
2728 1.1 mrg if (!multiple_p (bitpos, BITS_PER_UNIT)
2729 1.1 mrg || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2730 1.1 mrg return;
2731 1.1 mrg
2732 1.1 mrg if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2733 1.1 mrg return;
2734 1.1 mrg
2735 1.1 mrg /* Accesses to non-generic address-spaces should not be instrumented. */
2736 1.1 mrg if (!ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (inner))))
2737 1.1 mrg return;
2738 1.1 mrg
2739 1.1 mrg poly_int64 decl_size;
2740 1.1 mrg if ((VAR_P (inner) || TREE_CODE (inner) == RESULT_DECL)
2741 1.1 mrg && offset == NULL_TREE
2742 1.1 mrg && DECL_SIZE (inner)
2743 1.1 mrg && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
2744 1.1 mrg && known_subrange_p (bitpos, bitsize, 0, decl_size))
2745 1.1 mrg {
2746 1.1 mrg if (VAR_P (inner) && DECL_THREAD_LOCAL_P (inner))
2747 1.1 mrg return;
2748 1.1 mrg /* If we're not sanitizing globals and we can tell statically that this
2749 1.1 mrg access is inside a global variable, then there's no point adding
2750 1.1 mrg instrumentation to check the access. N.b. hwasan currently never
2751 1.1 mrg sanitizes globals. */
2752 1.1 mrg if ((hwasan_sanitize_p () || !param_asan_globals)
2753 1.1 mrg && is_global_var (inner))
2754 1.1 mrg return;
2755 1.1 mrg if (!TREE_STATIC (inner))
2756 1.1 mrg {
2757 1.1 mrg /* Automatic vars in the current function will always be
2758 1.1 mrg accessible. */
2759 1.1 mrg if (decl_function_context (inner) == current_function_decl
2760 1.1 mrg && (!asan_sanitize_use_after_scope ()
2761 1.1 mrg || !TREE_ADDRESSABLE (inner)))
2762 1.1 mrg return;
2763 1.1 mrg }
2764 1.1 mrg /* Always instrument external vars, they might be dynamically
2765 1.1 mrg initialized. */
2766 1.1 mrg else if (!DECL_EXTERNAL (inner))
2767 1.1 mrg {
2768 1.1 mrg /* Static vars, if they are known not to be dynamically
2769 1.1 mrg initialized, will always be accessible. */
2770 1.1 mrg varpool_node *vnode = varpool_node::get (inner);
2771 1.1 mrg if (vnode && !vnode->dynamically_initialized)
2772 1.1 mrg return;
2773 1.1 mrg }
2774 1.1 mrg }
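/* E.g. a direct store to a local "int x" whose address is never taken
   needs no check and is skipped above, while the same store performed
   through a pointer (a MEM_REF) does not match the DECL cases and is
   still instrumented. */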
2775 1.1 mrg
2776 1.1 mrg if (DECL_P (inner)
2777 1.1 mrg && decl_function_context (inner) == current_function_decl
2778 1.1 mrg && !TREE_ADDRESSABLE (inner))
2779 1.1 mrg mark_addressable (inner);
2780 1.1 mrg
2781 1.1 mrg base = build_fold_addr_expr (t);
2782 1.1 mrg if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2783 1.1 mrg {
2784 1.1 mrg unsigned int align = get_object_alignment (t);
2785 1.1 mrg build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2786 1.1 mrg /*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2787 1.1 mrg is_store, /*is_scalar_access*/true, align);
2788 1.1 mrg update_mem_ref_hash_table (base, size_in_bytes);
2789 1.1 mrg update_mem_ref_hash_table (t, size_in_bytes);
2790 1.1 mrg }
2791 1.1 mrg
2792 1.1 mrg }
2793 1.1 mrg
2794 1.1 mrg /* Insert a memory reference into the hash table if the access length
2795 1.1 mrg can be determined at compile time. */
2796 1.1 mrg
2797 1.1 mrg static void
2798 1.1 mrg maybe_update_mem_ref_hash_table (tree base, tree len)
2799 1.1 mrg {
2800 1.1 mrg if (!POINTER_TYPE_P (TREE_TYPE (base))
2801 1.1 mrg || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2802 1.1 mrg return;
2803 1.1 mrg
2804 1.1 mrg HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2805 1.1 mrg
2806 1.1 mrg if (size_in_bytes != -1)
2807 1.1 mrg update_mem_ref_hash_table (base, size_in_bytes);
2808 1.1 mrg }
2809 1.1 mrg
2810 1.1 mrg /* Instrument an access to a contiguous memory region that starts at
2811 1.1 mrg the address pointed to by BASE, over a length of LEN (expressed in
2812 1.1 mrg units of sizeof (*BASE) bytes). ITER points to the statement before
2813 1.1 mrg which the instrumentation instructions must be inserted. LOCATION
2814 1.1 mrg is the source location that the instrumentation instructions must
2815 1.1 mrg have. If IS_STORE is true, then the memory access is a store;
2816 1.1 mrg otherwise, it's a load. */
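/* For example, for a call like memset (p, 0, n) the destination
   region [p, p + n) is checked as a store before the call executes;
   when n is a compile-time constant the emitted .ASAN_CHECK carries a
   constant length, otherwise the length stays a runtime value. */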
2817 1.1 mrg
2818 1.1 mrg static void
2819 1.1 mrg instrument_mem_region_access (tree base, tree len,
2820 1.1 mrg gimple_stmt_iterator *iter,
2821 1.1 mrg location_t location, bool is_store)
2822 1.1 mrg {
2823 1.1 mrg if (!POINTER_TYPE_P (TREE_TYPE (base))
2824 1.1 mrg || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2825 1.1 mrg || integer_zerop (len))
2826 1.1 mrg return;
2827 1.1 mrg
2828 1.1 mrg HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2829 1.1 mrg
2830 1.1 mrg if ((size_in_bytes == -1)
2831 1.1 mrg || !has_mem_ref_been_instrumented (base, size_in_bytes))
2832 1.1 mrg {
2833 1.1 mrg build_check_stmt (location, base, len, size_in_bytes, iter,
2834 1.1 mrg /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2835 1.1 mrg is_store, /*is_scalar_access*/false, /*align*/0);
2836 1.1 mrg }
2837 1.1 mrg
2838 1.1 mrg maybe_update_mem_ref_hash_table (base, len);
2839 1.1 mrg *iter = gsi_for_stmt (gsi_stmt (*iter));
2840 1.1 mrg }
2841 1.1 mrg
2842 1.1 mrg /* Instrument the call to a built-in memory access function that is
2843 1.1 mrg pointed to by the iterator ITER.
2844 1.1 mrg
2845 1.1 mrg Upon completion, return TRUE iff *ITER has been advanced to the
2846 1.1 mrg statement following the one it was originally pointing to. */
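/* E.g. for memcpy (d, s, n) this checks [s, s + n) as a load and
   [d, d + n) as a store, unless the call is intercepted by the
   runtime library (INTERCEPTED_P), in which case the library does the
   checking itself and we only record the references in the local hash
   table. */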
2847 1.1 mrg
2848 1.1 mrg static bool
2849 1.1 mrg instrument_builtin_call (gimple_stmt_iterator *iter)
2850 1.1 mrg {
2851 1.1 mrg if (!(asan_memintrin () || hwasan_memintrin ()))
2852 1.1 mrg return false;
2853 1.1 mrg
2854 1.1 mrg bool iter_advanced_p = false;
2855 1.1 mrg gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2856 1.1 mrg
2857 1.1 mrg gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2858 1.1 mrg
2859 1.1 mrg location_t loc = gimple_location (call);
2860 1.1 mrg
2861 1.1 mrg asan_mem_ref src0, src1, dest;
2862 1.1 mrg asan_mem_ref_init (&src0, NULL, 1);
2863 1.1 mrg asan_mem_ref_init (&src1, NULL, 1);
2864 1.1 mrg asan_mem_ref_init (&dest, NULL, 1);
2865 1.1 mrg
2866 1.1 mrg tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2867 1.1 mrg bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2868 1.1 mrg dest_is_deref = false, intercepted_p = true;
2869 1.1 mrg
2870 1.1 mrg if (get_mem_refs_of_builtin_call (call,
2871 1.1 mrg &src0, &src0_len, &src0_is_store,
2872 1.1 mrg &src1, &src1_len, &src1_is_store,
2873 1.1 mrg &dest, &dest_len, &dest_is_store,
2874 1.1 mrg &dest_is_deref, &intercepted_p, iter))
2875 1.1 mrg {
2876 1.1 mrg if (dest_is_deref)
2877 1.1 mrg {
2878 1.1 mrg instrument_derefs (iter, dest.start, loc, dest_is_store);
2879 1.1 mrg gsi_next (iter);
2880 1.1 mrg iter_advanced_p = true;
2881 1.1 mrg }
2882 1.1 mrg else if (!intercepted_p
2883 1.1 mrg && (src0_len || src1_len || dest_len))
2884 1.1 mrg {
2885 1.1 mrg if (src0.start != NULL_TREE)
2886 1.1 mrg instrument_mem_region_access (src0.start, src0_len,
2887 1.1 mrg iter, loc, /*is_store=*/false);
2888 1.1 mrg if (src1.start != NULL_TREE)
2889 1.1 mrg instrument_mem_region_access (src1.start, src1_len,
2890 1.1 mrg iter, loc, /*is_store=*/false);
2891 1.1 mrg if (dest.start != NULL_TREE)
2892 1.1 mrg instrument_mem_region_access (dest.start, dest_len,
2893 1.1 mrg iter, loc, /*is_store=*/true);
2894 1.1 mrg
2895 1.1 mrg *iter = gsi_for_stmt (call);
2896 1.1 mrg gsi_next (iter);
2897 1.1 mrg iter_advanced_p = true;
2898 1.1 mrg }
2899 1.1 mrg else
2900 1.1 mrg {
2901 1.1 mrg if (src0.start != NULL_TREE)
2902 1.1 mrg maybe_update_mem_ref_hash_table (src0.start, src0_len);
2903 1.1 mrg if (src1.start != NULL_TREE)
2904 1.1 mrg maybe_update_mem_ref_hash_table (src1.start, src1_len);
2905 1.1 mrg if (dest.start != NULL_TREE)
2906 1.1 mrg maybe_update_mem_ref_hash_table (dest.start, dest_len);
2907 1.1 mrg }
2908 1.1 mrg }
2909 1.1 mrg return iter_advanced_p;
2910 1.1 mrg }
2911 1.1 mrg
2912 1.1 mrg /* Instrument the assignment statement pointed to by ITER, if it is
2913 1.1 mrg subject to instrumentation. Return TRUE iff instrumentation
2914 1.1 mrg actually happened. In that case, the iterator ITER is advanced to
2915 1.1 mrg the next statement following the one initially pointed to by ITER,
2916 1.1 mrg and the memory reference whose access has been instrumented is
2917 1.1 mrg added to the memory references hash table. */
2918 1.1 mrg
2919 1.1 mrg static bool
2920 1.1 mrg maybe_instrument_assignment (gimple_stmt_iterator *iter)
2921 1.1 mrg {
2922 1.1 mrg gimple *s = gsi_stmt (*iter);
2923 1.1 mrg
2924 1.1 mrg gcc_assert (gimple_assign_single_p (s));
2925 1.1 mrg
2926 1.1 mrg tree ref_expr = NULL_TREE;
2927 1.1 mrg bool is_store, is_instrumented = false;
2928 1.1 mrg
2929 1.1 mrg if (gimple_store_p (s))
2930 1.1 mrg {
2931 1.1 mrg ref_expr = gimple_assign_lhs (s);
2932 1.1 mrg is_store = true;
2933 1.1 mrg instrument_derefs (iter, ref_expr,
2934 1.1 mrg gimple_location (s),
2935 1.1 mrg is_store);
2936 1.1 mrg is_instrumented = true;
2937 1.1 mrg }
2938 1.1 mrg
2939 1.1 mrg if (gimple_assign_load_p (s))
2940 1.1 mrg {
2941 1.1 mrg ref_expr = gimple_assign_rhs1 (s);
2942 1.1 mrg is_store = false;
2943 1.1 mrg instrument_derefs (iter, ref_expr,
2944 1.1 mrg gimple_location (s),
2945 1.1 mrg is_store);
2946 1.1 mrg is_instrumented = true;
2947 1.1 mrg }
2948 1.1 mrg
2949 1.1 mrg if (is_instrumented)
2950 1.1 mrg gsi_next (iter);
2951 1.1 mrg
2952 1.1 mrg return is_instrumented;
2953 1.1 mrg }
2954 1.1 mrg
2955 1.1 mrg /* Instrument the function call pointed to by the iterator ITER, if it
2956 1.1 mrg is subject to instrumentation. At the moment, the only function
2957 1.1 mrg calls that are instrumented are some built-in functions that access
2958 1.1 mrg memory. Look at instrument_builtin_call to learn more.
2959 1.1 mrg
2960 1.1 mrg Upon completion return TRUE iff *ITER was advanced to the statement
2961 1.1 mrg following the one it was originally pointing to. */
2962 1.1 mrg
2963 1.1 mrg static bool
2964 1.1 mrg maybe_instrument_call (gimple_stmt_iterator *iter)
2965 1.1 mrg {
2966 1.1 mrg gimple *stmt = gsi_stmt (*iter);
2967 1.1 mrg bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2968 1.1 mrg
2969 1.1 mrg if (is_builtin && instrument_builtin_call (iter))
2970 1.1 mrg return true;
2971 1.1 mrg
2972 1.1 mrg if (gimple_call_noreturn_p (stmt))
2973 1.1 mrg {
2974 1.1 mrg if (is_builtin)
2975 1.1 mrg {
2976 1.1 mrg tree callee = gimple_call_fndecl (stmt);
2977 1.1 mrg switch (DECL_FUNCTION_CODE (callee))
2978 1.1 mrg {
2979 1.1 mrg case BUILT_IN_UNREACHABLE:
2980 1.1 mrg case BUILT_IN_TRAP:
2981 1.1 mrg /* Don't instrument these. */
2982 1.1 mrg return false;
2983 1.1 mrg default:
2984 1.1 mrg break;
2985 1.1 mrg }
2986 1.1 mrg }
2987 1.1 mrg /* If a function does not return, then we must handle clearing up the
2988 1.1 mrg shadow stack accordingly. For ASAN we can simply set the entire stack
2989 1.1 mrg to "valid" for accesses by setting the shadow space to 0 and all
2990 1.1 mrg accesses will pass checks. That means that some bad accesses may be
2991 1.1 mrg missed, but we will not report any false positives.
2992 1.1 mrg
2993 1.1 mrg This is not possible for HWASAN. Since there is no "always valid" tag
2994 1.1 mrg we cannot set any space to "always valid". If we were to clear the
2995 1.1 mrg entire shadow stack then code resuming from `longjmp` or a caught
2996 1.1 mrg exception would trigger false positives when correctly accessing
2997 1.1 mrg variables on the stack. Hence we need to handle things like
2998 1.1 mrg `longjmp`, thread exit, and exceptions in a different way. These
2999 1.1 mrg problems must be handled externally to the compiler, e.g. in the
3000 1.1 mrg language runtime. */
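/* Concretely, for ASAN the code below inserts a call

     __asan_handle_no_return ();

   immediately before the noreturn call; the runtime then unpoisons
   the whole stack of the current thread. */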
3001 1.1 mrg if (! hwasan_sanitize_p ())
3002 1.1 mrg {
3003 1.1 mrg tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
3004 1.1 mrg gimple *g = gimple_build_call (decl, 0);
3005 1.1 mrg gimple_set_location (g, gimple_location (stmt));
3006 1.1 mrg gsi_insert_before (iter, g, GSI_SAME_STMT);
3007 1.1 mrg }
3008 1.1 mrg }
3009 1.1 mrg
3010 1.1 mrg bool instrumented = false;
3011 1.1 mrg if (gimple_store_p (stmt))
3012 1.1 mrg {
3013 1.1 mrg tree ref_expr = gimple_call_lhs (stmt);
3014 1.1 mrg instrument_derefs (iter, ref_expr,
3015 1.1 mrg gimple_location (stmt),
3016 1.1 mrg /*is_store=*/true);
3017 1.1 mrg
3018 1.1 mrg instrumented = true;
3019 1.1 mrg }
3020 1.1 mrg
3021 1.1 mrg /* Walk through the gimple_call arguments and check them if needed. */
3022 1.1 mrg unsigned args_num = gimple_call_num_args (stmt);
3023 1.1 mrg for (unsigned i = 0; i < args_num; ++i)
3024 1.1 mrg {
3025 1.1 mrg tree arg = gimple_call_arg (stmt, i);
3026 1.1 mrg /* If ARG is not a non-aggregate register variable, the compiler in
3027 1.1 mrg general creates a temporary for it and passes the temporary as the
3028 1.1 mrg argument to the gimple call. But in some cases, e.g. when passing
3029 1.1 mrg by value a small structure that fits in a register, the compiler
3030 1.1 mrg can omit the temporary. In that case we should check the argument. */
3031 1.1 mrg if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
3032 1.1 mrg {
3033 1.1 mrg instrument_derefs (iter, arg,
3034 1.1 mrg gimple_location (stmt),
3035 1.1 mrg /*is_store=*/false);
3036 1.1 mrg instrumented = true;
3037 1.1 mrg }
3038 1.1 mrg }
3039 1.1 mrg if (instrumented)
3040 1.1 mrg gsi_next (iter);
3041 1.1 mrg return instrumented;
3042 1.1 mrg }
3043 1.1 mrg
3044 1.1 mrg /* Walk each statement of every basic block and instrument those that
3045 1.1 mrg represent memory references: loads, stores, or function calls.
3046 1.1 mrg In a given basic block, this function avoids instrumenting memory
3047 1.1 mrg references that have already been instrumented. */
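/* E.g. if two consecutive statements in one extended basic block load
   the same a.b, only the first load is instrumented; the bookkeeping
   hash table is flushed at extended-basic-block boundaries and at
   calls that may free memory (see below). */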
3048 1.1 mrg
3049 1.1 mrg static void
3050 1.1 mrg transform_statements (void)
3051 1.1 mrg {
3052 1.1 mrg basic_block bb, last_bb = NULL;
3053 1.1 mrg gimple_stmt_iterator i;
3054 1.1 mrg int saved_last_basic_block = last_basic_block_for_fn (cfun);
3055 1.1 mrg
3056 1.1 mrg FOR_EACH_BB_FN (bb, cfun)
3057 1.1 mrg {
3058 1.1 mrg basic_block prev_bb = bb;
3059 1.1 mrg
3060 1.1 mrg if (bb->index >= saved_last_basic_block) continue;
3061 1.1 mrg
3062 1.1 mrg /* Flush the mem ref hash table if the current bb doesn't have
3063 1.1 mrg exactly one predecessor, or if that predecessor (skipping
3064 1.1 mrg over asan-created basic blocks) isn't the last processed
3065 1.1 mrg basic block. Thus we effectively flush on extended basic
3066 1.1 mrg block boundaries. */
3067 1.1 mrg while (single_pred_p (prev_bb))
3068 1.1 mrg {
3069 1.1 mrg prev_bb = single_pred (prev_bb);
3070 1.1 mrg if (prev_bb->index < saved_last_basic_block)
3071 1.1 mrg break;
3072 1.1 mrg }
3073 1.1 mrg if (prev_bb != last_bb)
3074 1.1 mrg empty_mem_ref_hash_table ();
3075 1.1 mrg last_bb = bb;
3076 1.1 mrg
3077 1.1 mrg for (i = gsi_start_bb (bb); !gsi_end_p (i);)
3078 1.1 mrg {
3079 1.1 mrg gimple *s = gsi_stmt (i);
3080 1.1 mrg
3081 1.1 mrg if (has_stmt_been_instrumented_p (s))
3082 1.1 mrg gsi_next (&i);
3083 1.1 mrg else if (gimple_assign_single_p (s)
3084 1.1 mrg && !gimple_clobber_p (s)
3085 1.1 mrg && maybe_instrument_assignment (&i))
3086 1.1 mrg /* Nothing to do as maybe_instrument_assignment advanced
3087 1.1 mrg the iterator I. */;
3088 1.1 mrg else if (is_gimple_call (s) && maybe_instrument_call (&i))
3089 1.1 mrg /* Nothing to do as maybe_instrument_call
3090 1.1 mrg advanced the iterator I. */;
3091 1.1 mrg else
3092 1.1 mrg {
3093 1.1 mrg /* No instrumentation happened.
3094 1.1 mrg
3095 1.1 mrg If the current instruction is a function call that
3096 1.1 mrg might free something, let's forget about the memory
3097 1.1 mrg references that got instrumented. Otherwise we might
3098 1.1 mrg miss some instrumentation opportunities. Do the same
3099 1.1 mrg for an ASAN_MARK poisoning internal function. */
3100 1.1 mrg if (is_gimple_call (s)
3101 1.1 mrg && (!nonfreeing_call_p (s)
3102 1.1 mrg || asan_mark_p (s, ASAN_MARK_POISON)))
3103 1.1 mrg empty_mem_ref_hash_table ();
3104 1.1 mrg
3105 1.1 mrg gsi_next (&i);
3106 1.1 mrg }
3107 1.1 mrg }
3108 1.1 mrg }
3109 1.1 mrg free_mem_ref_resources ();
3110 1.1 mrg }
3111 1.1 mrg
3112 1.1 mrg /* Build
3113 1.1 mrg __asan_before_dynamic_init (module_name)
3114 1.1 mrg or
3115 1.1 mrg __asan_after_dynamic_init ()
3116 1.1 mrg call. */
3117 1.1 mrg
3118 1.1 mrg tree
3119 1.1 mrg asan_dynamic_init_call (bool after_p)
3120 1.1 mrg {
3121 1.1 mrg if (shadow_ptr_types[0] == NULL_TREE)
3122 1.1 mrg asan_init_shadow_ptr_types ();
3123 1.1 mrg
3124 1.1 mrg tree fn = builtin_decl_implicit (after_p
3125 1.1 mrg ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
3126 1.1 mrg : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
3127 1.1 mrg tree module_name_cst = NULL_TREE;
3128 1.1 mrg if (!after_p)
3129 1.1 mrg {
3130 1.1 mrg pretty_printer module_name_pp;
3131 1.1 mrg pp_string (&module_name_pp, main_input_filename);
3132 1.1 mrg
3133 1.1 mrg module_name_cst = asan_pp_string (&module_name_pp);
3134 1.1 mrg module_name_cst = fold_convert (const_ptr_type_node,
3135 1.1 mrg module_name_cst);
3136 1.1 mrg }
3137 1.1 mrg
3138 1.1 mrg return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
3139 1.1 mrg }
3140 1.1 mrg
3141 1.1 mrg /* Build
3142 1.1 mrg struct __asan_global
3143 1.1 mrg {
3144 1.1 mrg const void *__beg;
3145 1.1 mrg uptr __size;
3146 1.1 mrg uptr __size_with_redzone;
3147 1.1 mrg const void *__name;
3148 1.1 mrg const void *__module_name;
3149 1.1 mrg uptr __has_dynamic_init;
3150 1.1 mrg __asan_global_source_location *__location;
3151 1.1 mrg char *__odr_indicator;
3152 1.1 mrg } type. */
3153 1.1 mrg
3154 1.1 mrg static tree
3155 1.1 mrg asan_global_struct (void)
3156 1.1 mrg {
3157 1.1 mrg static const char *field_names[]
3158 1.1 mrg = { "__beg", "__size", "__size_with_redzone",
3159 1.1 mrg "__name", "__module_name", "__has_dynamic_init", "__location",
3160 1.1 mrg "__odr_indicator" };
3161 1.1 mrg tree fields[ARRAY_SIZE (field_names)], ret;
3162 1.1 mrg unsigned i;
3163 1.1 mrg
3164 1.1 mrg ret = make_node (RECORD_TYPE);
3165 1.1 mrg for (i = 0; i < ARRAY_SIZE (field_names); i++)
3166 1.1 mrg {
3167 1.1 mrg fields[i]
3168 1.1 mrg = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
3169 1.1 mrg get_identifier (field_names[i]),
3170 1.1 mrg (i == 0 || i == 3) ? const_ptr_type_node
3171 1.1 mrg : pointer_sized_int_node);
3172 1.1 mrg DECL_CONTEXT (fields[i]) = ret;
3173 1.1 mrg if (i)
3174 1.1 mrg DECL_CHAIN (fields[i - 1]) = fields[i];
3175 1.1 mrg }
3176 1.1 mrg tree type_decl = build_decl (input_location, TYPE_DECL,
3177 1.1 mrg get_identifier ("__asan_global"), ret);
3178 1.1 mrg DECL_IGNORED_P (type_decl) = 1;
3179 1.1 mrg DECL_ARTIFICIAL (type_decl) = 1;
3180 1.1 mrg TYPE_FIELDS (ret) = fields[0];
3181 1.1 mrg TYPE_NAME (ret) = type_decl;
3182 1.1 mrg TYPE_STUB_DECL (ret) = type_decl;
3183 1.1 mrg TYPE_ARTIFICIAL (ret) = 1;
3184 1.1 mrg layout_type (ret);
3185 1.1 mrg return ret;
3186 1.1 mrg }
3187 1.1 mrg
3188 1.1 mrg /* Create and return odr indicator symbol for DECL.
3189 1.1 mrg TYPE is __asan_global struct type as returned by asan_global_struct. */
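/* E.g. for a public variable "foo" this emits a one-byte volatile
   variable named __odr_asan.foo (or __odr_asan$foo / __odr_asan_foo,
   depending on which characters labels may contain) whose address is
   recorded in foo's global descriptor; the runtime uses it to detect
   ODR violations across modules. */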
3190 1.1 mrg
3191 1.1 mrg static tree
3192 1.1 mrg create_odr_indicator (tree decl, tree type)
3193 1.1 mrg {
3194 1.1 mrg char *name;
3195 1.1 mrg tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3196 1.1 mrg tree decl_name
3197 1.1 mrg = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
3198 1.1 mrg : DECL_NAME (decl));
3199 1.1 mrg /* DECL_NAME theoretically might be NULL. Bail out with 0 in this case. */
3200 1.1 mrg if (decl_name == NULL_TREE)
3201 1.1 mrg return build_int_cst (uptr, 0);
3202 1.1 mrg const char *dname = IDENTIFIER_POINTER (decl_name);
3203 1.1 mrg if (HAS_DECL_ASSEMBLER_NAME_P (decl))
3204 1.1 mrg dname = targetm.strip_name_encoding (dname);
3205 1.1 mrg size_t len = strlen (dname) + sizeof ("__odr_asan_");
3206 1.1 mrg name = XALLOCAVEC (char, len);
3207 1.1 mrg snprintf (name, len, "__odr_asan_%s", dname);
3208 1.1 mrg #ifndef NO_DOT_IN_LABEL
3209 1.1 mrg name[sizeof ("__odr_asan") - 1] = '.';
3210 1.1 mrg #elif !defined(NO_DOLLAR_IN_LABEL)
3211 1.1 mrg name[sizeof ("__odr_asan") - 1] = '$';
3212 1.1 mrg #endif
3213 1.1 mrg tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
3214 1.1 mrg char_type_node);
3215 1.1 mrg TREE_ADDRESSABLE (var) = 1;
3216 1.1 mrg TREE_READONLY (var) = 0;
3217 1.1 mrg TREE_THIS_VOLATILE (var) = 1;
3218 1.1 mrg DECL_ARTIFICIAL (var) = 1;
3219 1.1 mrg DECL_IGNORED_P (var) = 1;
3220 1.1 mrg TREE_STATIC (var) = 1;
3221 1.1 mrg TREE_PUBLIC (var) = 1;
3222 1.1 mrg DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
3223 1.1 mrg DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
3224 1.1 mrg
3225 1.1 mrg TREE_USED (var) = 1;
3226 1.1 mrg tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
3227 1.1 mrg build_int_cst (unsigned_type_node, 0));
3228 1.1 mrg TREE_CONSTANT (ctor) = 1;
3229 1.1 mrg TREE_STATIC (ctor) = 1;
3230 1.1 mrg DECL_INITIAL (var) = ctor;
3231 1.1 mrg DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
3232 1.1 mrg NULL, DECL_ATTRIBUTES (var));
3233 1.1 mrg make_decl_rtl (var);
3234 1.1 mrg varpool_node::finalize_decl (var);
3235 1.1 mrg return fold_convert (uptr, build_fold_addr_expr (var));
3236 1.1 mrg }
3237 1.1 mrg
3238 1.1 mrg /* Return true if DECL, a global var, might be overridden and needs
3239 1.1 mrg an additional odr indicator symbol. */
3240 1.1 mrg
3241 1.1 mrg static bool
3242 1.1 mrg asan_needs_odr_indicator_p (tree decl)
3243 1.1 mrg {
3244 1.1 mrg /* Don't emit ODR indicators for the kernel because:
3245 1.1 mrg a) The kernel is written in C and thus doesn't need ODR indicators.
3246 1.1 mrg b) Some kernel code may have assumptions about symbols containing specific
3247 1.1 mrg patterns in their names. Since ODR indicators contain original names
3248 1.1 mrg of symbols they are emitted for, these assumptions would be broken for
3249 1.1 mrg ODR indicator symbols. */
3250 1.1 mrg return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
3251 1.1 mrg && !DECL_ARTIFICIAL (decl)
3252 1.1 mrg && !DECL_WEAK (decl)
3253 1.1 mrg && TREE_PUBLIC (decl));
3254 1.1 mrg }
3255 1.1 mrg
3256 1.1 mrg /* Append description of a single global DECL into vector V.
3257 1.1 mrg TYPE is __asan_global struct type as returned by asan_global_struct. */
3258 1.1 mrg
3259 1.1 mrg static void
3260 1.1 mrg asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
3261 1.1 mrg {
3262 1.1 mrg tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3263 1.1 mrg unsigned HOST_WIDE_INT size;
3264 1.1 mrg tree str_cst, module_name_cst, refdecl = decl;
3265 1.1 mrg vec<constructor_elt, va_gc> *vinner = NULL;
3266 1.1 mrg
3267 1.1 mrg pretty_printer asan_pp, module_name_pp;
3268 1.1 mrg
3269 1.1 mrg if (DECL_NAME (decl))
3270 1.1 mrg pp_tree_identifier (&asan_pp, DECL_NAME (decl));
3271 1.1 mrg else
3272 1.1 mrg pp_string (&asan_pp, "<unknown>");
3273 1.1 mrg str_cst = asan_pp_string (&asan_pp);
3274 1.1 mrg
3275 1.1 mrg if (!in_lto_p)
3276 1.1 mrg pp_string (&module_name_pp, main_input_filename);
3277 1.1 mrg else
3278 1.1 mrg {
3279 1.1 mrg const_tree tu = get_ultimate_context ((const_tree)decl);
3280 1.1 mrg if (tu != NULL_TREE)
3281 1.1 mrg pp_string (&module_name_pp, IDENTIFIER_POINTER (DECL_NAME (tu)));
3282 1.1 mrg else
3283 1.1 mrg pp_string (&module_name_pp, aux_base_name);
3284 1.1 mrg }
3285 1.1 mrg
3286 1.1 mrg module_name_cst = asan_pp_string (&module_name_pp);
3287 1.1 mrg
3288 1.1 mrg if (asan_needs_local_alias (decl))
3289 1.1 mrg {
3290 1.1 mrg char buf[20];
3291 1.1 mrg ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
3292 1.1 mrg refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
3293 1.1 mrg VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
3294 1.1 mrg TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
3295 1.1 mrg TREE_READONLY (refdecl) = TREE_READONLY (decl);
3296 1.1 mrg TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
3297 1.1 mrg DECL_NOT_GIMPLE_REG_P (refdecl) = DECL_NOT_GIMPLE_REG_P (decl);
3298 1.1 mrg DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
3299 1.1 mrg DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
3300 1.1 mrg TREE_STATIC (refdecl) = 1;
3301 1.1 mrg TREE_PUBLIC (refdecl) = 0;
3302 1.1 mrg TREE_USED (refdecl) = 1;
3303 1.1 mrg assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
3304 1.1 mrg }
3305 1.1 mrg
3306 1.1 mrg tree odr_indicator_ptr
3307 1.1 mrg = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
3308 1.1 mrg : build_int_cst (uptr, 0));
3309 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3310 1.1 mrg fold_convert (const_ptr_type_node,
3311 1.1 mrg build_fold_addr_expr (refdecl)));
3312 1.1 mrg size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
3313 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
3314 1.1 mrg size += asan_red_zone_size (size);
3315 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
3316 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3317 1.1 mrg fold_convert (const_ptr_type_node, str_cst));
3318 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3319 1.1 mrg fold_convert (const_ptr_type_node, module_name_cst));
3320 1.1 mrg varpool_node *vnode = varpool_node::get (decl);
3321 1.1 mrg int has_dynamic_init = 0;
3322 1.1 mrg /* FIXME: Enable initialization order fiasco detection in LTO mode once
3323 1.1 mrg a proper fix for PR 79061 is applied. */
3324 1.1 mrg if (!in_lto_p)
3325 1.1 mrg has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
3326 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
3327 1.1 mrg build_int_cst (uptr, has_dynamic_init));
3328 1.1 mrg tree locptr = NULL_TREE;
3329 1.1 mrg location_t loc = DECL_SOURCE_LOCATION (decl);
3330 1.1 mrg expanded_location xloc = expand_location (loc);
3331 1.1 mrg if (xloc.file != NULL)
3332 1.1 mrg {
3333 1.1 mrg static int lasanloccnt = 0;
3334 1.1 mrg char buf[25];
3335 1.1 mrg ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
3336 1.1 mrg tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3337 1.1 mrg ubsan_get_source_location_type ());
3338 1.1 mrg TREE_STATIC (var) = 1;
3339 1.1 mrg TREE_PUBLIC (var) = 0;
3340 1.1 mrg DECL_ARTIFICIAL (var) = 1;
3341 1.1 mrg DECL_IGNORED_P (var) = 1;
3342 1.1 mrg pretty_printer filename_pp;
3343 1.1 mrg pp_string (&filename_pp, xloc.file);
3344 1.1 mrg tree str = asan_pp_string (&filename_pp);
3345 1.1 mrg tree ctor = build_constructor_va (TREE_TYPE (var), 3,
3346 1.1 mrg NULL_TREE, str, NULL_TREE,
3347 1.1 mrg build_int_cst (unsigned_type_node,
3348 1.1 mrg xloc.line), NULL_TREE,
3349 1.1 mrg build_int_cst (unsigned_type_node,
3350 1.1 mrg xloc.column));
3351 1.1 mrg TREE_CONSTANT (ctor) = 1;
3352 1.1 mrg TREE_STATIC (ctor) = 1;
3353 1.1 mrg DECL_INITIAL (var) = ctor;
3354 1.1 mrg varpool_node::finalize_decl (var);
3355 1.1 mrg locptr = fold_convert (uptr, build_fold_addr_expr (var));
3356 1.1 mrg }
3357 1.1 mrg else
3358 1.1 mrg locptr = build_int_cst (uptr, 0);
3359 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
3360 1.1 mrg CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
3361 1.1 mrg init = build_constructor (type, vinner);
3362 1.1 mrg CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
3363 1.1 mrg }
3364 1.1 mrg
3365 1.1 mrg /* Initialize sanitizer.def builtins if the FE hasn't initialized them. */
3366 1.1 mrg void
3367 1.1 mrg initialize_sanitizer_builtins (void)
3368 1.1 mrg {
3369 1.1 mrg tree decl;
3370 1.1 mrg
3371 1.1 mrg if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
3372 1.1 mrg return;
3373 1.1 mrg
3374 1.1 mrg tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
3375 1.1 mrg tree BT_FN_VOID_PTR
3376 1.1 mrg = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
3377 1.1 mrg tree BT_FN_VOID_CONST_PTR
3378 1.1 mrg = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
3379 1.1 mrg tree BT_FN_VOID_PTR_PTR
3380 1.1 mrg = build_function_type_list (void_type_node, ptr_type_node,
3381 1.1 mrg ptr_type_node, NULL_TREE);
3382 1.1 mrg tree BT_FN_VOID_PTR_PTR_PTR
3383 1.1 mrg = build_function_type_list (void_type_node, ptr_type_node,
3384 1.1 mrg ptr_type_node, ptr_type_node, NULL_TREE);
3385 1.1 mrg tree BT_FN_VOID_PTR_PTRMODE
3386 1.1 mrg = build_function_type_list (void_type_node, ptr_type_node,
3387 1.1 mrg pointer_sized_int_node, NULL_TREE);
3388 1.1 mrg tree BT_FN_VOID_INT
3389 1.1 mrg = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
3390 1.1 mrg tree BT_FN_SIZE_CONST_PTR_INT
3391 1.1 mrg = build_function_type_list (size_type_node, const_ptr_type_node,
3392 1.1 mrg integer_type_node, NULL_TREE);
3393 1.1 mrg
3394 1.1 mrg tree BT_FN_VOID_UINT8_UINT8
3395 1.1 mrg = build_function_type_list (void_type_node, unsigned_char_type_node,
3396 1.1 mrg unsigned_char_type_node, NULL_TREE);
3397 1.1 mrg tree BT_FN_VOID_UINT16_UINT16
3398 1.1 mrg = build_function_type_list (void_type_node, uint16_type_node,
3399 1.1 mrg uint16_type_node, NULL_TREE);
3400 1.1 mrg tree BT_FN_VOID_UINT32_UINT32
3401 1.1 mrg = build_function_type_list (void_type_node, uint32_type_node,
3402 1.1 mrg uint32_type_node, NULL_TREE);
3403 1.1 mrg tree BT_FN_VOID_UINT64_UINT64
3404 1.1 mrg = build_function_type_list (void_type_node, uint64_type_node,
3405 1.1 mrg uint64_type_node, NULL_TREE);
3406 1.1 mrg tree BT_FN_VOID_FLOAT_FLOAT
3407 1.1 mrg = build_function_type_list (void_type_node, float_type_node,
3408 1.1 mrg float_type_node, NULL_TREE);
3409 1.1 mrg tree BT_FN_VOID_DOUBLE_DOUBLE
3410 1.1 mrg = build_function_type_list (void_type_node, double_type_node,
3411 1.1 mrg double_type_node, NULL_TREE);
3412 1.1 mrg tree BT_FN_VOID_UINT64_PTR
3413 1.1 mrg = build_function_type_list (void_type_node, uint64_type_node,
3414 1.1 mrg ptr_type_node, NULL_TREE);
3415 1.1 mrg
3416 1.1 mrg tree BT_FN_PTR_CONST_PTR_UINT8
3417 1.1 mrg = build_function_type_list (ptr_type_node, const_ptr_type_node,
3418 1.1 mrg unsigned_char_type_node, NULL_TREE);
3419 1.1 mrg tree BT_FN_VOID_PTR_UINT8_PTRMODE
3420 1.1 mrg = build_function_type_list (void_type_node, ptr_type_node,
3421 1.1 mrg unsigned_char_type_node,
3422 1.1 mrg pointer_sized_int_node, NULL_TREE);
3423 1.1 mrg
3424 1.1 mrg tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
3425 1.1 mrg tree BT_FN_IX_CONST_VPTR_INT[5];
3426 1.1 mrg tree BT_FN_IX_VPTR_IX_INT[5];
3427 1.1 mrg tree BT_FN_VOID_VPTR_IX_INT[5];
3428 1.1 mrg tree vptr
3429 1.1 mrg = build_pointer_type (build_qualified_type (void_type_node,
3430 1.1 mrg TYPE_QUAL_VOLATILE));
3431 1.1 mrg tree cvptr
3432 1.1 mrg = build_pointer_type (build_qualified_type (void_type_node,
3433 1.1 mrg TYPE_QUAL_VOLATILE
3434 1.1 mrg |TYPE_QUAL_CONST));
3435 1.1 mrg tree boolt
3436 1.1 mrg = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
3437 1.1 mrg int i;
3438 1.1 mrg for (i = 0; i < 5; i++)
3439 1.1 mrg {
3440 1.1 mrg tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
3441 1.1 mrg BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
3442 1.1 mrg = build_function_type_list (boolt, vptr, ptr_type_node, ix,
3443 1.1 mrg integer_type_node, integer_type_node,
3444 1.1 mrg NULL_TREE);
3445 1.1 mrg BT_FN_IX_CONST_VPTR_INT[i]
3446 1.1 mrg = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
3447 1.1 mrg BT_FN_IX_VPTR_IX_INT[i]
3448 1.1 mrg = build_function_type_list (ix, vptr, ix, integer_type_node,
3449 1.1 mrg NULL_TREE);
3450 1.1 mrg BT_FN_VOID_VPTR_IX_INT[i]
3451 1.1 mrg = build_function_type_list (void_type_node, vptr, ix,
3452 1.1 mrg integer_type_node, NULL_TREE);
3453 1.1 mrg }
3454 1.1 mrg #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
3455 1.1 mrg #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
3456 1.1 mrg #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
3457 1.1 mrg #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
3458 1.1 mrg #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
3459 1.1 mrg #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
3460 1.1 mrg #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
3461 1.1 mrg #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
3462 1.1 mrg #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
3463 1.1 mrg #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
3464 1.1 mrg #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
3465 1.1 mrg #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
3466 1.1 mrg #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
3467 1.1 mrg #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
3468 1.1 mrg #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
3469 1.1 mrg #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
3470 1.1 mrg #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
3471 1.1 mrg #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
3472 1.1 mrg #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
3473 1.1 mrg #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
3474 1.1 mrg #undef ATTR_NOTHROW_LIST
3475 1.1 mrg #define ATTR_NOTHROW_LIST ECF_NOTHROW
3476 1.1 mrg #undef ATTR_NOTHROW_LEAF_LIST
3477 1.1 mrg #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
3478 1.1 mrg #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
3479 1.1 mrg #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
3480 1.1 mrg #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
3481 1.1 mrg #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
3482 1.1 mrg #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3483 1.1 mrg #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
3484 1.1 mrg ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
3485 1.1 mrg #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
3486 1.1 mrg #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
3487 1.1 mrg ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
3488 1.1 mrg #undef ATTR_COLD_NOTHROW_LEAF_LIST
3489 1.1 mrg #define ATTR_COLD_NOTHROW_LEAF_LIST \
3490 1.1 mrg /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
3491 1.1 mrg #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
3492 1.1 mrg #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
3493 1.1 mrg /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
3494 1.1 mrg #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
3495 1.1 mrg #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
3496 1.1 mrg /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3497 1.1 mrg #undef ATTR_PURE_NOTHROW_LEAF_LIST
3498 1.1 mrg #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
3499 1.1 mrg #undef DEF_BUILTIN_STUB
3500 1.1 mrg #define DEF_BUILTIN_STUB(ENUM, NAME)
3501 1.1 mrg #undef DEF_SANITIZER_BUILTIN_1
3502 1.1 mrg #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS) \
3503 1.1 mrg do { \
3504 1.1 mrg decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
3505 1.1 mrg BUILT_IN_NORMAL, NAME, NULL_TREE); \
3506 1.1 mrg set_call_expr_flags (decl, ATTRS); \
3507 1.1 mrg set_builtin_decl (ENUM, decl, true); \
3508 1.1 mrg } while (0)
3509 1.1 mrg #undef DEF_SANITIZER_BUILTIN
3510 1.1 mrg #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
3511 1.1 mrg DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
3512 1.1 mrg
3513 1.1 mrg #include "sanitizer.def"
3514 1.1 mrg
3515 1.1 mrg /* -fsanitize=object-size uses __builtin_object_size, but that might
3516 1.1 mrg not be available for e.g. Fortran at this point. We use
3517 1.1 mrg DEF_SANITIZER_BUILTIN here only as a convenience macro. */
3518 1.1 mrg if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
3519 1.1 mrg && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
3520 1.1 mrg DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
3521 1.1 mrg BT_FN_SIZE_CONST_PTR_INT,
3522 1.1 mrg ATTR_PURE_NOTHROW_LEAF_LIST);
3523 1.1 mrg
3524 1.1 mrg #undef DEF_SANITIZER_BUILTIN_1
3525 1.1 mrg #undef DEF_SANITIZER_BUILTIN
3526 1.1 mrg #undef DEF_BUILTIN_STUB
3527 1.1 mrg }
3528 1.1 mrg
3529 1.1 mrg /* Called via hash_table::traverse. Count the number of emitted
3530 1.1 mrg STRING_CSTs in the constant hash table. */
3531 1.1 mrg
3532 1.1 mrg int
3533 1.1 mrg count_string_csts (constant_descriptor_tree **slot,
3534 1.1 mrg unsigned HOST_WIDE_INT *data)
3535 1.1 mrg {
3536 1.1 mrg struct constant_descriptor_tree *desc = *slot;
3537 1.1 mrg if (TREE_CODE (desc->value) == STRING_CST
3538 1.1 mrg && TREE_ASM_WRITTEN (desc->value)
3539 1.1 mrg && asan_protect_global (desc->value))
3540 1.1 mrg ++*data;
3541 1.1 mrg return 1;
3542 1.1 mrg }
3543 1.1 mrg
3544 1.1 mrg /* Helper structure to pass two parameters to
3545 1.1 mrg add_string_csts. */
3546 1.1 mrg
3547 1.1 mrg struct asan_add_string_csts_data
3548 1.1 mrg {
3549 1.1 mrg tree type;
3550 1.1 mrg vec<constructor_elt, va_gc> *v;
3551 1.1 mrg };
3552 1.1 mrg
3553 1.1 mrg /* Called via hash_table::traverse. Call asan_add_global
3554 1.1 mrg on emitted STRING_CSTs from the constant hash table. */
3555 1.1 mrg
3556 1.1 mrg int
3557 1.1 mrg add_string_csts (constant_descriptor_tree **slot,
3558 1.1 mrg asan_add_string_csts_data *aascd)
3559 1.1 mrg {
3560 1.1 mrg struct constant_descriptor_tree *desc = *slot;
3561 1.1 mrg if (TREE_CODE (desc->value) == STRING_CST
3562 1.1 mrg && TREE_ASM_WRITTEN (desc->value)
3563 1.1 mrg && asan_protect_global (desc->value))
3564 1.1 mrg {
3565 1.1 mrg asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3566 1.1 mrg aascd->type, aascd->v);
3567 1.1 mrg }
3568 1.1 mrg return 1;
3569 1.1 mrg }
3570 1.1 mrg
3571 1.1 mrg /* Needs to be GTY(()), because cgraph_build_static_cdtor may
3572 1.1 mrg invoke ggc_collect. */
3573 1.1 mrg static GTY(()) tree asan_ctor_statements;
3574 1.1 mrg
3575 1.1 mrg /* Module-level instrumentation.
3576 1.1 mrg - Insert __asan_init_vN() into the list of CTORs.
3577 1.1 mrg - TODO: insert redzones around globals.
3578 1.1 mrg */
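/* As a sketch, for user-space ASAN the constructor built below amounts to

     __asan_init ();
     __asan_version_mismatch_check_vN ();   -- N is the runtime API version
     __asan_register_globals (&.LASAN0, gcount);

   and a matching destructor calls __asan_unregister_globals; the label
   .LASAN0 here stands for the internal array of global descriptors. */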
3579 1.1 mrg
3580 1.1 mrg void
3581 1.1 mrg asan_finish_file (void)
3582 1.1 mrg {
3583 1.1 mrg varpool_node *vnode;
3584 1.1 mrg unsigned HOST_WIDE_INT gcount = 0;
3585 1.1 mrg
3586 1.1 mrg if (shadow_ptr_types[0] == NULL_TREE)
3587 1.1 mrg asan_init_shadow_ptr_types ();
3588 1.1 mrg /* Avoid instrumenting code in the asan ctors/dtors.
3589 1.1 mrg We don't need to insert padding after the description strings,
3590 1.1 mrg nor after the .LASAN* array. */
3591 1.1 mrg flag_sanitize &= ~SANITIZE_ADDRESS;
3592 1.1 mrg
3593 1.1 mrg /* For user-space we want asan constructors to run first.
3594 1.1 mrg The Linux kernel does not support priorities other than the default,
3595 1.1 mrg and the only other user of constructors is coverage. So there we run
3596 1.1 mrg with the default priority. */
3597 1.1 mrg int priority = flag_sanitize & SANITIZE_USER_ADDRESS
3598 1.1 mrg ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
3599 1.1 mrg
3600 1.1 mrg if (flag_sanitize & SANITIZE_USER_ADDRESS)
3601 1.1 mrg {
3602 1.1 mrg tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
3603 1.1 mrg append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3604 1.1 mrg fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
3605 1.1 mrg append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3606 1.1 mrg }
3607 1.1 mrg FOR_EACH_DEFINED_VARIABLE (vnode)
3608 1.1 mrg if (TREE_ASM_WRITTEN (vnode->decl)
3609 1.1 mrg && asan_protect_global (vnode->decl))
3610 1.1 mrg ++gcount;
3611 1.1 mrg hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
3612 1.1 mrg const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
3613 1.1 mrg (&gcount);
3614 1.1 mrg if (gcount)
3615 1.1 mrg {
3616 1.1 mrg tree type = asan_global_struct (), var, ctor;
3617 1.1 mrg tree dtor_statements = NULL_TREE;
3618 1.1 mrg vec<constructor_elt, va_gc> *v;
3619 1.1 mrg char buf[20];
3620 1.1 mrg
3621 1.1 mrg type = build_array_type_nelts (type, gcount);
3622 1.1 mrg ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
3623 1.1 mrg var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3624 1.1 mrg type);
3625 1.1 mrg TREE_STATIC (var) = 1;
3626 1.1 mrg TREE_PUBLIC (var) = 0;
3627 1.1 mrg DECL_ARTIFICIAL (var) = 1;
3628 1.1 mrg DECL_IGNORED_P (var) = 1;
3629 1.1 mrg vec_alloc (v, gcount);
3630 1.1 mrg FOR_EACH_DEFINED_VARIABLE (vnode)
3631 1.1 mrg if (TREE_ASM_WRITTEN (vnode->decl)
3632 1.1 mrg && asan_protect_global (vnode->decl))
3633 1.1 mrg asan_add_global (vnode->decl, TREE_TYPE (type), v);
3634 1.1 mrg struct asan_add_string_csts_data aascd;
3635 1.1 mrg aascd.type = TREE_TYPE (type);
3636 1.1 mrg aascd.v = v;
3637 1.1 mrg const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
3638 1.1 mrg (&aascd);
3639 1.1 mrg ctor = build_constructor (type, v);
3640 1.1 mrg TREE_CONSTANT (ctor) = 1;
3641 1.1 mrg TREE_STATIC (ctor) = 1;
3642 1.1 mrg DECL_INITIAL (var) = ctor;
3643 1.1 mrg SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
3644 1.1 mrg ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
3645 1.1 mrg
3646 1.1 mrg varpool_node::finalize_decl (var);
3647 1.1 mrg
3648 1.1 mrg tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
3649 1.1 mrg tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
3650 1.1 mrg append_to_statement_list (build_call_expr (fn, 2,
3651 1.1 mrg build_fold_addr_expr (var),
3652 1.1 mrg gcount_tree),
3653 1.1 mrg &asan_ctor_statements);
3654 1.1 mrg
3655 1.1 mrg fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
3656 1.1 mrg append_to_statement_list (build_call_expr (fn, 2,
3657 1.1 mrg build_fold_addr_expr (var),
3658 1.1 mrg gcount_tree),
3659 1.1 mrg &dtor_statements);
3660 1.1 mrg cgraph_build_static_cdtor ('D', dtor_statements, priority);
3661 1.1 mrg }
3662 1.1 mrg if (asan_ctor_statements)
3663 1.1 mrg cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
3664 1.1 mrg flag_sanitize |= SANITIZE_ADDRESS;
3665 1.1 mrg }
3666 1.1 mrg
3667 1.1 mrg /* Poison or unpoison (depending on IS_CLOBBER) shadow memory based
3668 1.1 mrg on the SHADOW address. Newly added statements will be added to ITER
3669 1.1 mrg with the given location LOC. We mark SIZE bytes in shadow memory,
3670 1.1 mrg where LAST_CHUNK_SIZE is greater than zero when we are at the
3671 1.1 mrg end of a variable. */
3672 1.1 mrg
3673 1.1 mrg static void
3674 1.1 mrg asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
3675 1.1 mrg tree shadow,
3676 1.1 mrg unsigned HOST_WIDE_INT base_addr_offset,
3677 1.1 mrg bool is_clobber, unsigned size,
3678 1.1 mrg unsigned last_chunk_size)
3679 1.1 mrg {
3680 1.1 mrg tree shadow_ptr_type;
3681 1.1 mrg
3682 1.1 mrg switch (size)
3683 1.1 mrg {
3684 1.1 mrg case 1:
3685 1.1 mrg shadow_ptr_type = shadow_ptr_types[0];
3686 1.1 mrg break;
3687 1.1 mrg case 2:
3688 1.1 mrg shadow_ptr_type = shadow_ptr_types[1];
3689 1.1 mrg break;
3690 1.1 mrg case 4:
3691 1.1 mrg shadow_ptr_type = shadow_ptr_types[2];
3692 1.1 mrg break;
3693 1.1 mrg default:
3694 1.1 mrg gcc_unreachable ();
3695 1.1 mrg }
3696 1.1 mrg
3697 1.1 mrg unsigned char c = is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
3698 1.1 mrg unsigned HOST_WIDE_INT val = 0;
3699 1.1 mrg unsigned last_pos = size;
3700 1.1 mrg if (last_chunk_size && !is_clobber)
3701 1.1 mrg last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
3702 1.1 mrg for (unsigned i = 0; i < size; ++i)
3703 1.1 mrg {
3704 1.1 mrg unsigned char shadow_c = c;
3705 1.1 mrg if (i == last_pos)
3706 1.1 mrg shadow_c = last_chunk_size;
3707 1.1 mrg val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3708 1.1 mrg }
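/* E.g. unpoisoning a 10-byte variable on a little-endian target calls
   this with SIZE == 2 and LAST_CHUNK_SIZE == 2, giving VAL == 0x0200:
   shadow byte 0 is 0 (all 8 bytes addressable) and shadow byte 1 is 2
   (only 2 bytes of the final chunk are addressable). */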
3709 1.1 mrg
3710 1.1 mrg /* Handle last chunk in unpoisoning. */
3711 1.1 mrg tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3712 1.1 mrg
3713 1.1 mrg tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3714 1.1 mrg build_int_cst (shadow_ptr_type, base_addr_offset));
3715 1.1 mrg
3716 1.1 mrg gimple *g = gimple_build_assign (dest, magic);
3717 1.1 mrg gimple_set_location (g, loc);
3718 1.1 mrg gsi_insert_after (iter, g, GSI_NEW_STMT);
3719 1.1 mrg }
3720 1.1 mrg
3721 1.1 mrg /* Expand the ASAN_MARK builtins. */
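/* A sketch of the two expansion strategies used below: for a small
   variable, ASAN_MARK (POISON, &x, 10) becomes direct shadow stores
   (as computed by asan_store_shadow_bytes), while for larger ones it
   becomes a call to __asan_poison_stack_memory (&x, 10) or its
   unpoison counterpart. */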
3722 1.1 mrg
3723 1.1 mrg bool
3724 1.1 mrg asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3725 1.1 mrg {
3726 1.1 mrg gimple *g = gsi_stmt (*iter);
3727 1.1 mrg location_t loc = gimple_location (g);
3728 1.1 mrg HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3729 1.1 mrg bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3730 1.1 mrg
3731 1.1 mrg tree base = gimple_call_arg (g, 1);
3732 1.1 mrg gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3733 1.1 mrg tree decl = TREE_OPERAND (base, 0);
3734 1.1 mrg
3735 1.1 mrg /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3736 1.1 mrg if (TREE_CODE (decl) == COMPONENT_REF
3737 1.1 mrg && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3738 1.1 mrg decl = TREE_OPERAND (decl, 0);
3739 1.1 mrg
3740 1.1 mrg gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3741 1.1 mrg
3742 1.1 mrg if (hwasan_sanitize_p ())
3743 1.1 mrg {
3744 1.1 mrg gcc_assert (param_hwasan_instrument_stack);
3745 1.1 mrg gimple_seq stmts = NULL;
3746 1.1 mrg /* Here we swap ASAN_MARK calls for HWASAN_MARK.
3747 1.1 mrg This is because up to this point we have used ASAN_MARK as a
3748 1.1 mrg synonym for HWASAN_MARK.
3749 1.1 mrg That approach means we don't have to duplicate all the special
3750 1.1 mrg cases for ASAN_MARK and ASAN_POISON with the exact same handling
3751 1.1 mrg under a different HWASAN_MARK name.
3752 1.1 mrg
3753 1.1 mrg N.b. __asan_poison_stack_memory (which implements ASAN_MARK for ASAN)
3754 1.1 mrg rounds the size up to its shadow memory granularity, while
3755 1.1 mrg __hwasan_tag_memory (which implements the same for HWASAN) does not.
3756 1.1 mrg Hence we emit HWASAN_MARK with an aligned size unlike ASAN_MARK. */
3757 1.1 mrg tree len = gimple_call_arg (g, 2);
3758 1.1 mrg tree new_len = gimple_build_round_up (&stmts, loc, size_type_node, len,
3759 1.1 mrg HWASAN_TAG_GRANULE_SIZE);
3760 1.1 mrg gimple_build (&stmts, loc, CFN_HWASAN_MARK,
3761 1.1 mrg void_type_node, gimple_call_arg (g, 0),
3762 1.1 mrg base, new_len);
3763 1.1 mrg gsi_replace_with_seq (iter, stmts, true);
3764 1.1 mrg return false;
3765 1.1 mrg }
3766 1.1 mrg
3767 1.1 mrg if (is_poison)
3768 1.1 mrg {
3769 1.1 mrg if (asan_handled_variables == NULL)
3770 1.1 mrg asan_handled_variables = new hash_set<tree> (16);
3771 1.1 mrg asan_handled_variables->add (decl);
3772 1.1 mrg }
3773 1.1 mrg tree len = gimple_call_arg (g, 2);
3774 1.1 mrg
3775 1.1 mrg gcc_assert (poly_int_tree_p (len));
3776 1.1 mrg
3777 1.1 mrg g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3778 1.1 mrg NOP_EXPR, base);
3779 1.1 mrg gimple_set_location (g, loc);
3780 1.1 mrg gsi_replace (iter, g, false);
3781 1.1 mrg tree base_addr = gimple_assign_lhs (g);
3782 1.1 mrg
3783 1.1 mrg /* Emit the shadow stores directly if size_in_bytes is small. */
3784 1.1 mrg unsigned threshold = param_use_after_scope_direct_emission_threshold;
3785 1.1 mrg if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) <= threshold)
3786 1.1 mrg {
3787 1.1 mrg unsigned HOST_WIDE_INT size_in_bytes = tree_to_uhwi (len);
3788 1.1 mrg const unsigned HOST_WIDE_INT shadow_size
3789 1.1 mrg = shadow_mem_size (size_in_bytes);
3790 1.1 mrg const unsigned int shadow_align
3791 1.1 mrg = (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;
3792 1.1 mrg
3793 1.1 mrg tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3794 1.1 mrg shadow_ptr_types[0], true);
3795 1.1 mrg
3796 1.1 mrg for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3797 1.1 mrg {
3798 1.1 mrg unsigned size = 1;
3799 1.1 mrg if (shadow_size - offset >= 4
3800 1.1 mrg && (!STRICT_ALIGNMENT || shadow_align >= 4))
3801 1.1 mrg size = 4;
3802 1.1 mrg else if (shadow_size - offset >= 2
3803 1.1 mrg && (!STRICT_ALIGNMENT || shadow_align >= 2))
3804 1.1 mrg size = 2;
3805 1.1 mrg
3806 1.1 mrg unsigned HOST_WIDE_INT last_chunk_size = 0;
3807 1.1 mrg unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3808 1.1 mrg if (s > size_in_bytes)
3809 1.1 mrg last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3810 1.1 mrg
3811 1.1 mrg asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3812 1.1 mrg size, last_chunk_size);
3813 1.1 mrg offset += size;
3814 1.1 mrg }
3815 1.1 mrg }
3816 1.1 mrg else
3817 1.1 mrg {
3818 1.1 mrg g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3819 1.1 mrg NOP_EXPR, len);
3820 1.1 mrg gimple_set_location (g, loc);
3821 1.1 mrg gsi_insert_before (iter, g, GSI_SAME_STMT);
3822 1.1 mrg tree sz_arg = gimple_assign_lhs (g);
3823 1.1 mrg
3824 1.1 mrg tree fun
3825 1.1 mrg = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3826 1.1 mrg : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3827 1.1 mrg g = gimple_build_call (fun, 2, base_addr, sz_arg);
3828 1.1 mrg gimple_set_location (g, loc);
3829 1.1 mrg gsi_insert_after (iter, g, GSI_NEW_STMT);
3830 1.1 mrg }
3831 1.1 mrg
3832 1.1 mrg return false;
3833 1.1 mrg }
3834 1.1 mrg
3835 1.1 mrg /* Expand the ASAN_{LOAD,STORE} builtins. */
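/* When USE_CALLS is true (--param asan-instrumentation-with-call-threshold)
   the check is outlined, e.g. a 4-byte load check becomes
   __asan_load4 (addr) and a variable-length one __asan_loadN (addr, len);
   otherwise the shadow test is expanded inline below. */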
3836 1.1 mrg
3837 1.1 mrg bool
3838 1.1 mrg asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
3839 1.1 mrg {
3840 1.1 mrg gcc_assert (!hwasan_sanitize_p ());
3841 1.1 mrg gimple *g = gsi_stmt (*iter);
3842 1.1 mrg location_t loc = gimple_location (g);
3843 1.1 mrg bool recover_p;
3844 1.1 mrg if (flag_sanitize & SANITIZE_USER_ADDRESS)
3845 1.1 mrg recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3846 1.1 mrg else
3847 1.1 mrg recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3848 1.1 mrg
3849 1.1 mrg HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
3850 1.1 mrg gcc_assert (flags < ASAN_CHECK_LAST);
3851 1.1 mrg bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
3852 1.1 mrg bool is_store = (flags & ASAN_CHECK_STORE) != 0;
3853 1.1 mrg bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
3854 1.1 mrg
3855 1.1 mrg tree base = gimple_call_arg (g, 1);
3856 1.1 mrg tree len = gimple_call_arg (g, 2);
3857 1.1 mrg HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
3858 1.1 mrg
3859 1.1 mrg HOST_WIDE_INT size_in_bytes
3860 1.1 mrg = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3861 1.1 mrg
3862 1.1 mrg if (use_calls)
3863 1.1 mrg {
3864 1.1 mrg /* Instrument using callbacks. */
3865 1.1 mrg gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3866 1.1 mrg NOP_EXPR, base);
3867 1.1 mrg gimple_set_location (g, loc);
3868 1.1 mrg gsi_insert_before (iter, g, GSI_SAME_STMT);
3869 1.1 mrg tree base_addr = gimple_assign_lhs (g);
3870 1.1 mrg
3871 1.1 mrg int nargs;
3872 1.1 mrg tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
3873 1.1 mrg if (nargs == 1)
3874 1.1 mrg g = gimple_build_call (fun, 1, base_addr);
3875 1.1 mrg else
3876 1.1 mrg {
3877 1.1 mrg gcc_assert (nargs == 2);
3878 1.1 mrg g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3879 1.1 mrg NOP_EXPR, len);
3880 1.1 mrg gimple_set_location (g, loc);
3881 1.1 mrg gsi_insert_before (iter, g, GSI_SAME_STMT);
3882 1.1 mrg tree sz_arg = gimple_assign_lhs (g);
3883 1.1 mrg g = gimple_build_call (fun, nargs, base_addr, sz_arg);
3884 1.1 mrg }
3885 1.1 mrg gimple_set_location (g, loc);
3886 1.1 mrg gsi_replace (iter, g, false);
3887 1.1 mrg return false;
3888 1.1 mrg }
3889 1.1 mrg
3890 1.1 mrg HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
3891 1.1 mrg
3892 1.1 mrg tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
3893 1.1 mrg tree shadow_type = TREE_TYPE (shadow_ptr_type);
3894 1.1 mrg
3895 1.1 mrg gimple_stmt_iterator gsi = *iter;
3896 1.1 mrg
3897 1.1 mrg if (!is_non_zero_len)
3898 1.1 mrg {
3899 1.1 mrg /* So, the length of the memory area to asan-protect is not known
3900 1.1 mrg to be non-zero at compile time. Let's guard the generated
3901 1.1 mrg instrumentation code like:
3902 1.1 mrg
3903 1.1 mrg if (len != 0)
3904 1.1 mrg {
3905 1.1 mrg // asan instrumentation code goes here.
3906 1.1 mrg }
3907 1.1 mrg // fall-through instructions, starting with *ITER. */
3908 1.1 mrg
3909 1.1 mrg g = gimple_build_cond (NE_EXPR,
3910 1.1 mrg len,
3911 1.1 mrg build_int_cst (TREE_TYPE (len), 0),
3912 1.1 mrg NULL_TREE, NULL_TREE);
3913 1.1 mrg gimple_set_location (g, loc);
3914 1.1 mrg
3915 1.1 mrg basic_block then_bb, fallthrough_bb;
3916 1.1 mrg insert_if_then_before_iter (as_a <gcond *> (g), iter,
3917 1.1 mrg /*then_more_likely_p=*/true,
3918 1.1 mrg &then_bb, &fallthrough_bb);
3919 1.1 mrg /* Note that fallthrough_bb starts with the statement that was
3920 1.1 mrg pointed to by ITER. */
3921 1.1 mrg
3922 1.1 mrg /* The 'then block' of the 'if (len != 0)' condition is where
3923 1.1 mrg we'll generate the asan instrumentation code now. */
3924 1.1 mrg gsi = gsi_last_bb (then_bb);
3925 1.1 mrg }
3926 1.1 mrg
3927 1.1 mrg /* Get an iterator on the point where we can add the condition
3928 1.1 mrg statement for the instrumentation. */
3929 1.1 mrg basic_block then_bb, else_bb;
3930 1.1 mrg gsi = create_cond_insert_point (&gsi, /*before_p*/false,
3931 1.1 mrg /*then_more_likely_p=*/false,
3932 1.1 mrg /*create_then_fallthru_edge*/recover_p,
3933 1.1 mrg &then_bb,
3934 1.1 mrg &else_bb);
3935 1.1 mrg
3936 1.1 mrg g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3937 1.1 mrg NOP_EXPR, base);
3938 1.1 mrg gimple_set_location (g, loc);
3939 1.1 mrg gsi_insert_before (&gsi, g, GSI_NEW_STMT);
3940 1.1 mrg tree base_addr = gimple_assign_lhs (g);
3941 1.1 mrg
3942 1.1 mrg tree t = NULL_TREE;
3943 1.1 mrg if (real_size_in_bytes >= 8)
3944 1.1 mrg {
3945 1.1 mrg tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3946 1.1 mrg shadow_ptr_type);
3947 1.1 mrg t = shadow;
3948 1.1 mrg }
3949 1.1 mrg else
3950 1.1 mrg {
3951 1.1 mrg /* Slow path for 1, 2 and 4 byte accesses. */
3952 1.1 mrg /* Test (shadow != 0)
3953 1.1 mrg & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow). */
3954 1.1 mrg tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3955 1.1 mrg shadow_ptr_type);
3956 1.1 mrg gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3957 1.1 mrg gimple_seq seq = NULL;
3958 1.1 mrg gimple_seq_add_stmt (&seq, shadow_test);
3959 1.1 mrg /* Accesses aligned to at least 8 bytes can test just
3960 1.1 mrg (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
3961 1.1 mrg to be 0. */
3962 1.1 mrg if (align < 8)
3963 1.1 mrg {
3964 1.1 mrg gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3965 1.1 mrg base_addr, 7));
3966 1.1 mrg gimple_seq_add_stmt (&seq,
3967 1.1 mrg build_type_cast (shadow_type,
3968 1.1 mrg gimple_seq_last (seq)));
3969 1.1 mrg if (real_size_in_bytes > 1)
3970 1.1 mrg gimple_seq_add_stmt (&seq,
3971 1.1 mrg build_assign (PLUS_EXPR,
3972 1.1 mrg gimple_seq_last (seq),
3973 1.1 mrg real_size_in_bytes - 1));
3974 1.1 mrg t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
3975 1.1 mrg }
3976 1.1 mrg else
3977 1.1 mrg t = build_int_cst (shadow_type, real_size_in_bytes - 1);
3978 1.1 mrg gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
3979 1.1 mrg gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3980 1.1 mrg gimple_seq_last (seq)));
3981 1.1 mrg t = gimple_assign_lhs (gimple_seq_last (seq));
3982 1.1 mrg gimple_seq_set_location (seq, loc);
3983 1.1 mrg gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3984 1.1 mrg
3985 1.1 mrg /* For non-constant, misaligned or otherwise weird access sizes,
3986 1.1 mrg check first and last byte. */
3987 1.1 mrg if (size_in_bytes == -1)
3988 1.1 mrg {
3989 1.1 mrg g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3990 1.1 mrg MINUS_EXPR, len,
3991 1.1 mrg build_int_cst (pointer_sized_int_node, 1));
3992 1.1 mrg gimple_set_location (g, loc);
3993 1.1 mrg gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3994 1.1 mrg tree last = gimple_assign_lhs (g);
3995 1.1 mrg g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3996 1.1 mrg PLUS_EXPR, base_addr, last);
3997 1.1 mrg gimple_set_location (g, loc);
3998 1.1 mrg gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3999 1.1 mrg tree base_end_addr = gimple_assign_lhs (g);
4000 1.1 mrg
4001 1.1 mrg tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
4002 1.1 mrg shadow_ptr_type);
4003 1.1 mrg gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
4004 1.1 mrg gimple_seq seq = NULL;
4005 1.1 mrg gimple_seq_add_stmt (&seq, shadow_test);
4006 1.1 mrg gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
4007 1.1 mrg base_end_addr, 7));
4008 1.1 mrg gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
4009 1.1 mrg gimple_seq_last (seq)));
4010 1.1 mrg gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
4011 1.1 mrg gimple_seq_last (seq),
4012 1.1 mrg shadow));
4013 1.1 mrg gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
4014 1.1 mrg gimple_seq_last (seq)));
4015 1.1 mrg gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
4016 1.1 mrg gimple_seq_last (seq)));
4017 1.1 mrg t = gimple_assign_lhs (gimple_seq_last (seq));
4018 1.1 mrg gimple_seq_set_location (seq, loc);
4019 1.1 mrg gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
4020 1.1 mrg }
4021 1.1 mrg }
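/* At this point T holds the combined condition. For a non-constant
length the structure is roughly (pseudo-C; check_byte is an invented
name for the SEQ built above):

first_bad = check_byte (base_addr);
last_bad = check_byte (base_addr + len - 1);
t = first_bad | last_bad;

so the branch built below reports when either the first or the last
byte of the access lands in poisoned shadow. */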
4022 1.1 mrg
4023 1.1 mrg g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
4024 1.1 mrg NULL_TREE, NULL_TREE);
4025 1.1 mrg gimple_set_location (g, loc);
4026 1.1 mrg gsi_insert_after (&gsi, g, GSI_NEW_STMT);
4027 1.1 mrg
4028 1.1 mrg /* Generate call to the run-time library (e.g. __asan_report_load8). */
4029 1.1 mrg gsi = gsi_start_bb (then_bb);
4030 1.1 mrg int nargs;
4031 1.1 mrg tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
4032 1.1 mrg g = gimple_build_call (fun, nargs, base_addr, len);
4033 1.1 mrg gimple_set_location (g, loc);
4034 1.1 mrg gsi_insert_after (&gsi, g, GSI_NEW_STMT);
4035 1.1 mrg
4036 1.1 mrg gsi_remove (iter, true);
4037 1.1 mrg *iter = gsi_start_bb (else_bb);
4038 1.1 mrg
4039 1.1 mrg return true;
4040 1.1 mrg }
4041 1.1 mrg
4042 1.1 mrg /* Create ASAN shadow variable for a VAR_DECL which has been rewritten
4043 1.1 mrg into SSA. Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING. */
4044 1.1 mrg
4045 1.1 mrg static tree
4046 1.1 mrg create_asan_shadow_var (tree var_decl,
4047 1.1 mrg hash_map<tree, tree> &shadow_vars_mapping)
4048 1.1 mrg {
4049 1.1 mrg tree *slot = shadow_vars_mapping.get (var_decl);
4050 1.1 mrg if (slot == NULL)
4051 1.1 mrg {
4052 1.1 mrg tree shadow_var = copy_node (var_decl);
4053 1.1 mrg
4054 1.1 mrg copy_body_data id;
4055 1.1 mrg memset (&id, 0, sizeof (copy_body_data));
4056 1.1 mrg id.src_fn = id.dst_fn = current_function_decl;
4057 1.1 mrg copy_decl_for_dup_finish (&id, var_decl, shadow_var);
4058 1.1 mrg
4059 1.1 mrg DECL_ARTIFICIAL (shadow_var) = 1;
4060 1.1 mrg DECL_IGNORED_P (shadow_var) = 1;
4061 1.1 mrg DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
4062 1.1 mrg gimple_add_tmp_var (shadow_var);
4063 1.1 mrg
4064 1.1 mrg shadow_vars_mapping.put (var_decl, shadow_var);
4065 1.1 mrg return shadow_var;
4066 1.1 mrg }
4067 1.1 mrg else
4068 1.1 mrg return *slot;
4069 1.1 mrg }
4070 1.1 mrg
4071 1.1 mrg /* Expand ASAN_POISON ifn. */
4072 1.1 mrg
4073 1.1 mrg bool
4074 1.1 mrg asan_expand_poison_ifn (gimple_stmt_iterator *iter,
4075 1.1 mrg bool *need_commit_edge_insert,
4076 1.1 mrg hash_map<tree, tree> &shadow_vars_mapping)
4077 1.1 mrg {
4078 1.1 mrg gimple *g = gsi_stmt (*iter);
4079 1.1 mrg tree poisoned_var = gimple_call_lhs (g);
4080 1.1 mrg if (!poisoned_var || has_zero_uses (poisoned_var))
4081 1.1 mrg {
4082 1.1 mrg gsi_remove (iter, true);
4083 1.1 mrg return true;
4084 1.1 mrg }
4085 1.1 mrg
4086 1.1 mrg if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
4087 1.1 mrg SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
4088 1.1 mrg create_tmp_var (TREE_TYPE (poisoned_var)));
4089 1.1 mrg
4090 1.1 mrg tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
4091 1.1 mrg shadow_vars_mapping);
4092 1.1 mrg
4093 1.1 mrg bool recover_p;
4094 1.1 mrg if (flag_sanitize & SANITIZE_USER_ADDRESS)
4095 1.1 mrg recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
4096 1.1 mrg else
4097 1.1 mrg recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
4098 1.1 mrg tree size = DECL_SIZE_UNIT (shadow_var);
4099 1.1 mrg gimple *poison_call
4100 1.1 mrg = gimple_build_call_internal (IFN_ASAN_MARK, 3,
4101 1.1 mrg build_int_cst (integer_type_node,
4102 1.1 mrg ASAN_MARK_POISON),
4103 1.1 mrg build_fold_addr_expr (shadow_var), size);
4104 1.1 mrg
4105 1.1 mrg gimple *use;
4106 1.1 mrg imm_use_iterator imm_iter;
4107 1.1 mrg FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
4108 1.1 mrg {
4109 1.1 mrg if (is_gimple_debug (use))
4110 1.1 mrg continue;
4111 1.1 mrg
4112 1.1 mrg int nargs;
4113 1.1 mrg bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
4114 1.1 mrg gcall *call;
4115 1.1 mrg if (hwasan_sanitize_p ())
4116 1.1 mrg {
4117 1.1 mrg tree fun = builtin_decl_implicit (BUILT_IN_HWASAN_TAG_MISMATCH4);
4118 1.1 mrg /* NOTE: hwasan has no __hwasan_report_* functions like asan does.
4119 1.1 mrg We use __hwasan_tag_mismatch4 with arguments that tell it the
4120 1.1 mrg size and kind of the access, and rely on it to report all tag mismatches.
4121 1.1 mrg
4122 1.1 mrg The arguments to this function are:
4123 1.1 mrg Address of invalid access.
4124 1.1 mrg Bitfield containing information about the access
4125 1.1 mrg (access_info)
4126 1.1 mrg Pointer to a frame of registers
4127 1.1 mrg (for use in printing the contents of registers in a dump)
4128 1.1 mrg Not used yet -- to be used by inline instrumentation.
4129 1.1 mrg Size of access.
4130 1.1 mrg
4131 1.1 mrg The access_info bitfield encodes the following pieces of
4132 1.1 mrg information:
4133 1.1 mrg - Is this a store or load?
4134 1.1 mrg access_info & 0x10 => store
4135 1.1 mrg - Should the program continue after reporting the error?
4136 1.1 mrg access_info & 0x20 => recover
4137 1.1 mrg - What size access is this (not used here since we can always
4138 1.1 mrg pass the size in the last argument)
4139 1.1 mrg
4140 1.1 mrg if ((access_info & 0xf) == 0xf)
4141 1.1 mrg size is taken from last argument.
4142 1.1 mrg else
4143 1.1 mrg size == 1 << (access_info & 0xf)
4144 1.1 mrg
4145 1.1 mrg The last argument contains the size of the access iff the
4146 1.1 mrg access_info size indicator is 0xf (we always use this argument
4147 1.1 mrg rather than storing the size in the access_info bitfield).
4148 1.1 mrg
4149 1.1 mrg See the function definition `__hwasan_tag_mismatch4` in
4150 1.1 mrg libsanitizer/hwasan for the full definition.
4151 1.1 mrg */
4152 1.1 mrg unsigned access_info = (0x20 * recover_p)
4153 1.1 mrg + (0x10 * store_p)
4154 1.1 mrg + (0xf);
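/* E.g. a store compiled with -fsanitize-recover=hwaddress gets
access_info == 0x20 + 0x10 + 0xf == 0x3f: recover, store, and size
taken from the final argument. */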
4155 1.1 mrg call = gimple_build_call (fun, 4,
4156 1.1 mrg build_fold_addr_expr (shadow_var),
4157 1.1 mrg build_int_cst (pointer_sized_int_node,
4158 1.1 mrg access_info),
4159 1.1 mrg build_int_cst (pointer_sized_int_node, 0),
4160 1.1 mrg size);
4161 1.1 mrg }
4162 1.1 mrg else
4163 1.1 mrg {
4164 1.1 mrg tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
4165 1.1 mrg &nargs);
4166 1.1 mrg call = gimple_build_call (fun, 1,
4167 1.1 mrg build_fold_addr_expr (shadow_var));
4168 1.1 mrg }
4169 1.1 mrg gimple_set_location (call, gimple_location (use));
4170 1.1 mrg gimple *call_to_insert = call;
4171 1.1 mrg
4172 1.1 mrg /* The USE can be a gimple PHI node. If so, insert the call on
4173 1.1 mrg all edges leading to the PHI node. */
4174 1.1 mrg if (is_a <gphi *> (use))
4175 1.1 mrg {
4176 1.1 mrg gphi *phi = dyn_cast<gphi *> (use);
4177 1.1 mrg for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
4178 1.1 mrg if (gimple_phi_arg_def (phi, i) == poisoned_var)
4179 1.1 mrg {
4180 1.1 mrg edge e = gimple_phi_arg_edge (phi, i);
4181 1.1 mrg
4182 1.1 mrg /* Do not insert on an edge we can't split. */
4183 1.1 mrg if (e->flags & EDGE_ABNORMAL)
4184 1.1 mrg continue;
4185 1.1 mrg
4186 1.1 mrg if (call_to_insert == NULL)
4187 1.1 mrg call_to_insert = gimple_copy (call);
4188 1.1 mrg
4189 1.1 mrg gsi_insert_seq_on_edge (e, call_to_insert);
4190 1.1 mrg *need_commit_edge_insert = true;
4191 1.1 mrg call_to_insert = NULL;
4192 1.1 mrg }
4193 1.1 mrg }
4194 1.1 mrg else
4195 1.1 mrg {
4196 1.1 mrg gimple_stmt_iterator gsi = gsi_for_stmt (use);
4197 1.1 mrg if (store_p)
4198 1.1 mrg gsi_replace (&gsi, call, true);
4199 1.1 mrg else
4200 1.1 mrg gsi_insert_before (&gsi, call, GSI_NEW_STMT);
4201 1.1 mrg }
4202 1.1 mrg }
4203 1.1 mrg
4204 1.1 mrg SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
4205 1.1 mrg SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
4206 1.1 mrg gsi_replace (iter, poison_call, false);
4207 1.1 mrg
4208 1.1 mrg return true;
4209 1.1 mrg }
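/* A sketch of the overall transformation (variable names invented):

x_1 = ASAN_POISON ();
...
_2 = x_1 + 1; // use of x after its scope ended

becomes

ASAN_MARK (POISON, &x_shadow, sizeof (x));
...
__asan_report_load4 (&x_shadow); // e.g.; inserted before the use
_2 = x_1 + 1;

where x_shadow is the artificial stack copy created by
create_asan_shadow_var, so the runtime report points at poisoned
stack memory of the right size. */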
4210 1.1 mrg
4211 1.1 mrg /* Instrument the current function. */
4212 1.1 mrg
4213 1.1 mrg static unsigned int
4214 1.1 mrg asan_instrument (void)
4215 1.1 mrg {
4216 1.1 mrg if (hwasan_sanitize_p ())
4217 1.1 mrg {
4218 1.1 mrg transform_statements ();
4219 1.1 mrg return 0;
4220 1.1 mrg }
4221 1.1 mrg
4222 1.1 mrg if (shadow_ptr_types[0] == NULL_TREE)
4223 1.1 mrg asan_init_shadow_ptr_types ();
4224 1.1 mrg transform_statements ();
4225 1.1 mrg last_alloca_addr = NULL_TREE;
4226 1.1 mrg return 0;
4227 1.1 mrg }
4228 1.1 mrg
4229 1.1 mrg static bool
4230 1.1 mrg gate_asan (void)
4231 1.1 mrg {
4232 1.1 mrg return sanitize_flags_p (SANITIZE_ADDRESS);
4233 1.1 mrg }
4234 1.1 mrg
4235 1.1 mrg namespace {
4236 1.1 mrg
4237 1.1 mrg const pass_data pass_data_asan =
4238 1.1 mrg {
4239 1.1 mrg GIMPLE_PASS, /* type */
4240 1.1 mrg "asan", /* name */
4241 1.1 mrg OPTGROUP_NONE, /* optinfo_flags */
4242 1.1 mrg TV_NONE, /* tv_id */
4243 1.1 mrg ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
4244 1.1 mrg 0, /* properties_provided */
4245 1.1 mrg 0, /* properties_destroyed */
4246 1.1 mrg 0, /* todo_flags_start */
4247 1.1 mrg TODO_update_ssa, /* todo_flags_finish */
4248 1.1 mrg };
4249 1.1 mrg
4250 1.1 mrg class pass_asan : public gimple_opt_pass
4251 1.1 mrg {
4252 1.1 mrg public:
4253 1.1 mrg pass_asan (gcc::context *ctxt)
4254 1.1 mrg : gimple_opt_pass (pass_data_asan, ctxt)
4255 1.1 mrg {}
4256 1.1 mrg
4257 1.1 mrg /* opt_pass methods: */
4258 1.1 mrg opt_pass * clone () { return new pass_asan (m_ctxt); }
4259 1.1 mrg virtual bool gate (function *) { return gate_asan () || gate_hwasan (); }
4260 1.1 mrg virtual unsigned int execute (function *) { return asan_instrument (); }
4261 1.1 mrg
4262 1.1 mrg }; // class pass_asan
4263 1.1 mrg
4264 1.1 mrg } // anon namespace
4265 1.1 mrg
4266 1.1 mrg gimple_opt_pass *
4267 1.1 mrg make_pass_asan (gcc::context *ctxt)
4268 1.1 mrg {
4269 1.1 mrg return new pass_asan (ctxt);
4270 1.1 mrg }
4271 1.1 mrg
4272 1.1 mrg namespace {
4273 1.1 mrg
4274 1.1 mrg const pass_data pass_data_asan_O0 =
4275 1.1 mrg {
4276 1.1 mrg GIMPLE_PASS, /* type */
4277 1.1 mrg "asan0", /* name */
4278 1.1 mrg OPTGROUP_NONE, /* optinfo_flags */
4279 1.1 mrg TV_NONE, /* tv_id */
4280 1.1 mrg ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
4281 1.1 mrg 0, /* properties_provided */
4282 1.1 mrg 0, /* properties_destroyed */
4283 1.1 mrg 0, /* todo_flags_start */
4284 1.1 mrg TODO_update_ssa, /* todo_flags_finish */
4285 1.1 mrg };
4286 1.1 mrg
4287 1.1 mrg class pass_asan_O0 : public gimple_opt_pass
4288 1.1 mrg {
4289 1.1 mrg public:
4290 1.1 mrg pass_asan_O0 (gcc::context *ctxt)
4291 1.1 mrg : gimple_opt_pass (pass_data_asan_O0, ctxt)
4292 1.1 mrg {}
4293 1.1 mrg
4294 1.1 mrg /* opt_pass methods: */
4295 1.1 mrg virtual bool gate (function *)
4296 1.1 mrg {
4297 1.1 mrg return !optimize && (gate_asan () || gate_hwasan ());
4298 1.1 mrg }
4299 1.1 mrg virtual unsigned int execute (function *) { return asan_instrument (); }
4300 1.1 mrg
4301 1.1 mrg }; // class pass_asan_O0
4302 1.1 mrg
4303 1.1 mrg } // anon namespace
4304 1.1 mrg
4305 1.1 mrg gimple_opt_pass *
4306 1.1 mrg make_pass_asan_O0 (gcc::context *ctxt)
4307 1.1 mrg {
4308 1.1 mrg return new pass_asan_O0 (ctxt);
4309 1.1 mrg }
4310 1.1 mrg
4311 1.1 mrg /* HWASAN */
4312 1.1 mrg
4313 1.1 mrg /* For stack tagging:
4314 1.1 mrg
4315 1.1 mrg Return the offset from the frame base tag that the "next" expanded object
4316 1.1 mrg should have. */
4317 1.1 mrg uint8_t
4318 1.1 mrg hwasan_current_frame_tag ()
4319 1.1 mrg {
4320 1.1 mrg return hwasan_frame_tag_offset;
4321 1.1 mrg }
4322 1.1 mrg
4323 1.1 mrg /* For stack tagging:
4324 1.1 mrg
4325 1.1 mrg Return the 'base pointer' for this function. If that base pointer has not
4326 1.1 mrg yet been created then we create a register to hold it and record the insns
4327 1.1 mrg to initialize the register in `hwasan_frame_base_init_seq` for later
4328 1.1 mrg emission. */
4329 1.1 mrg rtx
4330 1.1 mrg hwasan_frame_base ()
4331 1.1 mrg {
4332 1.1 mrg if (! hwasan_frame_base_ptr)
4333 1.1 mrg {
4334 1.1 mrg start_sequence ();
4335 1.1 mrg hwasan_frame_base_ptr
4336 1.1 mrg = force_reg (Pmode,
4337 1.1 mrg targetm.memtag.insert_random_tag (virtual_stack_vars_rtx,
4338 1.1 mrg NULL_RTX));
4339 1.1 mrg hwasan_frame_base_init_seq = get_insns ();
4340 1.1 mrg end_sequence ();
4341 1.1 mrg }
4342 1.1 mrg
4343 1.1 mrg return hwasan_frame_base_ptr;
4344 1.1 mrg }
4345 1.1 mrg
4346 1.1 mrg /* For stack tagging:
4347 1.1 mrg
4348 1.1 mrg Check whether this RTX is a standard pointer addressing the base of the
4349 1.1 mrg stack variables for this frame. Returns true if the RTX is either
4350 1.1 mrg virtual_stack_vars_rtx or hwasan_frame_base_ptr. */
4351 1.1 mrg bool
4352 1.1 mrg stack_vars_base_reg_p (rtx base)
4353 1.1 mrg {
4354 1.1 mrg return base == virtual_stack_vars_rtx || base == hwasan_frame_base_ptr;
4355 1.1 mrg }
4356 1.1 mrg
4357 1.1 mrg /* For stack tagging:
4358 1.1 mrg
4359 1.1 mrg Emit frame base initialisation.
4360 1.1 mrg If hwasan_frame_base has been used before here then
4361 1.1 mrg hwasan_frame_base_init_seq contains the sequence of instructions to
4362 1.1 mrg initialize it. This must be put just before the hwasan prologue, so we emit
4363 1.1 mrg the insns before parm_birth_insn (which will point to the first instruction
4364 1.1 mrg of the hwasan prologue if it exists).
4365 1.1 mrg
4366 1.1 mrg We update `parm_birth_insn` to point to the start of this initialisation
4367 1.1 mrg since that represents the end of the initialisation done by
4368 1.1 mrg expand_function_{start,end} functions and we want to maintain that. */
4369 1.1 mrg void
4370 1.1 mrg hwasan_maybe_emit_frame_base_init ()
4371 1.1 mrg {
4372 1.1 mrg if (! hwasan_frame_base_init_seq)
4373 1.1 mrg return;
4374 1.1 mrg emit_insn_before (hwasan_frame_base_init_seq, parm_birth_insn);
4375 1.1 mrg parm_birth_insn = hwasan_frame_base_init_seq;
4376 1.1 mrg }
4377 1.1 mrg
4378 1.1 mrg /* Record a compile-time constant size stack variable that HWASAN will need to
4379 1.1 mrg tag. This record of the range of a stack variable will be used by
4380 1.1 mrg `hwasan_emit_prologue` to emit the RTL at the start of each frame which will
4381 1.1 mrg set tags in the shadow memory according to the assigned tag for each object.
4382 1.1 mrg
4383 1.1 mrg The range that the object spans in stack space should be described by the
4384 1.1 mrg bounds `untagged_base + nearest_offset` and
4385 1.1 mrg `untagged_base + farthest_offset`.
4386 1.1 mrg `tagged_base` is the base address which contains the "base frame tag" for
4387 1.1 mrg this frame, and from which the value to address this object with will be
4388 1.1 mrg calculated.
4389 1.1 mrg
4390 1.1 mrg We record the `untagged_base` since the functions in the hwasan library we
4391 1.1 mrg use to tag memory take pointers without a tag. */
4392 1.1 mrg void
4393 1.1 mrg hwasan_record_stack_var (rtx untagged_base, rtx tagged_base,
4394 1.1 mrg poly_int64 nearest_offset, poly_int64 farthest_offset)
4395 1.1 mrg {
4396 1.1 mrg hwasan_stack_var cur_var;
4397 1.1 mrg cur_var.untagged_base = untagged_base;
4398 1.1 mrg cur_var.tagged_base = tagged_base;
4399 1.1 mrg cur_var.nearest_offset = nearest_offset;
4400 1.1 mrg cur_var.farthest_offset = farthest_offset;
4401 1.1 mrg cur_var.tag_offset = hwasan_current_frame_tag ();
4402 1.1 mrg
4403 1.1 mrg hwasan_tagged_stack_vars.safe_push (cur_var);
4404 1.1 mrg }
4405 1.1 mrg
4406 1.1 mrg /* Return the RTX representing the farthest extent of the statically allocated
4407 1.1 mrg stack objects for this frame. If hwasan_frame_base_ptr has not been
4408 1.1 mrg initialized then we are not storing any static variables on the stack in
4409 1.1 mrg this frame. In this case we return NULL_RTX to represent that.
4410 1.1 mrg
4411 1.1 mrg Otherwise simply return virtual_stack_vars_rtx + frame_offset. */
4412 1.1 mrg rtx
4413 1.1 mrg hwasan_get_frame_extent ()
4414 1.1 mrg {
4415 1.1 mrg return (hwasan_frame_base_ptr
4416 1.1 mrg ? plus_constant (Pmode, virtual_stack_vars_rtx, frame_offset)
4417 1.1 mrg : NULL_RTX);
4418 1.1 mrg }
4419 1.1 mrg
4420 1.1 mrg /* For stack tagging:
4421 1.1 mrg
4422 1.1 mrg Increment the frame tag offset modulo the number of values a tag can represent. */
4423 1.1 mrg void
4424 1.1 mrg hwasan_increment_frame_tag ()
4425 1.1 mrg {
4426 1.1 mrg uint8_t tag_bits = HWASAN_TAG_SIZE;
4427 1.1 mrg gcc_assert (HWASAN_TAG_SIZE
4428 1.1 mrg <= sizeof (hwasan_frame_tag_offset) * CHAR_BIT);
4429 1.1 mrg hwasan_frame_tag_offset = (hwasan_frame_tag_offset + 1) % (1 << tag_bits);
4430 1.1 mrg /* The "background tag" of the stack is zero by definition.
4431 1.1 mrg This is the tag that objects like parameters passed on the stack and
4432 1.1 mrg spilled registers are given. It is handy to avoid this tag for objects
4433 1.1 mrg whose tags we decide ourselves, partly to ensure that buffer overruns
4434 1.1 mrg can't affect these important variables (e.g. saved link register, saved
4435 1.1 mrg stack pointer etc) and partly to make debugging easier (everything with a
4436 1.1 mrg tag of zero is space allocated automatically by the compiler).
4437 1.1 mrg
4438 1.1 mrg This is not feasible when using random frame tags (the default
4439 1.1 mrg configuration for hwasan) since the tag for the given frame is randomly
4440 1.1 mrg chosen at runtime. In order to avoid any tags matching the stack
4441 1.1 mrg background we would need to decide tag offsets at runtime instead of
4442 1.1 mrg compile time (and pay the resulting performance cost).
4443 1.1 mrg
4444 1.1 mrg When not using random base tags for each frame (i.e. when compiled with
4445 1.1 mrg `--param hwasan-random-frame-tag=0`) the base tag for each frame is zero.
4446 1.1 mrg This means the tag that each object gets is equal to the
4447 1.1 mrg hwasan_frame_tag_offset used in determining it.
4448 1.1 mrg When this is the case we *can* ensure no object gets the tag of zero by
4449 1.1 mrg simply ensuring no object has the hwasan_frame_tag_offset of zero.
4450 1.1 mrg
4451 1.1 mrg There is the extra complication that we only record the
4452 1.1 mrg hwasan_frame_tag_offset here (which is the offset from the tag stored in
4453 1.1 mrg the stack pointer). In the kernel, the tag in the stack pointer is 0xff
4454 1.1 mrg rather than zero. This does not cause problems since tags of 0xff are
4455 1.1 mrg never checked in the kernel. As mentioned at the beginning of this
4456 1.1 mrg comment the background tag of the stack is zero by definition, which means
4457 1.1 mrg that for the kernel we should skip offsets of both 0 and 1 from the stack
4458 1.1 mrg pointer. Avoiding the offset of 0 ensures we use a tag which will be
4459 1.1 mrg checked, avoiding the offset of 1 ensures we use a tag that is not the
4460 1.1 mrg same as the background. */
4461 1.1 mrg if (hwasan_frame_tag_offset == 0 && ! param_hwasan_random_frame_tag)
4462 1.1 mrg hwasan_frame_tag_offset += 1;
4463 1.1 mrg if (hwasan_frame_tag_offset == 1 && ! param_hwasan_random_frame_tag
4464 1.1 mrg && sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS))
4465 1.1 mrg hwasan_frame_tag_offset += 1;
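/* For example, with non-random frame tags the offset cycles through
1, 2, ..., (1 << HWASAN_TAG_SIZE) - 1, 1, ... in user mode; for the
kernel the cycle starts at 2 instead, since offsets 0 and 1 are both
skipped as described above. */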
4466 1.1 mrg }
4467 1.1 mrg
4468 1.1 mrg /* Clear internal state for the next function.
4469 1.1 mrg This function is called before variables on the stack get expanded, in
4470 1.1 mrg `init_vars_expansion`. */
4471 1.1 mrg void
4472 1.1 mrg hwasan_record_frame_init ()
4473 1.1 mrg {
4474 1.1 mrg delete asan_used_labels;
4475 1.1 mrg asan_used_labels = NULL;
4476 1.1 mrg
4477 1.1 mrg /* If this isn't the case then some stack variable was recorded *before*
4478 1.1 mrg hwasan_record_frame_init is called, yet *after* the hwasan prologue for
4479 1.1 mrg the previous frame was emitted. Such stack variables would not have
4480 1.1 mrg their shadow stack filled in. */
4481 1.1 mrg gcc_assert (hwasan_tagged_stack_vars.is_empty ());
4482 1.1 mrg hwasan_frame_base_ptr = NULL_RTX;
4483 1.1 mrg hwasan_frame_base_init_seq = NULL;
4484 1.1 mrg
4485 1.1 mrg /* When not using a random frame tag we can avoid the background stack
4486 1.1 mrg color which gives the user a little better debug output upon a crash.
4487 1.1 mrg Meanwhile, when using a random frame tag it will be nice to avoid adding
4488 1.1 mrg tags for the first object since that is unnecessary extra work.
4489 1.1 mrg Hence set the initial hwasan_frame_tag_offset to be 0 if using a random
4490 1.1 mrg frame tag and 1 otherwise.
4491 1.1 mrg
4492 1.1 mrg As described in hwasan_increment_frame_tag, in the kernel the stack
4493 1.1 mrg pointer has the tag 0xff. That means that to avoid 0xff and 0 (the tag
4494 1.1 mrg which the kernel does not check and the background tag respectively) we
4495 1.1 mrg start with a tag offset of 2. */
4496 1.1 mrg hwasan_frame_tag_offset = param_hwasan_random_frame_tag
4497 1.1 mrg ? 0
4498 1.1 mrg : sanitize_flags_p (SANITIZE_KERNEL_HWADDRESS) ? 2 : 1;
4499 1.1 mrg }
4500 1.1 mrg
4501 1.1 mrg /* For stack tagging:
4502 1.1 mrg (Emits HWASAN equivalent of what is emitted by
4503 1.1 mrg `asan_emit_stack_protection`).
4504 1.1 mrg
4505 1.1 mrg Emits the extra prologue code to set the shadow stack as required for HWASAN
4506 1.1 mrg stack instrumentation.
4507 1.1 mrg
4508 1.1 mrg Uses the vector of recorded stack variables hwasan_tagged_stack_vars. When
4509 1.1 mrg this function has completed hwasan_tagged_stack_vars is empty and all
4510 1.1 mrg objects it had pointed to are deallocated. */
4511 1.1 mrg void
4512 1.1 mrg hwasan_emit_prologue ()
4513 1.1 mrg {
4514 1.1 mrg /* We need untagged base pointers since libhwasan only accepts untagged
4515 1.1 mrg pointers in __hwasan_tag_memory. We need the tagged base pointer to obtain
4516 1.1 mrg the base tag for an offset. */
4517 1.1 mrg
4518 1.1 mrg if (hwasan_tagged_stack_vars.is_empty ())
4519 1.1 mrg return;
4520 1.1 mrg
4521 1.1 mrg poly_int64 bot = 0, top = 0;
4522 1.1 mrg for (hwasan_stack_var &cur : hwasan_tagged_stack_vars)
4523 1.1 mrg {
4524 1.1 mrg poly_int64 nearest = cur.nearest_offset;
4525 1.1 mrg poly_int64 farthest = cur.farthest_offset;
4526 1.1 mrg
4527 1.1 mrg if (known_ge (nearest, farthest))
4528 1.1 mrg {
4529 1.1 mrg top = nearest;
4530 1.1 mrg bot = farthest;
4531 1.1 mrg }
4532 1.1 mrg else
4533 1.1 mrg {
4534 1.1 mrg /* Given how these values are calculated, one must be known greater
4535 1.1 mrg than the other. */
4536 1.1 mrg gcc_assert (known_le (nearest, farthest));
4537 1.1 mrg top = farthest;
4538 1.1 mrg bot = nearest;
4539 1.1 mrg }
4540 1.1 mrg poly_int64 size = (top - bot);
4541 1.1 mrg
4542 1.1 mrg /* Assert the edge of each variable is aligned to the HWASAN tag granule
4543 1.1 mrg size. */
4544 1.1 mrg gcc_assert (multiple_p (top, HWASAN_TAG_GRANULE_SIZE));
4545 1.1 mrg gcc_assert (multiple_p (bot, HWASAN_TAG_GRANULE_SIZE));
4546 1.1 mrg gcc_assert (multiple_p (size, HWASAN_TAG_GRANULE_SIZE));
4547 1.1 mrg
4548 1.1 mrg rtx fn = init_one_libfunc ("__hwasan_tag_memory");
4549 1.1 mrg rtx base_tag = targetm.memtag.extract_tag (cur.tagged_base, NULL_RTX);
4550 1.1 mrg rtx tag = plus_constant (QImode, base_tag, cur.tag_offset);
4551 1.1 mrg tag = hwasan_truncate_to_tag_size (tag, NULL_RTX);
4552 1.1 mrg
4553 1.1 mrg rtx bottom = convert_memory_address (ptr_mode,
4554 1.1 mrg plus_constant (Pmode,
4555 1.1 mrg cur.untagged_base,
4556 1.1 mrg bot));
4557 1.1 mrg emit_library_call (fn, LCT_NORMAL, VOIDmode,
4558 1.1 mrg bottom, ptr_mode,
4559 1.1 mrg tag, QImode,
4560 1.1 mrg gen_int_mode (size, ptr_mode), ptr_mode);
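/* E.g. a 16-byte object spanning frame offsets [-32, -16) with a tag
offset of 1 results in a call equivalent to
__hwasan_tag_memory (untagged_base - 32, (base_tag + 1) & tag_mask, 16),
where tag_mask keeps HWASAN_TAG_SIZE bits (applied above by
hwasan_truncate_to_tag_size). */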
4561 1.1 mrg }
4562 1.1 mrg /* Clear the stack vars, we've emitted the prologue for them all now. */
4563 1.1 mrg hwasan_tagged_stack_vars.truncate (0);
4564 1.1 mrg }
4565 1.1 mrg
4566 1.1 mrg /* For stack tagging:
4567 1.1 mrg
4568 1.1 mrg Return RTL insns to clear the tags between DYNAMIC and VARS pointers
4569 1.1 mrg into the stack. These instructions should be emitted at the end of
4570 1.1 mrg every function.
4571 1.1 mrg
4572 1.1 mrg If `dynamic` is NULL_RTX then no insns are returned. */
4573 1.1 mrg rtx_insn *
4574 1.1 mrg hwasan_emit_untag_frame (rtx dynamic, rtx vars)
4575 1.1 mrg {
4576 1.1 mrg if (! dynamic)
4577 1.1 mrg return NULL;
4578 1.1 mrg
4579 1.1 mrg start_sequence ();
4580 1.1 mrg
4581 1.1 mrg dynamic = convert_memory_address (ptr_mode, dynamic);
4582 1.1 mrg vars = convert_memory_address (ptr_mode, vars);
4583 1.1 mrg
4584 1.1 mrg rtx top_rtx;
4585 1.1 mrg rtx bot_rtx;
4586 1.1 mrg if (FRAME_GROWS_DOWNWARD)
4587 1.1 mrg {
4588 1.1 mrg top_rtx = vars;
4589 1.1 mrg bot_rtx = dynamic;
4590 1.1 mrg }
4591 1.1 mrg else
4592 1.1 mrg {
4593 1.1 mrg top_rtx = dynamic;
4594 1.1 mrg bot_rtx = vars;
4595 1.1 mrg }
4596 1.1 mrg
4597 1.1 mrg rtx size_rtx = expand_simple_binop (ptr_mode, MINUS, top_rtx, bot_rtx,
4598 1.1 mrg NULL_RTX, /* unsignedp = */0,
4599 1.1 mrg OPTAB_DIRECT);
4600 1.1 mrg
4601 1.1 mrg rtx fn = init_one_libfunc ("__hwasan_tag_memory");
4602 1.1 mrg emit_library_call (fn, LCT_NORMAL, VOIDmode,
4603 1.1 mrg bot_rtx, ptr_mode,
4604 1.1 mrg HWASAN_STACK_BACKGROUND, QImode,
4605 1.1 mrg size_rtx, ptr_mode);
4606 1.1 mrg
4607 1.1 mrg do_pending_stack_adjust ();
4608 1.1 mrg rtx_insn *insns = get_insns ();
4609 1.1 mrg end_sequence ();
4610 1.1 mrg return insns;
4611 1.1 mrg }
4612 1.1 mrg
4613 1.1 mrg /* Needs to be GTY(()), because cgraph_build_static_cdtor may
4614 1.1 mrg invoke ggc_collect. */
4615 1.1 mrg static GTY(()) tree hwasan_ctor_statements;
4616 1.1 mrg
4617 1.1 mrg /* Insert module initialization into this TU. This initialization calls the
4618 1.1 mrg initialization code for libhwasan. */
4619 1.1 mrg void
4620 1.1 mrg hwasan_finish_file (void)
4621 1.1 mrg {
4622 1.1 mrg /* Do not emit constructor initialization for the kernel.
4623 1.1 mrg (the kernel has its own initialization already). */
4624 1.1 mrg if (flag_sanitize & SANITIZE_KERNEL_HWADDRESS)
4625 1.1 mrg return;
4626 1.1 mrg
4627 1.1 mrg /* Avoid instrumenting code in the hwasan constructors/destructors. */
4628 1.1 mrg flag_sanitize &= ~SANITIZE_HWADDRESS;
4629 1.1 mrg int priority = MAX_RESERVED_INIT_PRIORITY - 1;
4630 1.1 mrg tree fn = builtin_decl_implicit (BUILT_IN_HWASAN_INIT);
4631 1.1 mrg append_to_statement_list (build_call_expr (fn, 0), &hwasan_ctor_statements);
4632 1.1 mrg cgraph_build_static_cdtor ('I', hwasan_ctor_statements, priority);
4633 1.1 mrg flag_sanitize |= SANITIZE_HWADDRESS;
4634 1.1 mrg }
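/* The constructor emitted above is morally equivalent to (function name
invented for illustration):

static void _GLOBAL__I_hwasan (void) { __hwasan_init (); }

registered through the static-ctor machinery with reserved priority
MAX_RESERVED_INIT_PRIORITY - 1, so it runs before user constructors. */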
4635 1.1 mrg
4636 1.1 mrg /* For stack tagging:
4637 1.1 mrg
4638 1.1 mrg Truncate `tag` to the number of bits that a tag uses (i.e. to
4639 1.1 mrg HWASAN_TAG_SIZE). Store the result in `target` if it's convenient. */
4640 1.1 mrg rtx
4641 1.1 mrg hwasan_truncate_to_tag_size (rtx tag, rtx target)
4642 1.1 mrg {
4643 1.1 mrg gcc_assert (GET_MODE (tag) == QImode);
4644 1.1 mrg if (HWASAN_TAG_SIZE != GET_MODE_PRECISION (QImode))
4645 1.1 mrg {
4646 1.1 mrg gcc_assert (GET_MODE_PRECISION (QImode) > HWASAN_TAG_SIZE);
4647 1.1 mrg rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << HWASAN_TAG_SIZE) - 1,
4648 1.1 mrg QImode);
4649 1.1 mrg tag = expand_simple_binop (QImode, AND, tag, mask, target,
4650 1.1 mrg /* unsignedp = */1, OPTAB_WIDEN);
4651 1.1 mrg gcc_assert (tag);
4652 1.1 mrg }
4653 1.1 mrg return tag;
4654 1.1 mrg }
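/* For instance, with 4-bit tags (HWASAN_TAG_SIZE == 4) the mask is 0xf,
so a QImode value of 0x13 truncates to the tag 0x3; when tags occupy
all of QImode the value is returned unchanged. */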
4655 1.1 mrg
4656 1.1 mrg /* Construct a function tree for __hwasan_{load,store}{1,2,4,8,16,_n}.
4657 1.1 mrg IS_STORE is either 1 (for a store) or 0 (for a load). */
4658 1.1 mrg static combined_fn
4659 1.1 mrg hwasan_check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
4660 1.1 mrg int *nargs)
4661 1.1 mrg {
4662 1.1 mrg static enum built_in_function check[2][2][6]
4663 1.1 mrg = { { { BUILT_IN_HWASAN_LOAD1, BUILT_IN_HWASAN_LOAD2,
4664 1.1 mrg BUILT_IN_HWASAN_LOAD4, BUILT_IN_HWASAN_LOAD8,
4665 1.1 mrg BUILT_IN_HWASAN_LOAD16, BUILT_IN_HWASAN_LOADN },
4666 1.1 mrg { BUILT_IN_HWASAN_STORE1, BUILT_IN_HWASAN_STORE2,
4667 1.1 mrg BUILT_IN_HWASAN_STORE4, BUILT_IN_HWASAN_STORE8,
4668 1.1 mrg BUILT_IN_HWASAN_STORE16, BUILT_IN_HWASAN_STOREN } },
4669 1.1 mrg { { BUILT_IN_HWASAN_LOAD1_NOABORT,
4670 1.1 mrg BUILT_IN_HWASAN_LOAD2_NOABORT,
4671 1.1 mrg BUILT_IN_HWASAN_LOAD4_NOABORT,
4672 1.1 mrg BUILT_IN_HWASAN_LOAD8_NOABORT,
4673 1.1 mrg BUILT_IN_HWASAN_LOAD16_NOABORT,
4674 1.1 mrg BUILT_IN_HWASAN_LOADN_NOABORT },
4675 1.1 mrg { BUILT_IN_HWASAN_STORE1_NOABORT,
4676 1.1 mrg BUILT_IN_HWASAN_STORE2_NOABORT,
4677 1.1 mrg BUILT_IN_HWASAN_STORE4_NOABORT,
4678 1.1 mrg BUILT_IN_HWASAN_STORE8_NOABORT,
4679 1.1 mrg BUILT_IN_HWASAN_STORE16_NOABORT,
4680 1.1 mrg BUILT_IN_HWASAN_STOREN_NOABORT } } };
4681 1.1 mrg if (size_in_bytes == -1)
4682 1.1 mrg {
4683 1.1 mrg *nargs = 2;
4684 1.1 mrg return as_combined_fn (check[recover_p][is_store][5]);
4685 1.1 mrg }
4686 1.1 mrg *nargs = 1;
4687 1.1 mrg int size_log2 = exact_log2 (size_in_bytes);
4688 1.1 mrg gcc_assert (size_log2 >= 0 && size_log2 <= 4);
4689 1.1 mrg return as_combined_fn (check[recover_p][is_store][size_log2]);
4690 1.1 mrg }
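/* For example, hwasan_check_func (/*is_store=*/true, /*recover_p=*/false,
8, &nargs) selects BUILT_IN_HWASAN_STORE8 with nargs == 1, while a
non-constant size (-1) selects the *_LOADN/*_STOREN variant, which also
takes the length and so sets nargs == 2. */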
4691 1.1 mrg
4692 1.1 mrg /* Expand the HWASAN_{LOAD,STORE} builtins. */
4693 1.1 mrg bool
4694 1.1 mrg hwasan_expand_check_ifn (gimple_stmt_iterator *iter, bool)
4695 1.1 mrg {
4696 1.1 mrg gimple *g = gsi_stmt (*iter);
4697 1.1 mrg location_t loc = gimple_location (g);
4698 1.1 mrg bool recover_p;
4699 1.1 mrg if (flag_sanitize & SANITIZE_USER_HWADDRESS)
4700 1.1 mrg recover_p = (flag_sanitize_recover & SANITIZE_USER_HWADDRESS) != 0;
4701 1.1 mrg else
4702 1.1 mrg recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_HWADDRESS) != 0;
4703 1.1 mrg
4704 1.1 mrg HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
4705 1.1 mrg gcc_assert (flags < ASAN_CHECK_LAST);
4706 1.1 mrg bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
4707 1.1 mrg bool is_store = (flags & ASAN_CHECK_STORE) != 0;
4708 1.1 mrg bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
4709 1.1 mrg
4710 1.1 mrg tree base = gimple_call_arg (g, 1);
4711 1.1 mrg tree len = gimple_call_arg (g, 2);
4712 1.1 mrg
4713 1.1 mrg /* `align` is unused for HWASAN_CHECK, but we pass the argument anyway
4714 1.1 mrg since that way the arguments match ASAN_CHECK. */
4715 1.1 mrg /* HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3)); */
4716 1.1 mrg
4717 1.1 mrg unsigned HOST_WIDE_INT size_in_bytes
4718 1.1 mrg = is_scalar_access ? tree_to_shwi (len) : -1;
4719 1.1 mrg
4720 1.1 mrg gimple_stmt_iterator gsi = *iter;
4721 1.1 mrg
4722 1.1 mrg if (!is_non_zero_len)
4723 1.1 mrg {
4724 1.1 mrg /* So, the length of the memory area to hwasan-protect is not
4725 1.1 mrg known at compile time to be non-zero. Let's guard the generated
4726 1.1 mrg instrumentation code like:
4727 1.1 mrg
4728 1.1 mrg if (len != 0)
4729 1.1 mrg {
4730 1.1 mrg // hwasan instrumentation code goes here.
4731 1.1 mrg }
4732 1.1 mrg // fallthrough instructions, starting with *ITER. */
4733 1.1 mrg
4734 1.1 mrg g = gimple_build_cond (NE_EXPR,
4735 1.1 mrg len,
4736 1.1 mrg build_int_cst (TREE_TYPE (len), 0),
4737 1.1 mrg NULL_TREE, NULL_TREE);
4738 1.1 mrg gimple_set_location (g, loc);
4739 1.1 mrg
4740 1.1 mrg basic_block then_bb, fallthrough_bb;
4741 1.1 mrg insert_if_then_before_iter (as_a <gcond *> (g), iter,
4742 1.1 mrg /*then_more_likely_p=*/true,
4743 1.1 mrg &then_bb, &fallthrough_bb);
4744 1.1 mrg /* Note that fallthrough_bb starts with the statement that was
4745 1.1 mrg pointed to by ITER. */
4746 1.1 mrg
4747 1.1 mrg /* The 'then block' of the 'if (len != 0)' condition is where
4748 1.1 mrg we'll generate the hwasan instrumentation code now. */
4749 1.1 mrg gsi = gsi_last_bb (then_bb);
4750 1.1 mrg }
4751 1.1 mrg
4752 1.1 mrg gimple_seq stmts = NULL;
4753 1.1 mrg tree base_addr = gimple_build (&stmts, loc, NOP_EXPR,
4754 1.1 mrg pointer_sized_int_node, base);
4755 1.1 mrg
4756 1.1 mrg int nargs = 0;
4757 1.1 mrg combined_fn fn
4758 1.1 mrg = hwasan_check_func (is_store, recover_p, size_in_bytes, &nargs);
4759 1.1 mrg if (nargs == 1)
4760 1.1 mrg gimple_build (&stmts, loc, fn, void_type_node, base_addr);
4761 1.1 mrg else
4762 1.1 mrg {
4763 1.1 mrg gcc_assert (nargs == 2);
4764 1.1 mrg tree sz_arg = gimple_build (&stmts, loc, NOP_EXPR,
4765 1.1 mrg pointer_sized_int_node, len);
4766 1.1 mrg gimple_build (&stmts, loc, fn, void_type_node, base_addr, sz_arg);
4767 1.1 mrg }
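/* The sequence built above is roughly, e.g. for a 4-byte scalar load:

_1 = (uintptr_t) base;
__hwasan_load4 (_1);

or, when the length is not a compile-time constant:

_1 = (uintptr_t) base;
_2 = (uintptr_t) len;
__hwasan_loadN (_1, _2); */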
4768 1.1 mrg
4769 1.1 mrg gsi_insert_seq_after (&gsi, stmts, GSI_NEW_STMT);
4770 1.1 mrg gsi_remove (iter, true);
4771 1.1 mrg *iter = gsi;
4772 1.1 mrg return false;
4773 1.1 mrg }
4774 1.1 mrg
4775 1.1 mrg /* For stack tagging:
4776 1.1 mrg
4777 1.1 mrg Dummy: the HWASAN_MARK internal function should only ever be in the code
4778 1.1 mrg after the sanopt pass. */
4779 1.1 mrg bool
4780 1.1 mrg hwasan_expand_mark_ifn (gimple_stmt_iterator *)
4781 1.1 mrg {
4782 1.1 mrg gcc_unreachable ();
4783 1.1 mrg }
4784 1.1 mrg
4785 1.1 mrg bool
4786 1.1 mrg gate_hwasan ()
4787 1.1 mrg {
4788 1.1 mrg return hwasan_sanitize_p ();
4789 1.1 mrg }
4790 1.1 mrg
4791 1.1 mrg #include "gt-asan.h"
4792