/*	$NetBSD: subr_asan.c,v 1.11 2019/09/05 16:19:16 maxv Exp $	*/
2
3 /*
4 * Copyright (c) 2018 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Maxime Villard.
9 *
10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions
12 * are met:
13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
20 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29 * POSSIBILITY OF SUCH DAMAGE.
30 */
31
32 #include <sys/cdefs.h>
33 __KERNEL_RCSID(0, "$NetBSD: subr_asan.c,v 1.11 2019/09/05 16:19:16 maxv Exp $");
34
35 #include <sys/param.h>
36 #include <sys/device.h>
37 #include <sys/kernel.h>
38 #include <sys/param.h>
39 #include <sys/conf.h>
40 #include <sys/systm.h>
41 #include <sys/types.h>
42 #include <sys/asan.h>
43
44 #include <uvm/uvm.h>
45
46 #ifdef KASAN_PANIC
47 #define REPORT panic
48 #else
49 #define REPORT printf
50 #endif
51
52 /* ASAN constants. Part of the compiler ABI. */
53 #define KASAN_SHADOW_SCALE_SHIFT 3
54 #define KASAN_SHADOW_SCALE_SIZE (1UL << KASAN_SHADOW_SCALE_SHIFT)
55 #define KASAN_SHADOW_MASK (KASAN_SHADOW_SCALE_SIZE - 1)
56
57 /* The MD code. */
58 #include <machine/asan.h>
59
60 /* ASAN ABI version. */
61 #if defined(__clang__) && (__clang_major__ - 0 >= 6)
62 #define ASAN_ABI_VERSION 8
63 #elif __GNUC_PREREQ__(7, 1) && !defined(__clang__)
64 #define ASAN_ABI_VERSION 8
65 #elif __GNUC_PREREQ__(6, 1) && !defined(__clang__)
66 #define ASAN_ABI_VERSION 6
67 #else
68 #error "Unsupported compiler version"
69 #endif
70
71 #define __RET_ADDR (unsigned long)__builtin_return_address(0)
72
73 /* Global variable descriptor. Part of the compiler ABI. */
/*
 * Descriptor emitted by the compiler for each instrumented global
 * variable. The layout is part of the compiler ABI (see ASAN_ABI_VERSION
 * above) and must not be changed.
 */
struct __asan_global_source_location {
	const char *filename;
	int line_no;
	int column_no;
};
struct __asan_global {
	const void *beg;		/* address of the global variable */
	size_t size;			/* size of the global variable */
	size_t size_with_redzone;	/* size with the redzone */
	const void *name;		/* name of the variable */
	const void *module_name;	/* name of the module where the var is declared */
	unsigned long has_dynamic_init;	/* the var has dyn initializer (c++) */
	struct __asan_global_source_location *location;
#if ASAN_ABI_VERSION >= 7
	/* Field added in ABI version 7: older compilers do not emit it. */
	uintptr_t odr_indicator;	/* the address of the ODR indicator symbol */
#endif
};
91
/* Set to true by kasan_init(); kasan_shadow_check() is a no-op until then. */
static bool kasan_enabled __read_mostly = false;
93
94 /* -------------------------------------------------------------------------- */
95
96 void
97 kasan_shadow_map(void *addr, size_t size)
98 {
99 size_t sz, npages, i;
100 vaddr_t sva, eva;
101
102 KASSERT((vaddr_t)addr % KASAN_SHADOW_SCALE_SIZE == 0);
103
104 sz = roundup(size, KASAN_SHADOW_SCALE_SIZE) / KASAN_SHADOW_SCALE_SIZE;
105
106 sva = (vaddr_t)kasan_md_addr_to_shad(addr);
107 eva = (vaddr_t)kasan_md_addr_to_shad(addr) + sz;
108
109 sva = rounddown(sva, PAGE_SIZE);
110 eva = roundup(eva, PAGE_SIZE);
111
112 npages = (eva - sva) / PAGE_SIZE;
113
114 KASSERT(sva >= KASAN_MD_SHADOW_START && eva < KASAN_MD_SHADOW_END);
115
116 for (i = 0; i < npages; i++) {
117 kasan_md_shadow_map_page(sva + i * PAGE_SIZE);
118 }
119 }
120
/*
 * Invoke the compiler-generated ASAN constructors; they register the
 * kernel's instrumented globals via __asan_register_globals().
 *
 * Fix: the entry count used to be computed with sizeof(uintptr_t)
 * while the table was walked with a uint64_t pointer; the two disagree
 * on ILP32. Walk the table by pointer bounds so no count is needed.
 */
static void
kasan_ctors(void)
{
	extern uint64_t __CTOR_LIST__, __CTOR_END__;
	uint64_t *ptr;

	for (ptr = &__CTOR_LIST__; ptr < &__CTOR_END__; ptr++) {
		void (*func)(void);

		func = (void *)(*ptr);
		(*func)();
	}
}
141
/*
 * Early bootstrap entry point, called before kasan_init(). Delegates to
 * the MD code, passing the boot stack so its shadow can be set up first.
 */
void
kasan_early_init(void *stack)
{
	kasan_md_early_init(stack);
}
147
/*
 * Main initialization. The order matters: KASAN must be marked enabled
 * before the constructors run, because they call kasan_mark() on the
 * instrumented globals.
 */
void
kasan_init(void)
{
	/* MD initialization. */
	kasan_md_init();

	/* Now officially enabled. */
	kasan_enabled = true;

	/* Call the ASAN constructors. */
	kasan_ctors();
}
160
161 static inline const char *
162 kasan_code_name(uint8_t code)
163 {
164 switch (code) {
165 case KASAN_GENERIC_REDZONE:
166 return "GenericRedZone";
167 case KASAN_MALLOC_REDZONE:
168 return "MallocRedZone";
169 case KASAN_KMEM_REDZONE:
170 return "KmemRedZone";
171 case KASAN_POOL_REDZONE:
172 return "PoolRedZone";
173 case KASAN_POOL_FREED:
174 return "PoolUseAfterFree";
175 case 1 ... 7:
176 return "RedZonePartial";
177 case KASAN_STACK_LEFT:
178 return "StackLeft";
179 case KASAN_STACK_RIGHT:
180 return "StackRight";
181 case KASAN_STACK_PARTIAL:
182 return "StackPartial";
183 case KASAN_USE_AFTER_SCOPE:
184 return "UseAfterScope";
185 default:
186 return "Unknown";
187 }
188 }
189
/*
 * Print (or panic on, if KASAN_PANIC) a report for an invalid access of
 * 'size' bytes at 'addr', detected at program counter 'pc', then ask the
 * MD code to unwind the stack.
 */
static void
kasan_report(unsigned long addr, size_t size, bool write, unsigned long pc,
    uint8_t code)
{
	REPORT("ASan: Unauthorized Access In %p: Addr %p [%zu byte%s, %s,"
	    " %s]\n",
	    (void *)pc, (void *)addr, size, (size > 1 ? "s" : ""),
	    (write ? "write" : "read"), kasan_code_name(code));
	kasan_md_unwind();
}
200
201 static __always_inline void
202 kasan_shadow_1byte_markvalid(unsigned long addr)
203 {
204 int8_t *byte = kasan_md_addr_to_shad((void *)addr);
205 int8_t last = (addr & KASAN_SHADOW_MASK) + 1;
206
207 *byte = last;
208 }
209
210 static __always_inline void
211 kasan_shadow_Nbyte_markvalid(const void *addr, size_t size)
212 {
213 size_t i;
214
215 for (i = 0; i < size; i++) {
216 kasan_shadow_1byte_markvalid((unsigned long)addr+i);
217 }
218 }
219
/*
 * Fill the shadow of [addr, addr + size) with 'code'. Both the address
 * and the size must be aligned to the shadow scale. Regions without
 * shadow (as decided by the MD code) are silently skipped.
 */
static __always_inline void
kasan_shadow_Nbyte_fill(const void *addr, size_t size, uint8_t code)
{
	void *shad;

	if (__predict_false(size == 0))
		return;
	if (__predict_false(kasan_md_unsupported((vaddr_t)addr)))
		return;

	KASSERT((vaddr_t)addr % KASAN_SHADOW_SCALE_SIZE == 0);
	KASSERT(size % KASAN_SHADOW_SCALE_SIZE == 0);

	shad = (void *)kasan_md_addr_to_shad(addr);
	/* One shadow byte covers KASAN_SHADOW_SCALE_SIZE memory bytes. */
	size = size >> KASAN_SHADOW_SCALE_SHIFT;

	__builtin_memset(shad, code, size);
}
238
239 void
240 kasan_add_redzone(size_t *size)
241 {
242 *size = roundup(*size, KASAN_SHADOW_SCALE_SIZE);
243 *size += KASAN_SHADOW_SCALE_SIZE;
244 }
245
/*
 * Reset the shadow of lwp 'l's uarea (USPACE bytes) to zero, i.e. fully
 * valid. NOTE(review): presumably called when a soft interrupt borrows
 * the lwp's stack, so stale stack poison does not trip the interrupt
 * handler — confirm against the softint code.
 */
void
kasan_softint(struct lwp *l)
{
	const void *stk = (const void *)uvm_lwp_getuarea(l);

	kasan_shadow_Nbyte_fill(stk, USPACE, 0);
}
253
/*
 * In an area of size 'sz_with_redz', mark the first 'size' bytes as valid,
 * and the rest as invalid. There are generally two use cases:
 *
 *  o kasan_mark(addr, size, sz_with_redz, code), with size < sz_with_redz.
 *    This marks the redzone at the end of the buffer as invalid.
 *
 *  o kasan_mark(addr, size, size, 0). This marks the entire buffer as valid.
 */
263 void
264 kasan_mark(const void *addr, size_t size, size_t sz_with_redz, uint8_t code)
265 {
266 size_t i, n, redz;
267 int8_t *shad;
268
269 KASSERT((vaddr_t)addr % KASAN_SHADOW_SCALE_SIZE == 0);
270 redz = sz_with_redz - roundup(size, KASAN_SHADOW_SCALE_SIZE);
271 KASSERT(redz % KASAN_SHADOW_SCALE_SIZE == 0);
272 shad = kasan_md_addr_to_shad(addr);
273
274 /* Chunks of 8 bytes, valid. */
275 n = size / KASAN_SHADOW_SCALE_SIZE;
276 for (i = 0; i < n; i++) {
277 *shad++ = 0;
278 }
279
280 /* Possibly one chunk, mid. */
281 if ((size & KASAN_SHADOW_MASK) != 0) {
282 *shad++ = (size & KASAN_SHADOW_MASK);
283 }
284
285 /* Chunks of 8 bytes, invalid. */
286 n = redz / KASAN_SHADOW_SCALE_SIZE;
287 for (i = 0; i < n; i++) {
288 *shad++ = code;
289 }
290 }
291
292 /* -------------------------------------------------------------------------- */
293
/*
 * True if the access [addr, addr + size) spans two 8-byte shadow cells.
 * Fix: parenthesize the arguments and the whole expansion so the macro
 * is safe inside larger expressions (CERT PRE01-C / PRE02-C).
 */
#define ADDR_CROSSES_SCALE_BOUNDARY(addr, size) 	\
	(((addr) >> KASAN_SHADOW_SCALE_SHIFT) !=	\
	    (((addr) + (size) - 1) >> KASAN_SHADOW_SCALE_SHIFT))
297
/*
 * Check that the byte at 'addr' is addressable. A shadow byte of 0 means
 * the whole 8-byte cell is valid; a positive value N means only the first
 * N bytes are. On failure, store the poison code in *code for the report.
 */
static __always_inline bool
kasan_shadow_1byte_isvalid(unsigned long addr, uint8_t *code)
{
	int8_t *byte = kasan_md_addr_to_shad((void *)addr);
	int8_t last = (addr & KASAN_SHADOW_MASK) + 1;

	if (__predict_true(*byte == 0 || last <= *byte)) {
		return true;
	}
	*code = *byte;
	return false;
}
310
/*
 * Check a 2-byte access at 'addr'. If it straddles two shadow cells,
 * fall back to two 1-byte checks; otherwise one shadow lookup suffices,
 * using the offset of the access's last byte within its cell.
 */
static __always_inline bool
kasan_shadow_2byte_isvalid(unsigned long addr, uint8_t *code)
{
	int8_t *byte, last;

	if (ADDR_CROSSES_SCALE_BOUNDARY(addr, 2)) {
		return (kasan_shadow_1byte_isvalid(addr, code) &&
		    kasan_shadow_1byte_isvalid(addr+1, code));
	}

	byte = kasan_md_addr_to_shad((void *)addr);
	last = ((addr + 1) & KASAN_SHADOW_MASK) + 1;

	if (__predict_true(*byte == 0 || last <= *byte)) {
		return true;
	}
	*code = *byte;
	return false;
}
330
331 static __always_inline bool
332 kasan_shadow_4byte_isvalid(unsigned long addr, uint8_t *code)
333 {
334 int8_t *byte, last;
335
336 if (ADDR_CROSSES_SCALE_BOUNDARY(addr, 4)) {
337 return (kasan_shadow_2byte_isvalid(addr, code) &&
338 kasan_shadow_2byte_isvalid(addr+2, code));
339 }
340
341 byte = kasan_md_addr_to_shad((void *)addr);
342 last = ((addr + 3) & KASAN_SHADOW_MASK) + 1;
343
344 if (__predict_true(*byte == 0 || last <= *byte)) {
345 return true;
346 }
347 *code = *byte;
348 return false;
349 }
350
351 static __always_inline bool
352 kasan_shadow_8byte_isvalid(unsigned long addr, uint8_t *code)
353 {
354 int8_t *byte, last;
355
356 if (ADDR_CROSSES_SCALE_BOUNDARY(addr, 8)) {
357 return (kasan_shadow_4byte_isvalid(addr, code) &&
358 kasan_shadow_4byte_isvalid(addr+4, code));
359 }
360
361 byte = kasan_md_addr_to_shad((void *)addr);
362 last = ((addr + 7) & KASAN_SHADOW_MASK) + 1;
363
364 if (__predict_true(*byte == 0 || last <= *byte)) {
365 return true;
366 }
367 *code = *byte;
368 return false;
369 }
370
/*
 * Check an arbitrary-size access byte by byte; stop at the first invalid
 * byte, leaving its poison code in *code.
 */
static __always_inline bool
kasan_shadow_Nbyte_isvalid(unsigned long addr, size_t size, uint8_t *code)
{
	size_t n;

	for (n = 0; n < size; n++) {
		if (!kasan_shadow_1byte_isvalid(addr + n, code))
			return false;
	}
	return true;
}
383
/*
 * Central check: validate an access of 'size' bytes at 'addr' and report
 * it if invalid. 'retaddr' is the caller's PC, used in the report.
 *
 * When 'size' is a compile-time constant (the common case for compiler
 * instrumentation), the switch collapses to one specialized fast-path
 * check; otherwise the generic byte-by-byte path is used.
 */
static __always_inline void
kasan_shadow_check(unsigned long addr, size_t size, bool write,
    unsigned long retaddr)
{
	uint8_t code;
	bool valid;

	/* Checks are no-ops until kasan_init() has run. */
	if (__predict_false(!kasan_enabled))
		return;
	if (__predict_false(size == 0))
		return;
	/* Addresses without shadow (MD decision) are not checked. */
	if (__predict_false(kasan_md_unsupported(addr)))
		return;

	if (__builtin_constant_p(size)) {
		switch (size) {
		case 1:
			valid = kasan_shadow_1byte_isvalid(addr, &code);
			break;
		case 2:
			valid = kasan_shadow_2byte_isvalid(addr, &code);
			break;
		case 4:
			valid = kasan_shadow_4byte_isvalid(addr, &code);
			break;
		case 8:
			valid = kasan_shadow_8byte_isvalid(addr, &code);
			break;
		default:
			valid = kasan_shadow_Nbyte_isvalid(addr, size, &code);
			break;
		}
	} else {
		valid = kasan_shadow_Nbyte_isvalid(addr, size, &code);
	}

	if (__predict_false(!valid)) {
		kasan_report(addr, size, write, retaddr, code);
	}
}
424
425 /* -------------------------------------------------------------------------- */
426
/*
 * Instrumented memcpy: validate the source for reading and the
 * destination for writing before performing the copy.
 */
void *
kasan_memcpy(void *dst, const void *src, size_t len)
{
	kasan_shadow_check((unsigned long)src, len, false, __RET_ADDR);
	kasan_shadow_check((unsigned long)dst, len, true, __RET_ADDR);
	return __builtin_memcpy(dst, src, len);
}
434
/* Instrumented memcmp: validate both buffers for reading, then compare. */
int
kasan_memcmp(const void *b1, const void *b2, size_t len)
{
	kasan_shadow_check((unsigned long)b1, len, false, __RET_ADDR);
	kasan_shadow_check((unsigned long)b2, len, false, __RET_ADDR);
	return __builtin_memcmp(b1, b2, len);
}
442
/* Instrumented memset: validate the buffer for writing, then fill it. */
void *
kasan_memset(void *b, int c, size_t len)
{
	kasan_shadow_check((unsigned long)b, len, true, __RET_ADDR);
	return __builtin_memset(b, c, len);
}
449
450 char *
451 kasan_strcpy(char *dst, const char *src)
452 {
453 char *save = dst;
454
455 while (1) {
456 kasan_shadow_check((unsigned long)src, 1, false, __RET_ADDR);
457 kasan_shadow_check((unsigned long)dst, 1, true, __RET_ADDR);
458 *dst = *src;
459 if (*src == '\0')
460 break;
461 src++, dst++;
462 }
463
464 return save;
465 }
466
467 int
468 kasan_strcmp(const char *s1, const char *s2)
469 {
470 while (1) {
471 kasan_shadow_check((unsigned long)s1, 1, false, __RET_ADDR);
472 kasan_shadow_check((unsigned long)s2, 1, false, __RET_ADDR);
473 if (*s1 != *s2)
474 break;
475 if (*s1 == '\0')
476 return 0;
477 s1++, s2++;
478 }
479
480 return (*(const unsigned char *)s1 - *(const unsigned char *)s2);
481 }
482
483 size_t
484 kasan_strlen(const char *str)
485 {
486 const char *s;
487
488 s = str;
489 while (1) {
490 kasan_shadow_check((unsigned long)s, 1, false, __RET_ADDR);
491 if (*s == '\0')
492 break;
493 s++;
494 }
495
496 return (s - str);
497 }
498
/*
 * The copy routines are remapped to their kasan_* wrappers via macros
 * (presumably in sys/systm.h / sys/asan.h — verify). Undefine the macros
 * here so we can declare both the wrappers and the real functions, and
 * have the wrappers call the real implementations.
 */
#undef kcopy
#undef copystr
#undef copyinstr
#undef copyoutstr
#undef copyin

int	kasan_kcopy(const void *, void *, size_t);
int	kasan_copystr(const void *, void *, size_t, size_t *);
int	kasan_copyinstr(const void *, void *, size_t, size_t *);
int	kasan_copyoutstr(const void *, void *, size_t, size_t *);
int	kasan_copyin(const void *, void *, size_t);
int	kcopy(const void *, void *, size_t);
int	copystr(const void *, void *, size_t, size_t *);
int	copyinstr(const void *, void *, size_t, size_t *);
int	copyoutstr(const void *, void *, size_t, size_t *);
int	copyin(const void *, void *, size_t);
515
/*
 * Instrumented kcopy (kernel-to-kernel copy): both buffers are kernel
 * addresses, so validate the source for reading and the destination for
 * writing before delegating.
 */
int
kasan_kcopy(const void *src, void *dst, size_t len)
{
	kasan_shadow_check((unsigned long)src, len, false, __RET_ADDR);
	kasan_shadow_check((unsigned long)dst, len, true, __RET_ADDR);
	return kcopy(src, dst, len);
}
523
/*
 * Instrumented copystr. Only the kernel destination is checked (for
 * writing, up to 'len'); the source is checked byte-wise by the real
 * routine's own accesses if instrumented elsewhere.
 */
int
kasan_copystr(const void *kfaddr, void *kdaddr, size_t len, size_t *done)
{
	kasan_shadow_check((unsigned long)kdaddr, len, true, __RET_ADDR);
	return copystr(kfaddr, kdaddr, len, done);
}
530
/*
 * Instrumented copyin: 'uaddr' is a user address with no kernel shadow,
 * so only the kernel destination is validated for writing.
 */
int
kasan_copyin(const void *uaddr, void *kaddr, size_t len)
{
	kasan_shadow_check((unsigned long)kaddr, len, true, __RET_ADDR);
	return copyin(uaddr, kaddr, len);
}
537
/*
 * Instrumented copyinstr: only the kernel destination is validated for
 * writing; the user source has no shadow.
 */
int
kasan_copyinstr(const void *uaddr, void *kaddr, size_t len, size_t *done)
{
	kasan_shadow_check((unsigned long)kaddr, len, true, __RET_ADDR);
	return copyinstr(uaddr, kaddr, len, done);
}
544
/*
 * Instrumented copyoutstr: only the kernel source is validated, for
 * reading this time; the user destination has no shadow.
 */
int
kasan_copyoutstr(const void *kaddr, void *uaddr, size_t len, size_t *done)
{
	kasan_shadow_check((unsigned long)kaddr, len, false, __RET_ADDR);
	return copyoutstr(kaddr, uaddr, len, done);
}
551
552 /* -------------------------------------------------------------------------- */
553
/*
 * As with the copy routines above, the atomic ops are remapped to their
 * kasan_* wrappers by macros; undefine them all so the wrappers below
 * can be defined and can call the real functions.
 */
#undef atomic_add_32
#undef atomic_add_int
#undef atomic_add_long
#undef atomic_add_ptr
#undef atomic_add_64
#undef atomic_add_32_nv
#undef atomic_add_int_nv
#undef atomic_add_long_nv
#undef atomic_add_ptr_nv
#undef atomic_add_64_nv
#undef atomic_and_32
#undef atomic_and_uint
#undef atomic_and_ulong
#undef atomic_and_64
#undef atomic_and_32_nv
#undef atomic_and_uint_nv
#undef atomic_and_ulong_nv
#undef atomic_and_64_nv
#undef atomic_or_32
#undef atomic_or_uint
#undef atomic_or_ulong
#undef atomic_or_64
#undef atomic_or_32_nv
#undef atomic_or_uint_nv
#undef atomic_or_ulong_nv
#undef atomic_or_64_nv
#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_64
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni
#undef atomic_cas_64_ni
#undef atomic_swap_32
#undef atomic_swap_uint
#undef atomic_swap_ulong
#undef atomic_swap_ptr
#undef atomic_swap_64
#undef atomic_dec_32
#undef atomic_dec_uint
#undef atomic_dec_ulong
#undef atomic_dec_ptr
#undef atomic_dec_64
#undef atomic_dec_32_nv
#undef atomic_dec_uint_nv
#undef atomic_dec_ulong_nv
#undef atomic_dec_ptr_nv
#undef atomic_dec_64_nv
#undef atomic_inc_32
#undef atomic_inc_uint
#undef atomic_inc_ulong
#undef atomic_inc_ptr
#undef atomic_inc_64
#undef atomic_inc_32_nv
#undef atomic_inc_uint_nv
#undef atomic_inc_ulong_nv
#undef atomic_inc_ptr_nv
#undef atomic_inc_64_nv
615
/*
 * Generate kasan_atomic_add_<name>() and kasan_atomic_add_<name>_nv():
 * check sizeof(tret) bytes at the target for writing, then delegate to
 * the real atomic op. Same pattern for the AND/OR/CAS/SWAP/DEC/INC
 * generators below.
 */
#define ASAN_ATOMIC_FUNC_ADD(name, tret, targ1, targ2) \
	void atomic_add_##name(volatile targ1 *, targ2); \
	void kasan_atomic_add_##name(volatile targ1 *, targ2); \
	void kasan_atomic_add_##name(volatile targ1 *ptr, targ2 val) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		atomic_add_##name(ptr, val); \
	} \
	tret atomic_add_##name##_nv(volatile targ1 *, targ2); \
	tret kasan_atomic_add_##name##_nv(volatile targ1 *, targ2); \
	tret kasan_atomic_add_##name##_nv(volatile targ1 *ptr, targ2 val) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_add_##name##_nv(ptr, val); \
	}
633
/* Checked wrappers for atomic_and_* and atomic_and_*_nv. */
#define ASAN_ATOMIC_FUNC_AND(name, tret, targ1, targ2) \
	void atomic_and_##name(volatile targ1 *, targ2); \
	void kasan_atomic_and_##name(volatile targ1 *, targ2); \
	void kasan_atomic_and_##name(volatile targ1 *ptr, targ2 val) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		atomic_and_##name(ptr, val); \
	} \
	tret atomic_and_##name##_nv(volatile targ1 *, targ2); \
	tret kasan_atomic_and_##name##_nv(volatile targ1 *, targ2); \
	tret kasan_atomic_and_##name##_nv(volatile targ1 *ptr, targ2 val) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_and_##name##_nv(ptr, val); \
	}

/* Checked wrappers for atomic_or_* and atomic_or_*_nv. */
#define ASAN_ATOMIC_FUNC_OR(name, tret, targ1, targ2) \
	void atomic_or_##name(volatile targ1 *, targ2); \
	void kasan_atomic_or_##name(volatile targ1 *, targ2); \
	void kasan_atomic_or_##name(volatile targ1 *ptr, targ2 val) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		atomic_or_##name(ptr, val); \
	} \
	tret atomic_or_##name##_nv(volatile targ1 *, targ2); \
	tret kasan_atomic_or_##name##_nv(volatile targ1 *, targ2); \
	tret kasan_atomic_or_##name##_nv(volatile targ1 *ptr, targ2 val) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_or_##name##_nv(ptr, val); \
	}
669
/* Checked wrappers for atomic_cas_* and atomic_cas_*_ni. */
#define ASAN_ATOMIC_FUNC_CAS(name, tret, targ1, targ2) \
	tret atomic_cas_##name(volatile targ1 *, targ2, targ2); \
	tret kasan_atomic_cas_##name(volatile targ1 *, targ2, targ2); \
	tret kasan_atomic_cas_##name(volatile targ1 *ptr, targ2 exp, targ2 new) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_cas_##name(ptr, exp, new); \
	} \
	tret atomic_cas_##name##_ni(volatile targ1 *, targ2, targ2); \
	tret kasan_atomic_cas_##name##_ni(volatile targ1 *, targ2, targ2); \
	tret kasan_atomic_cas_##name##_ni(volatile targ1 *ptr, targ2 exp, targ2 new) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_cas_##name##_ni(ptr, exp, new); \
	}

/* Checked wrappers for atomic_swap_*. */
#define ASAN_ATOMIC_FUNC_SWAP(name, tret, targ1, targ2) \
	tret atomic_swap_##name(volatile targ1 *, targ2); \
	tret kasan_atomic_swap_##name(volatile targ1 *, targ2); \
	tret kasan_atomic_swap_##name(volatile targ1 *ptr, targ2 val) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_swap_##name(ptr, val); \
	}
697
/* Checked wrappers for atomic_dec_* and atomic_dec_*_nv. */
#define ASAN_ATOMIC_FUNC_DEC(name, tret, targ1) \
	void atomic_dec_##name(volatile targ1 *); \
	void kasan_atomic_dec_##name(volatile targ1 *); \
	void kasan_atomic_dec_##name(volatile targ1 *ptr) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		atomic_dec_##name(ptr); \
	} \
	tret atomic_dec_##name##_nv(volatile targ1 *); \
	tret kasan_atomic_dec_##name##_nv(volatile targ1 *); \
	tret kasan_atomic_dec_##name##_nv(volatile targ1 *ptr) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_dec_##name##_nv(ptr); \
	}

/* Checked wrappers for atomic_inc_* and atomic_inc_*_nv. */
#define ASAN_ATOMIC_FUNC_INC(name, tret, targ1) \
	void atomic_inc_##name(volatile targ1 *); \
	void kasan_atomic_inc_##name(volatile targ1 *); \
	void kasan_atomic_inc_##name(volatile targ1 *ptr) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		atomic_inc_##name(ptr); \
	} \
	tret atomic_inc_##name##_nv(volatile targ1 *); \
	tret kasan_atomic_inc_##name##_nv(volatile targ1 *); \
	tret kasan_atomic_inc_##name##_nv(volatile targ1 *ptr) \
	{ \
		kasan_shadow_check((uintptr_t)ptr, sizeof(tret), true, \
		    __RET_ADDR); \
		return atomic_inc_##name##_nv(ptr); \
	}
733
/*
 * Instantiate the checked atomic wrappers for every type variant the
 * kernel's atomic_ops(3) interface provides.
 */
ASAN_ATOMIC_FUNC_ADD(32, uint32_t, uint32_t, int32_t);
ASAN_ATOMIC_FUNC_ADD(64, uint64_t, uint64_t, int64_t);
ASAN_ATOMIC_FUNC_ADD(int, unsigned int, unsigned int, int);
ASAN_ATOMIC_FUNC_ADD(long, unsigned long, unsigned long, long);
ASAN_ATOMIC_FUNC_ADD(ptr, void *, void, ssize_t);

ASAN_ATOMIC_FUNC_AND(32, uint32_t, uint32_t, uint32_t);
ASAN_ATOMIC_FUNC_AND(64, uint64_t, uint64_t, uint64_t);
ASAN_ATOMIC_FUNC_AND(uint, unsigned int, unsigned int, unsigned int);
ASAN_ATOMIC_FUNC_AND(ulong, unsigned long, unsigned long, unsigned long);

ASAN_ATOMIC_FUNC_OR(32, uint32_t, uint32_t, uint32_t);
ASAN_ATOMIC_FUNC_OR(64, uint64_t, uint64_t, uint64_t);
ASAN_ATOMIC_FUNC_OR(uint, unsigned int, unsigned int, unsigned int);
ASAN_ATOMIC_FUNC_OR(ulong, unsigned long, unsigned long, unsigned long);

ASAN_ATOMIC_FUNC_CAS(32, uint32_t, uint32_t, uint32_t);
ASAN_ATOMIC_FUNC_CAS(64, uint64_t, uint64_t, uint64_t);
ASAN_ATOMIC_FUNC_CAS(uint, unsigned int, unsigned int, unsigned int);
ASAN_ATOMIC_FUNC_CAS(ulong, unsigned long, unsigned long, unsigned long);
ASAN_ATOMIC_FUNC_CAS(ptr, void *, void, void *);

ASAN_ATOMIC_FUNC_SWAP(32, uint32_t, uint32_t, uint32_t);
ASAN_ATOMIC_FUNC_SWAP(64, uint64_t, uint64_t, uint64_t);
ASAN_ATOMIC_FUNC_SWAP(uint, unsigned int, unsigned int, unsigned int);
ASAN_ATOMIC_FUNC_SWAP(ulong, unsigned long, unsigned long, unsigned long);
ASAN_ATOMIC_FUNC_SWAP(ptr, void *, void, void *);

ASAN_ATOMIC_FUNC_DEC(32, uint32_t, uint32_t)
ASAN_ATOMIC_FUNC_DEC(64, uint64_t, uint64_t)
ASAN_ATOMIC_FUNC_DEC(uint, unsigned int, unsigned int);
ASAN_ATOMIC_FUNC_DEC(ulong, unsigned long, unsigned long);
ASAN_ATOMIC_FUNC_DEC(ptr, void *, void);

ASAN_ATOMIC_FUNC_INC(32, uint32_t, uint32_t)
ASAN_ATOMIC_FUNC_INC(64, uint64_t, uint64_t)
ASAN_ATOMIC_FUNC_INC(uint, unsigned int, unsigned int);
ASAN_ATOMIC_FUNC_INC(ulong, unsigned long, unsigned long);
ASAN_ATOMIC_FUNC_INC(ptr, void *, void);
773
774 /* -------------------------------------------------------------------------- */
775
776 void __asan_register_globals(struct __asan_global *, size_t);
777 void __asan_unregister_globals(struct __asan_global *, size_t);
778
779 void
780 __asan_register_globals(struct __asan_global *globals, size_t n)
781 {
782 size_t i;
783
784 for (i = 0; i < n; i++) {
785 kasan_mark(globals[i].beg, globals[i].size,
786 globals[i].size_with_redzone, KASAN_GENERIC_REDZONE);
787 }
788 }
789
/*
 * Compiler ABI hook for module unload. The kernel never calls the
 * destructors, so this is intentionally empty.
 */
void
__asan_unregister_globals(struct __asan_global *globals, size_t n)
{
	/* never called */
}
795
/*
 * Generate the fixed-size __asan_load/store hooks the compiler emits
 * before each memory access. The _noabort variants behave identically
 * here, since kasan_report() only prints unless KASAN_PANIC is set.
 */
#define ASAN_LOAD_STORE(size)					\
	void __asan_load##size(unsigned long);			\
	void __asan_load##size(unsigned long addr)		\
	{							\
		kasan_shadow_check(addr, size, false, __RET_ADDR);\
	} 							\
	void __asan_load##size##_noabort(unsigned long);	\
	void __asan_load##size##_noabort(unsigned long addr)	\
	{							\
		kasan_shadow_check(addr, size, false, __RET_ADDR);\
	}							\
	void __asan_store##size(unsigned long);			\
	void __asan_store##size(unsigned long addr)		\
	{							\
		kasan_shadow_check(addr, size, true, __RET_ADDR);\
	}							\
	void __asan_store##size##_noabort(unsigned long);	\
	void __asan_store##size##_noabort(unsigned long addr)	\
	{							\
		kasan_shadow_check(addr, size, true, __RET_ADDR);\
	}
817
/* Instantiate the hooks for every fixed access size the ABI defines. */
ASAN_LOAD_STORE(1);
ASAN_LOAD_STORE(2);
ASAN_LOAD_STORE(4);
ASAN_LOAD_STORE(8);
ASAN_LOAD_STORE(16);

/* Variable-size hooks, emitted for accesses of non-constant size. */
void __asan_loadN(unsigned long, size_t);
void __asan_loadN_noabort(unsigned long, size_t);
void __asan_storeN(unsigned long, size_t);
void __asan_storeN_noabort(unsigned long, size_t);
void __asan_handle_no_return(void);
829
/* Check an N-byte read at 'addr'. */
void
__asan_loadN(unsigned long addr, size_t size)
{
	kasan_shadow_check(addr, size, false, __RET_ADDR);
}

/* Same as __asan_loadN; reports are non-fatal here anyway. */
void
__asan_loadN_noabort(unsigned long addr, size_t size)
{
	kasan_shadow_check(addr, size, false, __RET_ADDR);
}

/* Check an N-byte write at 'addr'. */
void
__asan_storeN(unsigned long addr, size_t size)
{
	kasan_shadow_check(addr, size, true, __RET_ADDR);
}

/* Same as __asan_storeN; reports are non-fatal here anyway. */
void
__asan_storeN_noabort(unsigned long addr, size_t size)
{
	kasan_shadow_check(addr, size, true, __RET_ADDR);
}
853
/*
 * Called by instrumented code before noreturn functions; userland ASAN
 * unpoisons the stack here, the kernel does not need to.
 */
void
__asan_handle_no_return(void)
{
	/* nothing */
}
859
/*
 * Generate __asan_set_shadow_XX(): write the raw shadow value 0xXX over
 * 'size' shadow bytes. Note 'addr' is already a shadow address here, not
 * a kernel address. The instantiated codes match the compiler's stack
 * poison values (f1/f2/f3 = stack redzones, f5 = after-scope, f8 = ...).
 */
#define ASAN_SET_SHADOW(byte) \
	void __asan_set_shadow_##byte(void *, size_t);			\
	void __asan_set_shadow_##byte(void *addr, size_t size)		\
	{								\
		__builtin_memset((void *)addr, 0x##byte, size);		\
	}

ASAN_SET_SHADOW(00);
ASAN_SET_SHADOW(f1);
ASAN_SET_SHADOW(f2);
ASAN_SET_SHADOW(f3);
ASAN_SET_SHADOW(f5);
ASAN_SET_SHADOW(f8);
873
874 void __asan_poison_stack_memory(const void *, size_t);
875 void __asan_unpoison_stack_memory(const void *, size_t);
876
877 void __asan_poison_stack_memory(const void *addr, size_t size)
878 {
879 size = roundup(size, KASAN_SHADOW_SCALE_SIZE);
880 kasan_shadow_Nbyte_fill(addr, size, KASAN_USE_AFTER_SCOPE);
881 }
882
/* Compiler hook: re-validate a stack region whose scope is re-entered. */
void __asan_unpoison_stack_memory(const void *addr, size_t size)
{
	kasan_shadow_Nbyte_markvalid(addr, size);
}
887