Home | History | Annotate | Line # | Download | only in asan
      1  1.1  mrg //===-- asan_fake_stack.cpp -----------------------------------------------===//
      2  1.1  mrg //
      3  1.1  mrg // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
      4  1.1  mrg // See https://llvm.org/LICENSE.txt for license information.
      5  1.1  mrg // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
      6  1.1  mrg //
      7  1.1  mrg //===----------------------------------------------------------------------===//
      8  1.1  mrg //
      9  1.1  mrg // This file is a part of AddressSanitizer, an address sanity checker.
     10  1.1  mrg //
     11  1.1  mrg // FakeStack is used to detect use-after-return bugs.
     12  1.1  mrg //===----------------------------------------------------------------------===//
     13  1.1  mrg 
     14  1.1  mrg #include "asan_allocator.h"
     15  1.1  mrg #include "asan_poisoning.h"
     16  1.1  mrg #include "asan_thread.h"
     17  1.1  mrg 
     18  1.1  mrg namespace __asan {
     19  1.1  mrg 
// kAsanStackAfterReturnMagic replicated to 2, 4 and 8 bytes; kMagic8 fills an
// entire u64 of shadow with the after-return poison value (see SetShadow).
static const u64 kMagic1 = kAsanStackAfterReturnMagic;
static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;

// Dynamic allocas get 32-byte redzones; the mask is used to round addresses
// to this 32-byte granularity in __asan_alloca_poison.
static const u64 kAllocaRedzoneSize = 32UL;
static const u64 kAllocaRedzoneMask = 31UL;
     27  1.1  mrg 
// For small size classes inline PoisonShadow for better performance.
// Writes |magic| over the shadow of the fake frame at |ptr|. For small size
// classes the whole frame's shadow is written as (1 << class_id) u64 stores
// (note: |size| is ignored on that path); larger classes fall back to
// poisoning only |size| bytes via PoisonShadow.
ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
  u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
  if (ASAN_SHADOW_SCALE == 3 && class_id <= 6) {
    // This code expects ASAN_SHADOW_SCALE=3.
    for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
      shadow[i] = magic;
      // Make sure this does not become memset.
      SanitizerBreakOptimization(nullptr);
    }
  } else {
    // The size class is too big, it's cheaper to poison only size bytes.
    PoisonShadow(ptr, size, static_cast<u8>(magic));
  }
}
     43  1.1  mrg 
     44  1.1  mrg FakeStack *FakeStack::Create(uptr stack_size_log) {
     45  1.1  mrg   static uptr kMinStackSizeLog = 16;
     46  1.1  mrg   static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
     47  1.1  mrg   if (stack_size_log < kMinStackSizeLog)
     48  1.1  mrg     stack_size_log = kMinStackSizeLog;
     49  1.1  mrg   if (stack_size_log > kMaxStackSizeLog)
     50  1.1  mrg     stack_size_log = kMaxStackSizeLog;
     51  1.1  mrg   uptr size = RequiredSize(stack_size_log);
     52  1.1  mrg   FakeStack *res = reinterpret_cast<FakeStack *>(
     53  1.1  mrg       flags()->uar_noreserve ? MmapNoReserveOrDie(size, "FakeStack")
     54  1.1  mrg                              : MmapOrDie(size, "FakeStack"));
     55  1.1  mrg   res->stack_size_log_ = stack_size_log;
     56  1.1  mrg   u8 *p = reinterpret_cast<u8 *>(res);
     57  1.3  mrg   VReport(1,
     58  1.3  mrg           "T%d: FakeStack created: %p -- %p stack_size_log: %zd; "
     59  1.1  mrg           "mmapped %zdK, noreserve=%d \n",
     60  1.3  mrg           GetCurrentTidOrInvalid(), (void *)p,
     61  1.3  mrg           (void *)(p + FakeStack::RequiredSize(stack_size_log)), stack_size_log,
     62  1.1  mrg           size >> 10, flags()->uar_noreserve);
     63  1.1  mrg   return res;
     64  1.1  mrg }
     65  1.1  mrg 
// Tears this FakeStack down: unpoisons its whole shadow, optionally reports
// per-class usage statistics (verbosity >= 2), releases the shadow pages and
// unmaps the FakeStack itself. |tid| is used only in the report message.
void FakeStack::Destroy(int tid) {
  PoisonAll(0);
  if (Verbosity() >= 2) {
    InternalScopedString str;
    // One "<class>: <hint position>/<total frames>; " entry per size class.
    for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
      str.AppendF("%zd: %zd/%zd; ", class_id, hint_position_[class_id],
                  NumberOfFrames(stack_size_log(), class_id));
    Report("T%d: FakeStack destroyed: %s\n", tid, str.data());
  }
  uptr size = RequiredSize(stack_size_log_);
  // Return this region's shadow memory to the OS before unmapping the region.
  FlushUnneededASanShadowMemory(reinterpret_cast<uptr>(this), size);
  UnmapOrDie(this, size);
}
     79  1.1  mrg 
     80  1.1  mrg void FakeStack::PoisonAll(u8 magic) {
     81  1.1  mrg   PoisonShadow(reinterpret_cast<uptr>(this), RequiredSize(stack_size_log()),
     82  1.1  mrg                magic);
     83  1.1  mrg }
     84  1.1  mrg 
// Returns a free fake frame of size class |class_id|, or null when every
// frame of that class is busy. |real_stack| is the caller's real frame
// address; it is recorded in the frame and later consulted by GC(). Runs a
// GC pass first if a no-return event was flagged via HandleNoReturn().
#if !defined(_MSC_VER) || defined(__clang__)
ALWAYS_INLINE USED
#endif
FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
                               uptr real_stack) {
  CHECK_LT(class_id, kNumberOfSizeClasses);
  if (needs_gc_)
    GC(real_stack);
  // Probe frames starting at the per-class rotating hint, which advances on
  // every probe (including unsuccessful ones).
  uptr &hint_position = hint_position_[class_id];
  const int num_iter = NumberOfFrames(stack_size_log, class_id);
  u8 *flags = GetFlags(stack_size_log, class_id);
  for (int i = 0; i < num_iter; i++) {
    uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
    // This part is tricky. On one hand, checking and setting flags[pos]
    // should be atomic to ensure async-signal safety. But on the other hand,
    // if the signal arrives between checking and setting flags[pos], the
    // signal handler's fake stack will start from a different hint_position
    // and so will not touch this particular byte. So, it is safe to do this
    // with regular non-atomic load and store (at least I was not able to make
    // this code crash).
    if (flags[pos]) continue;
    flags[pos] = 1;
    FakeFrame *res = reinterpret_cast<FakeFrame *>(
        GetFrame(stack_size_log, class_id, pos));
    res->real_stack = real_stack;
    // Stash the address of this frame's busy flag inside the frame itself,
    // presumably so deallocation can clear it given only the frame address
    // (see FakeStack::Deallocate callers).
    *SavedFlagPtr(reinterpret_cast<uptr>(res), class_id) = &flags[pos];
    return res;
  }
  return nullptr; // We are out of fake stack.
}
    115  1.1  mrg 
// If |ptr| lies within one of this FakeStack's frames, returns the frame's
// base address and stores the frame's usable byte range into
// *frame_beg/*frame_end; returns 0 otherwise. Note that the out-params are
// written only on a hit.
uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  uptr stack_size_log = this->stack_size_log();
  uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  if (ptr < beg || ptr >= end) return 0;
  // Each size class occupies a contiguous (1 << stack_size_log)-byte region
  // starting at |beg|, so the class is the region index of |ptr|.
  uptr class_id = (ptr - beg) >> stack_size_log;
  uptr base = beg + (class_id << stack_size_log);
  CHECK_LE(base, ptr);
  CHECK_LT(ptr, base + (((uptr)1) << stack_size_log));
  // Frame index within the class; frames of class_id are
  // (1 << (kMinStackFrameSizeLog + class_id)) bytes each.
  uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  uptr res = base + pos * BytesInSizeClass(class_id);
  *frame_end = res + BytesInSizeClass(class_id);
  // The first sizeof(FakeFrame) bytes are the frame header, not user data.
  *frame_beg = res + sizeof(FakeFrame);
  return res;
}
    131  1.1  mrg 
// Called when a no-return event (longjmp, exception, etc.) is detected:
// fake frames may have leaked, so request garbage collection; the actual
// GC runs lazily on the next Allocate().
void FakeStack::HandleNoReturn() {
  needs_gc_ = true;
}
    135  1.1  mrg 
// Hack: The statement below is not true if we take into account sigaltstack or
// makecontext. It should be possible to make GC to discard wrong stack frame if
// we use these tools. For now, let's support the simplest case and allow GC to
// discard only frames from the default stack, assuming there is no buffer on
// the stack which is used for makecontext or sigaltstack.
//
// When throw, longjmp or some such happens we don't call OnFree() and
// as the result may leak one or more fake frames, but the good news is that
// we are notified about all such events by HandleNoReturn().
// If we recently had such no-return event we need to collect garbage frames.
// We do it based on their 'real_stack' values -- everything that is lower
// than the current real_stack is garbage.
NOINLINE void FakeStack::GC(uptr real_stack) {
  AsanThread *curr_thread = GetCurrentThread();
  if (!curr_thread)
    return;  // Try again when we have a thread.
  auto top = curr_thread->stack_top();
  auto bottom = curr_thread->stack_bottom();
  // Only collect when the triggering frame pointer is on the default stack
  // (see the sigaltstack/makecontext caveat above).
  if (real_stack < bottom || real_stack > top)
    return;  // Not the default stack.

  for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    u8 *flags = GetFlags(stack_size_log(), class_id);
    for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
         i++) {
      if (flags[i] == 0) continue;  // not allocated.
      FakeFrame *ff = reinterpret_cast<FakeFrame *>(
          GetFrame(stack_size_log(), class_id, i));
      // GC only on the default stack.
      if (bottom < ff->real_stack && ff->real_stack < real_stack) {
        // Frame belongs to a deeper (already-unwound) call: reclaim it.
        flags[i] = 0;
        // Poison the frame, so the any access will be reported as UAR.
        SetShadow(reinterpret_cast<uptr>(ff), BytesInSizeClass(class_id),
                  class_id, kMagic8);
      }
    }
  }
  needs_gc_ = false;
}
    175  1.1  mrg 
    176  1.1  mrg void FakeStack::ForEachFakeFrame(RangeIteratorCallback callback, void *arg) {
    177  1.1  mrg   for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
    178  1.1  mrg     u8 *flags = GetFlags(stack_size_log(), class_id);
    179  1.1  mrg     for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
    180  1.1  mrg          i++) {
    181  1.1  mrg       if (flags[i] == 0) continue;  // not allocated.
    182  1.1  mrg       FakeFrame *ff = reinterpret_cast<FakeFrame *>(
    183  1.1  mrg           GetFrame(stack_size_log(), class_id, i));
    184  1.1  mrg       uptr begin = reinterpret_cast<uptr>(ff);
    185  1.1  mrg       callback(begin, begin + FakeStack::BytesInSizeClass(class_id), arg);
    186  1.1  mrg     }
    187  1.1  mrg   }
    188  1.1  mrg }
    189  1.1  mrg 
#if (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
// Per-thread cache of the current thread's FakeStack, letting the hot path
// (GetFakeStackFast) skip the GetCurrentThread() lookup.
static THREADLOCAL FakeStack *fake_stack_tls;

// Returns the cached FakeStack for this thread, or null if none was set.
FakeStack *GetTLSFakeStack() {
  return fake_stack_tls;
}
// Caches |fs| as this thread's FakeStack.
void SetTLSFakeStack(FakeStack *fs) {
  fake_stack_tls = fs;
}
#else
// No TLS cache on this platform: lookups always fall through to the slow
// path (GetFakeStack), and SetTLSFakeStack is a no-op.
FakeStack *GetTLSFakeStack() { return 0; }
void SetTLSFakeStack(FakeStack *fs) { }
#endif  // (SANITIZER_LINUX && !SANITIZER_ANDROID) || SANITIZER_FUCHSIA
    203  1.1  mrg 
    204  1.1  mrg static FakeStack *GetFakeStack() {
    205  1.1  mrg   AsanThread *t = GetCurrentThread();
    206  1.1  mrg   if (!t) return nullptr;
    207  1.3  mrg   return t->get_or_create_fake_stack();
    208  1.1  mrg }
    209  1.1  mrg 
    210  1.1  mrg static FakeStack *GetFakeStackFast() {
    211  1.1  mrg   if (FakeStack *fs = GetTLSFakeStack())
    212  1.1  mrg     return fs;
    213  1.1  mrg   if (!__asan_option_detect_stack_use_after_return)
    214  1.1  mrg     return nullptr;
    215  1.1  mrg   return GetFakeStack();
    216  1.1  mrg }
    217  1.1  mrg 
    218  1.3  mrg static FakeStack *GetFakeStackFastAlways() {
    219  1.3  mrg   if (FakeStack *fs = GetTLSFakeStack())
    220  1.3  mrg     return fs;
    221  1.3  mrg   return GetFakeStack();
    222  1.3  mrg }
    223  1.3  mrg 
    224  1.3  mrg static ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
    225  1.1  mrg   FakeStack *fs = GetFakeStackFast();
    226  1.4  mrg   if (!fs)
    227  1.4  mrg     return 0;
    228  1.4  mrg   FakeFrame *ff =
    229  1.4  mrg       fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
    230  1.4  mrg   if (!ff)
    231  1.4  mrg     return 0;  // Out of fake stack.
    232  1.1  mrg   uptr ptr = reinterpret_cast<uptr>(ff);
    233  1.1  mrg   SetShadow(ptr, size, class_id, 0);
    234  1.1  mrg   return ptr;
    235  1.1  mrg }
    236  1.1  mrg 
    237  1.3  mrg static ALWAYS_INLINE uptr OnMallocAlways(uptr class_id, uptr size) {
    238  1.3  mrg   FakeStack *fs = GetFakeStackFastAlways();
    239  1.3  mrg   if (!fs)
    240  1.3  mrg     return 0;
    241  1.4  mrg   FakeFrame *ff =
    242  1.4  mrg       fs->Allocate(fs->stack_size_log(), class_id, GET_CURRENT_FRAME());
    243  1.3  mrg   if (!ff)
    244  1.3  mrg     return 0;  // Out of fake stack.
    245  1.3  mrg   uptr ptr = reinterpret_cast<uptr>(ff);
    246  1.3  mrg   SetShadow(ptr, size, class_id, 0);
    247  1.3  mrg   return ptr;
    248  1.3  mrg }
    249  1.3  mrg 
// Releases the fake frame at |ptr| and repoisons its shadow with the
// after-return magic, so any later access is reported as use-after-return.
static ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  FakeStack::Deallocate(ptr, class_id);
  SetShadow(ptr, size, class_id, kMagic8);
}
    254  1.1  mrg 
    255  1.1  mrg } // namespace __asan
    256  1.1  mrg 
    257  1.1  mrg // ---------------------- Interface ---------------- {{{1
    258  1.1  mrg using namespace __asan;
// For each class_id, defines the three extern "C" ASan interface entry
// points: __asan_stack_malloc_<id> (allocation honoring the runtime UAR
// flag), __asan_stack_malloc_always_<id> (allocation regardless of the
// flag) and __asan_stack_free_<id>.
#define DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(class_id)                      \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                               \
      __asan_stack_malloc_##class_id(uptr size) {                             \
    return OnMalloc(class_id, size);                                          \
  }                                                                           \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr                               \
      __asan_stack_malloc_always_##class_id(uptr size) {                      \
    return OnMallocAlways(class_id, size);                                    \
  }                                                                           \
  extern "C" SANITIZER_INTERFACE_ATTRIBUTE void __asan_stack_free_##class_id( \
      uptr ptr, uptr size) {                                                  \
    OnFree(ptr, class_id, size);                                              \
  }
    272  1.1  mrg 
// Instantiate the interface entry points for size classes 0 through 10
// (expected to match kNumberOfSizeClasses — see asan_fake_stack.h).
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(0)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(1)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(2)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(3)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(4)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(5)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(6)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(7)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(8)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(9)
DEFINE_STACK_MALLOC_FREE_WITH_CLASS_ID(10)
    284  1.3  mrg 
    285  1.1  mrg extern "C" {
    286  1.3  mrg // TODO: remove this method and fix tests that use it by setting
    287  1.3  mrg // -asan-use-after-return=never, after modal UAR flag lands
    288  1.3  mrg // (https://github.com/google/sanitizers/issues/1394)
// Public interface: returns an opaque handle to the current thread's fake
// stack, or null when none exists (e.g. UAR detection is disabled).
SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_get_current_fake_stack() { return GetFakeStackFast(); }
    291  1.1  mrg 
// Public interface: if |addr| lies in a frame of |fake_stack| that is still
// tagged with kCurrentStackFrameMagic, returns the real stack pointer saved
// in that frame and, when |beg|/|end| are non-null, the frame's usable
// bounds. Returns null otherwise.
SANITIZER_INTERFACE_ATTRIBUTE
void *__asan_addr_is_in_fake_stack(void *fake_stack, void *addr, void **beg,
                                   void **end) {
  FakeStack *fs = reinterpret_cast<FakeStack*>(fake_stack);
  if (!fs) return nullptr;
  uptr frame_beg, frame_end;
  FakeFrame *frame = reinterpret_cast<FakeFrame *>(fs->AddrIsInFakeStack(
      reinterpret_cast<uptr>(addr), &frame_beg, &frame_end));
  if (!frame) return nullptr;
  // Reject frames whose header no longer carries the in-use magic.
  if (frame->magic != kCurrentStackFrameMagic)
    return nullptr;
  if (beg) *beg = reinterpret_cast<void*>(frame_beg);
  if (end) *end = reinterpret_cast<void*>(frame_end);
  return reinterpret_cast<void*>(frame->real_stack);
}
    307  1.1  mrg 
// Public interface: poisons the redzones around a dynamic alloca of |size|
// bytes at |addr| — a kAllocaRedzoneSize-byte left redzone below addr, a
// partial redzone covering the tail of the last shadow granule, and a full
// right redzone at the next 32-byte boundary. Assumes |addr| is suitably
// aligned for the left redzone math — TODO confirm with the
// instrumentation pass that emits this call.
SANITIZER_INTERFACE_ATTRIBUTE
void __asan_alloca_poison(uptr addr, uptr size) {
  uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
  uptr PartialRzAddr = addr + size;
  // Round the end of the data up to the next 32-byte boundary.
  uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
  // Round the end of the data down to a shadow-granule boundary.
  uptr PartialRzAligned = PartialRzAddr & ~(ASAN_SHADOW_GRANULARITY - 1);
  FastPoisonShadow(LeftRedzoneAddr, kAllocaRedzoneSize, kAsanAllocaLeftMagic);
  FastPoisonShadowPartialRightRedzone(
      PartialRzAligned, PartialRzAddr % ASAN_SHADOW_GRANULARITY,
      RightRzAddr - PartialRzAligned, kAsanAllocaRightMagic);
  FastPoisonShadow(RightRzAddr, kAllocaRedzoneSize, kAsanAllocaRightMagic);
}
    320  1.1  mrg 
// Public interface: unpoisons the shadow for [top, bottom) when allocas go
// out of scope. The stack grows down, so top is the lower address; a null
// or inverted range is a no-op.
SANITIZER_INTERFACE_ATTRIBUTE
void __asan_allocas_unpoison(uptr top, uptr bottom) {
  if ((!top) || (top > bottom)) return;
  // Zero the shadow bytes directly; one shadow byte covers
  // ASAN_SHADOW_GRANULARITY application bytes.
  REAL(memset)
  (reinterpret_cast<void *>(MemToShadow(top)), 0,
   (bottom - top) / ASAN_SHADOW_GRANULARITY);
}
    328  1.1  mrg } // extern "C"
    329