//===-- msan_poisoning.cc ---------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemorySanitizer.
//
//===----------------------------------------------------------------------===//

#include "msan_poisoning.h"

#include "interception/interception.h"
#include "msan_origin.h"
#include "sanitizer_common/sanitizer_common.h"

DECLARE_REAL(void *, memset, void *dest, int c, uptr n)
DECLARE_REAL(void *, memcpy, void *dest, const void *src, uptr n)
DECLARE_REAL(void *, memmove, void *dest, const void *src, uptr n)

namespace __msan {

u32 GetOriginIfPoisoned(uptr addr, uptr size) {
  unsigned char *s = (unsigned char *)MEM_TO_SHADOW(addr);
  for (uptr i = 0; i < size; ++i)
    if (s[i]) return *(u32 *)SHADOW_TO_ORIGIN(((uptr)s + i) & ~3UL);
  return 0;
}

void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size,
                         u32 src_origin) {
  uptr dst_s = MEM_TO_SHADOW(addr);
  uptr src_s = src_shadow;
  uptr src_s_end = src_s + size;

  for (; src_s < src_s_end; ++dst_s, ++src_s)
    if (*(u8 *)src_s) *(u32 *)SHADOW_TO_ORIGIN(dst_s & ~3UL) = src_origin;
}
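
// Note on CopyOrigin (below): origins are stored as one 32-bit id per 4-byte
// origin cell, so the destination range is handled in three parts. The
// possibly unaligned head and tail cells are written only when the
// corresponding source bytes are actually poisoned (GetOriginIfPoisoned),
// which avoids clobbering the origin of initialized bytes that share the
// same cell. The aligned middle is then copied cell by cell with chaining
// when track_origins > 1, or bulk-copied with memcpy otherwise.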
void CopyOrigin(const void *dst, const void *src, uptr size,
                StackTrace *stack) {
  if (!MEM_IS_APP(dst) || !MEM_IS_APP(src)) return;

  uptr d = (uptr)dst;
  uptr beg = d & ~3UL;
  // Copy left unaligned origin if that memory is poisoned.
  if (beg < d) {
    u32 o = GetOriginIfPoisoned((uptr)src, d - beg);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(beg) = o;
    }
    beg += 4;
  }

  uptr end = (d + size) & ~3UL;
  // If both ends fall into the same 4-byte slot, we are done.
  if (end < beg) return;

  // Copy right unaligned origin if that memory is poisoned.
  if (end < d + size) {
    u32 o = GetOriginIfPoisoned((uptr)src + (end - d), (d + size) - end);
    if (o) {
      if (__msan_get_track_origins() > 1) o = ChainOrigin(o, stack);
      *(u32 *)MEM_TO_ORIGIN(end) = o;
    }
  }

  if (beg < end) {
    // Align src up.
    uptr s = ((uptr)src + 3) & ~3UL;
    // FIXME: factor out to msan_copy_origin_aligned
    if (__msan_get_track_origins() > 1) {
      u32 *src = (u32 *)MEM_TO_ORIGIN(s);
      u32 *src_s = (u32 *)MEM_TO_SHADOW(s);
      u32 *src_end = (u32 *)MEM_TO_ORIGIN(s + (end - beg));
      u32 *dst = (u32 *)MEM_TO_ORIGIN(beg);
      u32 src_o = 0;
      u32 dst_o = 0;
      for (; src < src_end; ++src, ++src_s, ++dst) {
        if (!*src_s) continue;
        if (*src != src_o) {
          src_o = *src;
          dst_o = ChainOrigin(src_o, stack);
        }
        *dst = dst_o;
      }
    } else {
      REAL(memcpy)((void *)MEM_TO_ORIGIN(beg), (void *)MEM_TO_ORIGIN(s),
                   end - beg);
    }
  }
}

void MoveShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  if (src == dst) return;
  REAL(memmove)((void *)MEM_TO_SHADOW((uptr)dst),
                (void *)MEM_TO_SHADOW((uptr)src), size);
  if (__msan_get_track_origins()) CopyOrigin(dst, src, size, stack);
}

void CopyShadowAndOrigin(const void *dst, const void *src, uptr size,
                         StackTrace *stack) {
  if (!MEM_IS_APP(dst)) return;
  if (!MEM_IS_APP(src)) return;
  REAL(memcpy)((void *)MEM_TO_SHADOW((uptr)dst),
               (void *)MEM_TO_SHADOW((uptr)src), size);
  if (__msan_get_track_origins()) CopyOrigin(dst, src, size, stack);
}

void CopyMemory(void *dst, const void *src, uptr size, StackTrace *stack) {
  REAL(memcpy)(dst, src, size);
  CopyShadowAndOrigin(dst, src, size, stack);
}

void SetShadow(const void *ptr, uptr size, u8 value) {
  uptr PageSize = GetPageSizeCached();
  uptr shadow_beg = MEM_TO_SHADOW(ptr);
  uptr shadow_end = shadow_beg + size;
  if (value ||
      shadow_end - shadow_beg < common_flags()->clear_shadow_mmap_threshold) {
    REAL(memset)((void *)shadow_beg, value, shadow_end - shadow_beg);
  } else {
    uptr page_beg = RoundUpTo(shadow_beg, PageSize);
    uptr page_end = RoundDownTo(shadow_end, PageSize);

    if (page_beg >= page_end) {
      REAL(memset)((void *)shadow_beg, 0, shadow_end - shadow_beg);
    } else {
      if (page_beg != shadow_beg) {
        REAL(memset)((void *)shadow_beg, 0, page_beg - shadow_beg);
      }
      if (page_end != shadow_end) {
        REAL(memset)((void *)page_end, 0, shadow_end - page_end);
      }
      if (!MmapFixedNoReserve(page_beg, page_end - page_beg))
        Die();
    }
  }
}
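
// Note on SetOrigin (below), with a worked example on hypothetical
// addresses: if the origin address x is 0x1006 and size is 9, then
// beg = 0x1004 and end = 0x1010, so the stores are one u32 at 0x1004
// (unaligned head), one u64 at 0x1008, and no u32 tail because end is
// already 8-byte aligned. Every 4-byte origin cell overlapping
// [x, x + size) receives the new origin id.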
void SetOrigin(const void *dst, uptr size, u32 origin) {
  // Origin mapping is 4 bytes per 4 bytes of application memory.
  // Here we extend the range such that its left and right bounds are both
  // 4 byte aligned.
  uptr x = MEM_TO_ORIGIN((uptr)dst);
  uptr beg = x & ~3UL;               // align down.
  uptr end = (x + size + 3) & ~3UL;  // align up.
  u64 origin64 = ((u64)origin << 32) | origin;
  // This is like memset, but the value is 32-bit. We unroll by 2 to write
  // 64 bits at once. May want to unroll further to get 128-bit stores.
  if (beg & 7ULL) {
    *(u32 *)beg = origin;
    beg += 4;
  }
  for (uptr addr = beg; addr < (end & ~7UL); addr += 8) *(u64 *)addr = origin64;
  if (end & 7ULL) *(u32 *)(end - 4) = origin;
}

void PoisonMemory(const void *dst, uptr size, StackTrace *stack) {
  SetShadow(dst, size, (u8)-1);

  if (__msan_get_track_origins()) {
    Origin o = Origin::CreateHeapOrigin(stack);
    SetOrigin(dst, size, o.raw_id());
  }
}

}  // namespace __msan
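
// Illustrative call-site sketch (an assumption about the surrounding runtime,
// not something defined in this file): a memcpy-style interceptor would
// typically route through CopyMemory(dst, src, n, &stack) so that the
// application bytes, their shadow, and their origins move together, while the
// allocator would call PoisonMemory(p, size, &stack) on freed chunks so that
// later reads of them are reported as use of uninitialized memory.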