1 1.1 kamil //===-- asan_poisoning.cc -------------------------------------------------===// 2 1.1 kamil // 3 1.1 kamil // The LLVM Compiler Infrastructure 4 1.1 kamil // 5 1.1 kamil // This file is distributed under the University of Illinois Open Source 6 1.1 kamil // License. See LICENSE.TXT for details. 7 1.1 kamil // 8 1.1 kamil //===----------------------------------------------------------------------===// 9 1.1 kamil // 10 1.1 kamil // This file is a part of AddressSanitizer, an address sanity checker. 11 1.1 kamil // 12 1.1 kamil // Shadow memory poisoning by ASan RTL and by user application. 13 1.1 kamil //===----------------------------------------------------------------------===// 14 1.1 kamil 15 1.1 kamil #include "asan_poisoning.h" 16 1.1 kamil #include "asan_report.h" 17 1.1 kamil #include "asan_stack.h" 18 1.1 kamil #include "sanitizer_common/sanitizer_atomic.h" 19 1.1 kamil #include "sanitizer_common/sanitizer_libc.h" 20 1.1 kamil #include "sanitizer_common/sanitizer_flags.h" 21 1.1 kamil 22 1.1 kamil namespace __asan { 23 1.1 kamil 24 1.1 kamil static atomic_uint8_t can_poison_memory; 25 1.1 kamil 26 1.1 kamil void SetCanPoisonMemory(bool value) { 27 1.1 kamil atomic_store(&can_poison_memory, value, memory_order_release); 28 1.1 kamil } 29 1.1 kamil 30 1.1 kamil bool CanPoisonMemory() { 31 1.1 kamil return atomic_load(&can_poison_memory, memory_order_acquire); 32 1.1 kamil } 33 1.1 kamil 34 1.1 kamil void PoisonShadow(uptr addr, uptr size, u8 value) { 35 1.1 kamil if (value && !CanPoisonMemory()) return; 36 1.1 kamil CHECK(AddrIsAlignedByGranularity(addr)); 37 1.1 kamil CHECK(AddrIsInMem(addr)); 38 1.1 kamil CHECK(AddrIsAlignedByGranularity(addr + size)); 39 1.1 kamil CHECK(AddrIsInMem(addr + size - SHADOW_GRANULARITY)); 40 1.1 kamil CHECK(REAL(memset)); 41 1.1 kamil FastPoisonShadow(addr, size, value); 42 1.1 kamil } 43 1.1 kamil 44 1.1 kamil void PoisonShadowPartialRightRedzone(uptr addr, 45 1.1 kamil uptr size, 46 1.1 kamil uptr redzone_size, 47 1.1 kamil u8 value) { 48 1.1 kamil if (!CanPoisonMemory()) return; 49 1.1 kamil CHECK(AddrIsAlignedByGranularity(addr)); 50 1.1 kamil CHECK(AddrIsInMem(addr)); 51 1.1 kamil FastPoisonShadowPartialRightRedzone(addr, size, redzone_size, value); 52 1.1 kamil } 53 1.1 kamil 54 1.1 kamil struct ShadowSegmentEndpoint { 55 1.1 kamil u8 *chunk; 56 1.1 kamil s8 offset; // in [0, SHADOW_GRANULARITY) 57 1.1 kamil s8 value; // = *chunk; 58 1.1 kamil 59 1.1 kamil explicit ShadowSegmentEndpoint(uptr address) { 60 1.1 kamil chunk = (u8*)MemToShadow(address); 61 1.1 kamil offset = address & (SHADOW_GRANULARITY - 1); 62 1.1 kamil value = *chunk; 63 1.1 kamil } 64 1.1 kamil }; 65 1.1 kamil 66 1.1 kamil void FlushUnneededASanShadowMemory(uptr p, uptr size) { 67 1.1 kamil // Since asan's mapping is compacting, the shadow chunk may be 68 1.1 kamil // not page-aligned, so we only flush the page-aligned portion. 69 1.1 kamil ReleaseMemoryPagesToOS(MemToShadow(p), MemToShadow(p + size)); 70 1.1 kamil } 71 1.1 kamil 72 1.1 kamil void AsanPoisonOrUnpoisonIntraObjectRedzone(uptr ptr, uptr size, bool poison) { 73 1.1 kamil uptr end = ptr + size; 74 1.1 kamil if (Verbosity()) { 75 1.1 kamil Printf("__asan_%spoison_intra_object_redzone [%p,%p) %zd\n", 76 1.1 kamil poison ? "" : "un", ptr, end, size); 77 1.1 kamil if (Verbosity() >= 2) 78 1.1 kamil PRINT_CURRENT_STACK(); 79 1.1 kamil } 80 1.1 kamil CHECK(size); 81 1.1 kamil CHECK_LE(size, 4096); 82 1.1 kamil CHECK(IsAligned(end, SHADOW_GRANULARITY)); 83 1.1 kamil if (!IsAligned(ptr, SHADOW_GRANULARITY)) { 84 1.1 kamil *(u8 *)MemToShadow(ptr) = 85 1.1 kamil poison ? static_cast<u8>(ptr % SHADOW_GRANULARITY) : 0; 86 1.1 kamil ptr |= SHADOW_GRANULARITY - 1; 87 1.1 kamil ptr++; 88 1.1 kamil } 89 1.1 kamil for (; ptr < end; ptr += SHADOW_GRANULARITY) 90 1.1 kamil *(u8*)MemToShadow(ptr) = poison ? kAsanIntraObjectRedzone : 0; 91 1.1 kamil } 92 1.1 kamil 93 1.1 kamil } // namespace __asan 94 1.1 kamil 95 1.1 kamil // ---------------------- Interface ---------------- {{{1 96 1.1 kamil using namespace __asan; // NOLINT 97 1.1 kamil 98 1.1 kamil // Current implementation of __asan_(un)poison_memory_region doesn't check 99 1.1 kamil // that user program (un)poisons the memory it owns. It poisons memory 100 1.1 kamil // conservatively, and unpoisons progressively to make sure asan shadow 101 1.1 kamil // mapping invariant is preserved (see detailed mapping description here: 102 1.1 kamil // https://github.com/google/sanitizers/wiki/AddressSanitizerAlgorithm). 103 1.1 kamil // 104 1.1 kamil // * if user asks to poison region [left, right), the program poisons 105 1.1 kamil // at least [left, AlignDown(right)). 106 1.1 kamil // * if user asks to unpoison region [left, right), the program unpoisons 107 1.1 kamil // at most [AlignDown(left), right). 108 1.1 kamil void __asan_poison_memory_region(void const volatile *addr, uptr size) { 109 1.1 kamil if (!flags()->allow_user_poisoning || size == 0) return; 110 1.1 kamil uptr beg_addr = (uptr)addr; 111 1.1 kamil uptr end_addr = beg_addr + size; 112 1.1 kamil VPrintf(3, "Trying to poison memory region [%p, %p)\n", (void *)beg_addr, 113 1.1 kamil (void *)end_addr); 114 1.1 kamil ShadowSegmentEndpoint beg(beg_addr); 115 1.1 kamil ShadowSegmentEndpoint end(end_addr); 116 1.1 kamil if (beg.chunk == end.chunk) { 117 1.1 kamil CHECK_LT(beg.offset, end.offset); 118 1.1 kamil s8 value = beg.value; 119 1.1 kamil CHECK_EQ(value, end.value); 120 1.1 kamil // We can only poison memory if the byte in end.offset is unaddressable. 121 1.1 kamil // No need to re-poison memory if it is poisoned already. 122 1.1 kamil if (value > 0 && value <= end.offset) { 123 1.1 kamil if (beg.offset > 0) { 124 1.1 kamil *beg.chunk = Min(value, beg.offset); 125 1.1 kamil } else { 126 1.1 kamil *beg.chunk = kAsanUserPoisonedMemoryMagic; 127 1.1 kamil } 128 1.1 kamil } 129 1.1 kamil return; 130 1.1 kamil } 131 1.1 kamil CHECK_LT(beg.chunk, end.chunk); 132 1.1 kamil if (beg.offset > 0) { 133 1.1 kamil // Mark bytes from beg.offset as unaddressable. 134 1.1 kamil if (beg.value == 0) { 135 1.1 kamil *beg.chunk = beg.offset; 136 1.1 kamil } else { 137 1.1 kamil *beg.chunk = Min(beg.value, beg.offset); 138 1.1 kamil } 139 1.1 kamil beg.chunk++; 140 1.1 kamil } 141 1.1 kamil REAL(memset)(beg.chunk, kAsanUserPoisonedMemoryMagic, end.chunk - beg.chunk); 142 1.1 kamil // Poison if byte in end.offset is unaddressable. 143 1.1 kamil if (end.value > 0 && end.value <= end.offset) { 144 1.1 kamil *end.chunk = kAsanUserPoisonedMemoryMagic; 145 1.1 kamil } 146 1.1 kamil } 147 1.1 kamil 148 1.1 kamil void __asan_unpoison_memory_region(void const volatile *addr, uptr size) { 149 1.1 kamil if (!flags()->allow_user_poisoning || size == 0) return; 150 1.1 kamil uptr beg_addr = (uptr)addr; 151 1.1 kamil uptr end_addr = beg_addr + size; 152 1.1 kamil VPrintf(3, "Trying to unpoison memory region [%p, %p)\n", (void *)beg_addr, 153 1.1 kamil (void *)end_addr); 154 1.1 kamil ShadowSegmentEndpoint beg(beg_addr); 155 1.1 kamil ShadowSegmentEndpoint end(end_addr); 156 1.1 kamil if (beg.chunk == end.chunk) { 157 1.1 kamil CHECK_LT(beg.offset, end.offset); 158 1.1 kamil s8 value = beg.value; 159 1.1 kamil CHECK_EQ(value, end.value); 160 1.1 kamil // We unpoison memory bytes up to enbytes up to end.offset if it is not 161 1.1 kamil // unpoisoned already. 162 1.1 kamil if (value != 0) { 163 1.1 kamil *beg.chunk = Max(value, end.offset); 164 1.1 kamil } 165 1.1 kamil return; 166 1.1 kamil } 167 1.1 kamil CHECK_LT(beg.chunk, end.chunk); 168 1.1 kamil if (beg.offset > 0) { 169 1.1 kamil *beg.chunk = 0; 170 1.1 kamil beg.chunk++; 171 1.1 kamil } 172 1.1 kamil REAL(memset)(beg.chunk, 0, end.chunk - beg.chunk); 173 1.1 kamil if (end.offset > 0 && end.value != 0) { 174 1.1 kamil *end.chunk = Max(end.value, end.offset); 175 1.1 kamil } 176 1.1 kamil } 177 1.1 kamil 178 1.1 kamil int __asan_address_is_poisoned(void const volatile *addr) { 179 1.1 kamil return __asan::AddressIsPoisoned((uptr)addr); 180 1.1 kamil } 181 1.1 kamil 182 1.1 kamil uptr __asan_region_is_poisoned(uptr beg, uptr size) { 183 1.1 kamil if (!size) return 0; 184 1.1 kamil uptr end = beg + size; 185 1.1 kamil if (SANITIZER_MYRIAD2) { 186 1.1 kamil // On Myriad, address not in DRAM range need to be treated as 187 1.1 kamil // unpoisoned. 188 1.1 kamil if (!AddrIsInMem(beg) && !AddrIsInShadow(beg)) return 0; 189 1.1 kamil if (!AddrIsInMem(end) && !AddrIsInShadow(end)) return 0; 190 1.1 kamil } else { 191 1.1 kamil if (!AddrIsInMem(beg)) return beg; 192 1.1 kamil if (!AddrIsInMem(end)) return end; 193 1.1 kamil } 194 1.1 kamil CHECK_LT(beg, end); 195 1.1 kamil uptr aligned_b = RoundUpTo(beg, SHADOW_GRANULARITY); 196 1.1 kamil uptr aligned_e = RoundDownTo(end, SHADOW_GRANULARITY); 197 1.1 kamil uptr shadow_beg = MemToShadow(aligned_b); 198 1.1 kamil uptr shadow_end = MemToShadow(aligned_e); 199 1.1 kamil // First check the first and the last application bytes, 200 1.1 kamil // then check the SHADOW_GRANULARITY-aligned region by calling 201 1.1 kamil // mem_is_zero on the corresponding shadow. 202 1.1 kamil if (!__asan::AddressIsPoisoned(beg) && 203 1.1 kamil !__asan::AddressIsPoisoned(end - 1) && 204 1.1 kamil (shadow_end <= shadow_beg || 205 1.1 kamil __sanitizer::mem_is_zero((const char *)shadow_beg, 206 1.1 kamil shadow_end - shadow_beg))) 207 1.1 kamil return 0; 208 1.1 kamil // The fast check failed, so we have a poisoned byte somewhere. 209 1.1 kamil // Find it slowly. 210 1.1 kamil for (; beg < end; beg++) 211 1.1 kamil if (__asan::AddressIsPoisoned(beg)) 212 1.1 kamil return beg; 213 1.1 kamil UNREACHABLE("mem_is_zero returned false, but poisoned byte was not found"); 214 1.1 kamil return 0; 215 1.1 kamil } 216 1.1 kamil 217 1.1 kamil #define CHECK_SMALL_REGION(p, size, isWrite) \ 218 1.1 kamil do { \ 219 1.1 kamil uptr __p = reinterpret_cast<uptr>(p); \ 220 1.1 kamil uptr __size = size; \ 221 1.1 kamil if (UNLIKELY(__asan::AddressIsPoisoned(__p) || \ 222 1.1 kamil __asan::AddressIsPoisoned(__p + __size - 1))) { \ 223 1.1 kamil GET_CURRENT_PC_BP_SP; \ 224 1.1 kamil uptr __bad = __asan_region_is_poisoned(__p, __size); \ 225 1.1 kamil __asan_report_error(pc, bp, sp, __bad, isWrite, __size, 0);\ 226 1.1 kamil } \ 227 1.1 kamil } while (false) 228 1.1 kamil 229 1.1 kamil 230 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 231 1.1 kamil u16 __sanitizer_unaligned_load16(const uu16 *p) { 232 1.1 kamil CHECK_SMALL_REGION(p, sizeof(*p), false); 233 1.1 kamil return *p; 234 1.1 kamil } 235 1.1 kamil 236 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 237 1.1 kamil u32 __sanitizer_unaligned_load32(const uu32 *p) { 238 1.1 kamil CHECK_SMALL_REGION(p, sizeof(*p), false); 239 1.1 kamil return *p; 240 1.1 kamil } 241 1.1 kamil 242 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 243 1.1 kamil u64 __sanitizer_unaligned_load64(const uu64 *p) { 244 1.1 kamil CHECK_SMALL_REGION(p, sizeof(*p), false); 245 1.1 kamil return *p; 246 1.1 kamil } 247 1.1 kamil 248 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 249 1.1 kamil void __sanitizer_unaligned_store16(uu16 *p, u16 x) { 250 1.1 kamil CHECK_SMALL_REGION(p, sizeof(*p), true); 251 1.1 kamil *p = x; 252 1.1 kamil } 253 1.1 kamil 254 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 255 1.1 kamil void __sanitizer_unaligned_store32(uu32 *p, u32 x) { 256 1.1 kamil CHECK_SMALL_REGION(p, sizeof(*p), true); 257 1.1 kamil *p = x; 258 1.1 kamil } 259 1.1 kamil 260 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 261 1.1 kamil void __sanitizer_unaligned_store64(uu64 *p, u64 x) { 262 1.1 kamil CHECK_SMALL_REGION(p, sizeof(*p), true); 263 1.1 kamil *p = x; 264 1.1 kamil } 265 1.1 kamil 266 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 267 1.1 kamil void __asan_poison_cxx_array_cookie(uptr p) { 268 1.1 kamil if (SANITIZER_WORDSIZE != 64) return; 269 1.1 kamil if (!flags()->poison_array_cookie) return; 270 1.1 kamil uptr s = MEM_TO_SHADOW(p); 271 1.1 kamil *reinterpret_cast<u8*>(s) = kAsanArrayCookieMagic; 272 1.1 kamil } 273 1.1 kamil 274 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 275 1.1 kamil uptr __asan_load_cxx_array_cookie(uptr *p) { 276 1.1 kamil if (SANITIZER_WORDSIZE != 64) return *p; 277 1.1 kamil if (!flags()->poison_array_cookie) return *p; 278 1.1 kamil uptr s = MEM_TO_SHADOW(reinterpret_cast<uptr>(p)); 279 1.1 kamil u8 sval = *reinterpret_cast<u8*>(s); 280 1.1 kamil if (sval == kAsanArrayCookieMagic) return *p; 281 1.1 kamil // If sval is not kAsanArrayCookieMagic it can only be freed memory, 282 1.1 kamil // which means that we are going to get double-free. So, return 0 to avoid 283 1.1 kamil // infinite loop of destructors. We don't want to report a double-free here 284 1.1 kamil // though, so print a warning just in case. 285 1.1 kamil // CHECK_EQ(sval, kAsanHeapFreeMagic); 286 1.1 kamil if (sval == kAsanHeapFreeMagic) { 287 1.1 kamil Report("AddressSanitizer: loaded array cookie from free-d memory; " 288 1.1 kamil "expect a double-free report\n"); 289 1.1 kamil return 0; 290 1.1 kamil } 291 1.1 kamil // The cookie may remain unpoisoned if e.g. it comes from a custom 292 1.1 kamil // operator new defined inside a class. 293 1.1 kamil return *p; 294 1.1 kamil } 295 1.1 kamil 296 1.1 kamil // This is a simplified version of __asan_(un)poison_memory_region, which 297 1.1 kamil // assumes that left border of region to be poisoned is properly aligned. 298 1.1 kamil static void PoisonAlignedStackMemory(uptr addr, uptr size, bool do_poison) { 299 1.1 kamil if (size == 0) return; 300 1.1 kamil uptr aligned_size = size & ~(SHADOW_GRANULARITY - 1); 301 1.1 kamil PoisonShadow(addr, aligned_size, 302 1.1 kamil do_poison ? kAsanStackUseAfterScopeMagic : 0); 303 1.1 kamil if (size == aligned_size) 304 1.1 kamil return; 305 1.1 kamil s8 end_offset = (s8)(size - aligned_size); 306 1.1 kamil s8* shadow_end = (s8*)MemToShadow(addr + aligned_size); 307 1.1 kamil s8 end_value = *shadow_end; 308 1.1 kamil if (do_poison) { 309 1.1 kamil // If possible, mark all the bytes mapping to last shadow byte as 310 1.1 kamil // unaddressable. 311 1.1 kamil if (end_value > 0 && end_value <= end_offset) 312 1.1 kamil *shadow_end = (s8)kAsanStackUseAfterScopeMagic; 313 1.1 kamil } else { 314 1.1 kamil // If necessary, mark few first bytes mapping to last shadow byte 315 1.1 kamil // as addressable 316 1.1 kamil if (end_value != 0) 317 1.1 kamil *shadow_end = Max(end_value, end_offset); 318 1.1 kamil } 319 1.1 kamil } 320 1.1 kamil 321 1.1 kamil void __asan_set_shadow_00(uptr addr, uptr size) { 322 1.1 kamil REAL(memset)((void *)addr, 0, size); 323 1.1 kamil } 324 1.1 kamil 325 1.1 kamil void __asan_set_shadow_f1(uptr addr, uptr size) { 326 1.1 kamil REAL(memset)((void *)addr, 0xf1, size); 327 1.1 kamil } 328 1.1 kamil 329 1.1 kamil void __asan_set_shadow_f2(uptr addr, uptr size) { 330 1.1 kamil REAL(memset)((void *)addr, 0xf2, size); 331 1.1 kamil } 332 1.1 kamil 333 1.1 kamil void __asan_set_shadow_f3(uptr addr, uptr size) { 334 1.1 kamil REAL(memset)((void *)addr, 0xf3, size); 335 1.1 kamil } 336 1.1 kamil 337 1.1 kamil void __asan_set_shadow_f5(uptr addr, uptr size) { 338 1.1 kamil REAL(memset)((void *)addr, 0xf5, size); 339 1.1 kamil } 340 1.1 kamil 341 1.1 kamil void __asan_set_shadow_f8(uptr addr, uptr size) { 342 1.1 kamil REAL(memset)((void *)addr, 0xf8, size); 343 1.1 kamil } 344 1.1 kamil 345 1.1 kamil void __asan_poison_stack_memory(uptr addr, uptr size) { 346 1.1 kamil VReport(1, "poisoning: %p %zx\n", (void *)addr, size); 347 1.1 kamil PoisonAlignedStackMemory(addr, size, true); 348 1.1 kamil } 349 1.1 kamil 350 1.1 kamil void __asan_unpoison_stack_memory(uptr addr, uptr size) { 351 1.1 kamil VReport(1, "unpoisoning: %p %zx\n", (void *)addr, size); 352 1.1 kamil PoisonAlignedStackMemory(addr, size, false); 353 1.1 kamil } 354 1.1 kamil 355 1.1 kamil void __sanitizer_annotate_contiguous_container(const void *beg_p, 356 1.1 kamil const void *end_p, 357 1.1 kamil const void *old_mid_p, 358 1.1 kamil const void *new_mid_p) { 359 1.1 kamil if (!flags()->detect_container_overflow) return; 360 1.1 kamil VPrintf(2, "contiguous_container: %p %p %p %p\n", beg_p, end_p, old_mid_p, 361 1.1 kamil new_mid_p); 362 1.1 kamil uptr beg = reinterpret_cast<uptr>(beg_p); 363 1.1 kamil uptr end = reinterpret_cast<uptr>(end_p); 364 1.1 kamil uptr old_mid = reinterpret_cast<uptr>(old_mid_p); 365 1.1 kamil uptr new_mid = reinterpret_cast<uptr>(new_mid_p); 366 1.1 kamil uptr granularity = SHADOW_GRANULARITY; 367 1.1 kamil if (!(beg <= old_mid && beg <= new_mid && old_mid <= end && new_mid <= end && 368 1.1 kamil IsAligned(beg, granularity))) { 369 1.1 kamil GET_STACK_TRACE_FATAL_HERE; 370 1.1 kamil ReportBadParamsToAnnotateContiguousContainer(beg, end, old_mid, new_mid, 371 1.1 kamil &stack); 372 1.1 kamil } 373 1.1 kamil CHECK_LE(end - beg, 374 1.1 kamil FIRST_32_SECOND_64(1UL << 30, 1ULL << 34)); // Sanity check. 375 1.1 kamil 376 1.1 kamil uptr a = RoundDownTo(Min(old_mid, new_mid), granularity); 377 1.1 kamil uptr c = RoundUpTo(Max(old_mid, new_mid), granularity); 378 1.1 kamil uptr d1 = RoundDownTo(old_mid, granularity); 379 1.1 kamil // uptr d2 = RoundUpTo(old_mid, granularity); 380 1.1 kamil // Currently we should be in this state: 381 1.1 kamil // [a, d1) is good, [d2, c) is bad, [d1, d2) is partially good. 382 1.1 kamil // Make a quick sanity check that we are indeed in this state. 383 1.1 kamil // 384 1.1 kamil // FIXME: Two of these three checks are disabled until we fix 385 1.1 kamil // https://github.com/google/sanitizers/issues/258. 386 1.1 kamil // if (d1 != d2) 387 1.1 kamil // CHECK_EQ(*(u8*)MemToShadow(d1), old_mid - d1); 388 1.1 kamil if (a + granularity <= d1) 389 1.1 kamil CHECK_EQ(*(u8*)MemToShadow(a), 0); 390 1.1 kamil // if (d2 + granularity <= c && c <= end) 391 1.1 kamil // CHECK_EQ(*(u8 *)MemToShadow(c - granularity), 392 1.1 kamil // kAsanContiguousContainerOOBMagic); 393 1.1 kamil 394 1.1 kamil uptr b1 = RoundDownTo(new_mid, granularity); 395 1.1 kamil uptr b2 = RoundUpTo(new_mid, granularity); 396 1.1 kamil // New state: 397 1.1 kamil // [a, b1) is good, [b2, c) is bad, [b1, b2) is partially good. 398 1.1 kamil PoisonShadow(a, b1 - a, 0); 399 1.1 kamil PoisonShadow(b2, c - b2, kAsanContiguousContainerOOBMagic); 400 1.1 kamil if (b1 != b2) { 401 1.1 kamil CHECK_EQ(b2 - b1, granularity); 402 1.1 kamil *(u8*)MemToShadow(b1) = static_cast<u8>(new_mid - b1); 403 1.1 kamil } 404 1.1 kamil } 405 1.1 kamil 406 1.1 kamil const void *__sanitizer_contiguous_container_find_bad_address( 407 1.1 kamil const void *beg_p, const void *mid_p, const void *end_p) { 408 1.1 kamil if (!flags()->detect_container_overflow) 409 1.1 kamil return nullptr; 410 1.1 kamil uptr beg = reinterpret_cast<uptr>(beg_p); 411 1.1 kamil uptr end = reinterpret_cast<uptr>(end_p); 412 1.1 kamil uptr mid = reinterpret_cast<uptr>(mid_p); 413 1.1 kamil CHECK_LE(beg, mid); 414 1.1 kamil CHECK_LE(mid, end); 415 1.1 kamil // Check some bytes starting from beg, some bytes around mid, and some bytes 416 1.1 kamil // ending with end. 417 1.1 kamil uptr kMaxRangeToCheck = 32; 418 1.1 kamil uptr r1_beg = beg; 419 1.1 kamil uptr r1_end = Min(beg + kMaxRangeToCheck, mid); 420 1.1 kamil uptr r2_beg = Max(beg, mid - kMaxRangeToCheck); 421 1.1 kamil uptr r2_end = Min(end, mid + kMaxRangeToCheck); 422 1.1 kamil uptr r3_beg = Max(end - kMaxRangeToCheck, mid); 423 1.1 kamil uptr r3_end = end; 424 1.1 kamil for (uptr i = r1_beg; i < r1_end; i++) 425 1.1 kamil if (AddressIsPoisoned(i)) 426 1.1 kamil return reinterpret_cast<const void *>(i); 427 1.1 kamil for (uptr i = r2_beg; i < mid; i++) 428 1.1 kamil if (AddressIsPoisoned(i)) 429 1.1 kamil return reinterpret_cast<const void *>(i); 430 1.1 kamil for (uptr i = mid; i < r2_end; i++) 431 1.1 kamil if (!AddressIsPoisoned(i)) 432 1.1 kamil return reinterpret_cast<const void *>(i); 433 1.1 kamil for (uptr i = r3_beg; i < r3_end; i++) 434 1.1 kamil if (!AddressIsPoisoned(i)) 435 1.1 kamil return reinterpret_cast<const void *>(i); 436 1.1 kamil return nullptr; 437 1.1 kamil } 438 1.1 kamil 439 1.1 kamil int __sanitizer_verify_contiguous_container(const void *beg_p, 440 1.1 kamil const void *mid_p, 441 1.1 kamil const void *end_p) { 442 1.1 kamil return __sanitizer_contiguous_container_find_bad_address(beg_p, mid_p, 443 1.1 kamil end_p) == nullptr; 444 1.1 kamil } 445 1.1 kamil 446 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 447 1.1 kamil void __asan_poison_intra_object_redzone(uptr ptr, uptr size) { 448 1.1 kamil AsanPoisonOrUnpoisonIntraObjectRedzone(ptr, size, true); 449 1.1 kamil } 450 1.1 kamil 451 1.1 kamil extern "C" SANITIZER_INTERFACE_ATTRIBUTE 452 1.1 kamil void __asan_unpoison_intra_object_redzone(uptr ptr, uptr size) { 453 1.1 kamil AsanPoisonOrUnpoisonIntraObjectRedzone(ptr, size, false); 454 1.1 kamil } 455 1.1 kamil 456 1.1 kamil // --- Implementation of LSan-specific functions --- {{{1 457 1.1 kamil namespace __lsan { 458 1.1 kamil bool WordIsPoisoned(uptr addr) { 459 1.1 kamil return (__asan_region_is_poisoned(addr, sizeof(uptr)) != 0); 460 1.1 kamil } 461 1.1 kamil } 462