/src/sys/external/bsd/compiler_rt/dist/lib/asan/ |
asan_stack.cc | 27 return atomic_load(&malloc_context_size, memory_order_acquire);
|
asan_thread.h | 111 return !atomic_load(&stack_switching_, memory_order_relaxed) &&
asan_thread.h | 118 if (atomic_load(&stack_switching_, memory_order_relaxed))
|
/src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/ |
sanitizer_lfstack.h | 33 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;
sanitizer_lfstack.h | 37 u64 cmp = atomic_load(&head_, memory_order_relaxed);
sanitizer_lfstack.h | 49 u64 cmp = atomic_load(&head_, memory_order_acquire);
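Note: the three lfstack hits are the counted lock-free LIFO pattern: a relaxed load for the empty check and for seeding the push CAS loop, and an acquire load before pop dereferences the head. A minimal sketch of the same pattern with standard std::atomic (the Node type, the 48-bit pointer mask, and never-freed nodes are illustrative assumptions, not the sanitizer's exact layout):

    #include <atomic>
    #include <cstdint>

    struct Node { Node *next; };

    // Packed head word: low 48 bits hold the top-of-stack pointer,
    // high 16 bits an ABA generation counter.
    constexpr uint64_t kPtrMask = (1ULL << 48) - 1;
    constexpr uint64_t kCounterInc = 1ULL << 48;

    class LFStack {
      std::atomic<uint64_t> head_{0};
     public:
      bool Empty() const {
        return (head_.load(std::memory_order_relaxed) & kPtrMask) == 0;
      }
      void Push(Node *p) {
        uint64_t cmp = head_.load(std::memory_order_relaxed);
        for (;;) {
          p->next = reinterpret_cast<Node *>(cmp & kPtrMask);
          uint64_t xch = reinterpret_cast<uint64_t>(p) |
                         ((cmp & ~kPtrMask) + kCounterInc);
          // On failure, cmp is reloaded with the current head.
          if (head_.compare_exchange_weak(cmp, xch, std::memory_order_release))
            return;
        }
      }
      Node *Pop() {  // nodes are assumed never freed, as in the sanitizer
        uint64_t cmp = head_.load(std::memory_order_acquire);
        for (;;) {
          Node *cur = reinterpret_cast<Node *>(cmp & kPtrMask);
          if (!cur) return nullptr;
          uint64_t xch = reinterpret_cast<uint64_t>(cur->next) |
                         ((cmp & ~kPtrMask) + kCounterInc);
          if (head_.compare_exchange_weak(cmp, xch, std::memory_order_acquire))
            return cur;
        }
      }
    };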
|
sanitizer_mutex.h | 44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
sanitizer_mutex.h | 56 if (atomic_load(&state_, memory_order_relaxed) == 0
sanitizer_mutex.h | 105 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
sanitizer_mutex.h | 137 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);
sanitizer_mutex.h | 155 u32 cmp = atomic_load(&state_, memory_order_relaxed);
sanitizer_mutex.h | 169 u32 prev = atomic_load(&state_, memory_order_acquire);
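Note: the mutex hits show a test-and-test-and-set spin lock: relaxed loads in the CHECKs and the spin loop, where staleness is harmless, and an acquire operation only where the lock is actually taken. A sketch of that shape, assuming a simple 0/1 state word (the sanitizer's version adds proc-yield backoff):

    #include <atomic>

    class SpinMutex {
      std::atomic<unsigned> state_{0};  // 0 = unlocked, 1 = locked
     public:
      void Lock() {
        // Fast path: try to take the lock immediately.
        if (state_.exchange(1, std::memory_order_acquire) == 0)
          return;
        // Slow path: spin on a relaxed read (only reads the cache
        // line), retrying the exchange once it looks unlocked.
        for (;;) {
          while (state_.load(std::memory_order_relaxed) != 0) {}
          if (state_.exchange(1, std::memory_order_acquire) == 0)
            return;
        }
      }
      void Unlock() { state_.store(0, std::memory_order_release); }
    };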
|
sanitizer_libignore.h | 92 const uptr n = atomic_load(&ignored_ranges_count_, memory_order_acquire);
sanitizer_libignore.h | 106 const uptr n = atomic_load(&instrumented_ranges_count_, memory_order_acquire);
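Note: the acquire loads of the two counters pair with a release store that the writer performs after filling the ranges array, so a reader that observes count n also observes elements [0, n). A sketch of this publish-by-count idiom (array capacity and field names are illustrative):

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    struct AddressRange { uintptr_t begin, end; };

    constexpr size_t kMaxRanges = 128;  // illustrative capacity
    AddressRange ignored_ranges[kMaxRanges];
    std::atomic<size_t> ignored_ranges_count{0};

    // Writer (externally serialized): fill the slot, then publish it
    // by bumping the count with release.
    void AddRange(AddressRange r) {
      size_t n = ignored_ranges_count.load(std::memory_order_relaxed);
      ignored_ranges[n] = r;
      ignored_ranges_count.store(n + 1, std::memory_order_release);
    }

    // Reader: acquire pairs with the release above, so elements
    // [0, n) are fully written before the scan sees them.
    bool IsIgnored(uintptr_t pc) {
      size_t n = ignored_ranges_count.load(std::memory_order_acquire);
      for (size_t i = 0; i < n; i++)
        if (pc >= ignored_ranges[i].begin && pc < ignored_ranges[i].end)
          return true;
      return false;
    }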
|
sanitizer_persistent_allocator.h | 38 uptr cmp = atomic_load(&region_pos, memory_order_acquire);
sanitizer_persistent_allocator.h | 39 uptr end = atomic_load(&region_end, memory_order_acquire);
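Note: lines 38-39 read the bump pointer and region end before a CAS race for the next chunk. A sketch of such lock-free bump allocation, with the region-refill path omitted:

    #include <atomic>
    #include <cstdint>

    std::atomic<uintptr_t> region_pos{0};  // current bump pointer
    std::atomic<uintptr_t> region_end{0};  // end of the mapped region

    // Carve `size` bytes out of the current region; nullptr means the
    // caller must map a fresh region (that path is omitted here).
    void *TryAlloc(uintptr_t size) {
      uintptr_t cmp = region_pos.load(std::memory_order_acquire);
      uintptr_t end = region_end.load(std::memory_order_acquire);
      if (cmp == 0 || cmp + size > end)
        return nullptr;  // no region yet, or not enough room left
      if (region_pos.compare_exchange_strong(cmp, cmp + size,
                                             std::memory_order_acquire))
        return reinterpret_cast<void *>(cmp);
      return nullptr;  // lost the race; caller retries
    }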
|
sanitizer_addrhashmap.h | 188 uptr addr1 = atomic_load(&c->addr, memory_order_acquire);
sanitizer_addrhashmap.h | 196 if (atomic_load(&b->add, memory_order_relaxed)) {
sanitizer_addrhashmap.h | 198 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
sanitizer_addrhashmap.h | 201 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
sanitizer_addrhashmap.h | 217 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
sanitizer_addrhashmap.h | 229 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
sanitizer_addrhashmap.h | 233 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
sanitizer_addrhashmap.h | 257 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
sanitizer_addrhashmap.h | 290 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0);
sanitizer_addrhashmap.h | 301 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed) [all...]
|
sanitizer_allocator_stats.h | 35 v += atomic_load(&stats_[i], memory_order_relaxed);
sanitizer_allocator_stats.h | 40 v = atomic_load(&stats_[i], memory_order_relaxed) - v;
sanitizer_allocator_stats.h | 49 return atomic_load(&stats_[i], memory_order_relaxed);
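Note: these counters are relaxed on both sides because they are monotonic bookkeeping, not synchronization; readers tolerate slightly stale sums. A sketch, with illustrative stat names:

    #include <atomic>
    #include <cstdint>

    enum StatType { kStatAllocated, kStatMapped, kNumStats };  // illustrative

    class AllocatorStats {
      std::atomic<uint64_t> stats_[kNumStats] = {};  // zero-initialized
     public:
      void Add(StatType i, uint64_t v) {
        // Relaxed: a monotonic counter, not a synchronization point.
        stats_[i].fetch_add(v, std::memory_order_relaxed);
      }
      uint64_t Get(StatType i) const {
        return stats_[i].load(std::memory_order_relaxed);
      }
    };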
|
sanitizer_atomic.h | 77 return atomic_load(a, memory_order_relaxed);
|
sanitizer_stackdepot.cc | 40 atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
sanitizer_stackdepot.cc | 92 return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) &
sanitizer_stackdepot.cc | 142 uptr v = atomic_load(p, memory_order_consume);
|
sanitizer_libignore.cc | 83 atomic_load(&ignored_ranges_count_, memory_order_relaxed);
sanitizer_libignore.cc | 112 atomic_load(&instrumented_ranges_count_, memory_order_relaxed);
|
sanitizer_stackdepotbase.h | 78 uptr cmp = atomic_load(p, memory_order_relaxed);
sanitizer_stackdepotbase.h | 104 uptr v = atomic_load(p, memory_order_consume);
sanitizer_stackdepotbase.h | 149 uptr v = atomic_load(p, memory_order_consume);
sanitizer_stackdepotbase.h | 171 uptr s = atomic_load(p, memory_order_relaxed);
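Note: the consume loads guard hash-bucket traversal: the reads that follow (node hash, link pointer) are data-dependent on the loaded pointer, and consume orders them after the load on weakly ordered CPUs (compilers currently promote consume to acquire); the relaxed load at line 78 merely seeds a CAS. A sketch of the lookup side, with a simplified node layout (the real code also packs lock bits into the pointer):

    #include <atomic>
    #include <cstdint>

    struct DepotNode {
      DepotNode *link;  // next node in the hash chain
      uint32_t hash;    // stack hash; frames would follow in the real node
    };

    DepotNode *FindInBucket(std::atomic<DepotNode *> &bucket, uint32_t hash) {
      // Consume orders the dependent reads (n->hash, n->link) after
      // the pointer load; compilers currently promote it to acquire.
      for (DepotNode *n = bucket.load(std::memory_order_consume); n;
           n = n->link) {
        if (n->hash == hash)
          return n;
      }
      return nullptr;
    }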
|
sanitizer_atomic_clang_other.h | 26 INLINE typename T::Type atomic_load( function in namespace:__sanitizer
|
sanitizer_atomic_clang_x86.h | 28 INLINE typename T::Type atomic_load( function in namespace:__sanitizer
|
sanitizer_allocator.cc | 98 if (atomic_load(&internal_allocator_initialized, memory_order_acquire) == 0) {
sanitizer_allocator.cc | 100 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
sanitizer_allocator.cc | 243 return atomic_load(&allocator_may_return_null, memory_order_relaxed);
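Note: lines 98 and 100 are double-checked initialization: an acquire load on the fast path, then a relaxed re-check under the init lock, where the mutex already provides the ordering. A sketch (the lock type is illustrative; the sanitizer uses its own mutex):

    #include <atomic>
    #include <mutex>

    std::atomic<int> internal_allocator_initialized{0};
    std::mutex internal_alloc_init_mu;  // illustrative lock

    void EnsureInitialized() {
      // Fast path: acquire pairs with the release store below, so a
      // thread that sees 1 also sees the initialized allocator state.
      if (internal_allocator_initialized.load(std::memory_order_acquire) == 0) {
        std::lock_guard<std::mutex> lock(internal_alloc_init_mu);
        // Re-check under the lock; relaxed suffices because the mutex
        // already orders this load against the winning initializer.
        if (internal_allocator_initialized.load(std::memory_order_relaxed) == 0) {
          // ... one-time initialization ...
          internal_allocator_initialized.store(1, std::memory_order_release);
        }
      }
    }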
|
sanitizer_allocator_bytemap.h | 88 atomic_load(&map1_[idx], memory_order_acquire));
|
/src/sys/external/bsd/compiler_rt/dist/lib/xray/ |
xray_init.cc | 67 if (atomic_load(&XRayInitialized, memory_order_acquire))
xray_init.cc | 73 if (!atomic_load(&XRayFlagsInitialized, memory_order_acquire)) {
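Note: both flags are read with acquire so that a reader observing true also observes everything the initializer wrote before its release store. A sketch of the reader side (PatchFunctions is a hypothetical caller, not an XRay function):

    #include <atomic>

    std::atomic<bool> XRayInitialized{false};  // set with release by the init path

    bool PatchFunctions() {  // hypothetical caller
      // Acquire: observing `true` guarantees visibility of everything
      // written before the initializer's release store.
      if (!XRayInitialized.load(std::memory_order_acquire))
        return false;  // too early; flags and sleds are not ready yet
      // ... safe to use the initialized state here ...
      return true;
    }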
|
xray_interface.cc | 181 if (!atomic_load(&XRayInitialized,
xray_interface.cc | 232 if (!atomic_load(&XRayInitialized,
xray_interface.cc | 374 if (atomic_load(&XRayInitialized,
xray_interface.cc | 387 if (atomic_load(&XRayInitialized,
xray_interface.cc | 399 if (atomic_load(&XRayInitialized,
xray_interface.cc | 450 if (!atomic_load(&XRayInitialized,
|
xray_basic_logging.cc | 95 if (atomic_load(&UseRealTSC, memory_order_acquire))
xray_basic_logging.cc | 107 Header.CycleFrequency = atomic_load(&CycleFrequency, memory_order_acquire);
xray_basic_logging.cc | 220 if (Delta < atomic_load(&ThresholdTicks, memory_order_relaxed)) {
xray_basic_logging.cc | 393 if (!atomic_load(&UseRealTSC, memory_order_relaxed) && Verbosity())
xray_basic_logging.cc | 425 atomic_load(&TicksPerSec, memory_order_acquire) *
xray_basic_logging.cc | 428 __xray_set_handler_arg1(atomic_load(&UseRealTSC, memory_order_acquire)
xray_basic_logging.cc | 431 __xray_set_handler(atomic_load(&UseRealTSC, memory_order_acquire)
|
xray_buffer_queue.h | 234 return atomic_load(&Finalizing, memory_order_acquire);
xray_buffer_queue.h | 238 return atomic_load(&Generation, memory_order_acquire);
|
/src/sys/external/bsd/compiler_rt/dist/lib/tsan/rtl/ |
tsan_external.cc | 32 if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;
tsan_external.cc | 51 uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);
tsan_external.cc | 62 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));
tsan_external.cc | 100 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));
|
tsan_mutex.cc | 224 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
tsan_mutex.cc | 236 if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {
tsan_mutex.cc | 266 prev = atomic_load(&state_, memory_order_acquire);
tsan_mutex.cc | 287 CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);
|
tsan_fd.cc | 58 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)
tsan_fd.cc | 64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {
tsan_fd.cc | 78 uptr l1 = atomic_load(pl1, memory_order_consume);
tsan_fd.cc | 133 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
tsan_fd.cc | 145 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
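Note: line 78 loads the first level of a two-level descriptor table with consume, so indexing into the second level is dependency-ordered after the pointer load; the later relaxed loads rely on synchronization established elsewhere. A sketch of the lookup (table sizes are illustrative; bounds checking and slot allocation are elided):

    #include <atomic>
    #include <cstdint>

    struct FdDesc { std::atomic<uint64_t> rc; /* ... */ };

    constexpr int kTableSizeL1 = 1024;  // illustrative sizes
    constexpr int kTableSizeL2 = 1024;
    std::atomic<FdDesc *> fd_table_l1[kTableSizeL1];  // lazily populated

    FdDesc *FdGet(int fd) {
      std::atomic<FdDesc *> &pl1 = fd_table_l1[fd / kTableSizeL2];
      // Consume: the indexing below is data-dependent on this pointer,
      // so the second-level contents are ordered after the load.
      FdDesc *l1 = pl1.load(std::memory_order_consume);
      if (!l1)
        return nullptr;  // slot not allocated yet
      return &l1[fd % kTableSizeL2];
    }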
|
/src/common/lib/libc/arch/arm/atomic/ |
Makefile.inc | 27 atomic_load.c atomic_store.c \
|
/src/common/lib/libc/arch/sparc/atomic/ |
Makefile.inc | 49 atomic_load.c atomic_store.c \
|