/src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/tests/

sanitizer_atomic_test.cc
  55 CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_relaxed>();
  56 CheckStoreLoad<atomic_uint8_t, memory_order_consume, memory_order_relaxed>();
  57 CheckStoreLoad<atomic_uint8_t, memory_order_acquire, memory_order_relaxed>();
  58 CheckStoreLoad<atomic_uint8_t, memory_order_relaxed, memory_order_release>();
  61 CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_relaxed>();
  62 CheckStoreLoad<atomic_uint16_t, memory_order_consume, memory_order_relaxed>();
  63 CheckStoreLoad<atomic_uint16_t, memory_order_acquire, memory_order_relaxed>();
  64 CheckStoreLoad<atomic_uint16_t, memory_order_relaxed, memory_order_release>() [all...]
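
The helper's body is truncated above, but the instantiation grid is clear: one CheckStoreLoad per combination of atomic type, load order, and store order. A minimal sketch of what such a round-trip check can look like, written against std::atomic rather than the sanitizer's own wrappers (the real body is not visible in these matches, so treat this as an illustration):

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Illustrative reconstruction: store every value of an unsigned type with
    // one memory order and read it back with another, asserting the round trip.
    template <typename T, std::memory_order LoadMO, std::memory_order StoreMO>
    void CheckStoreLoad() {
      std::atomic<T> a{0};
      T v = 0;
      do {
        a.store(v, StoreMO);
        assert(a.load(LoadMO) == v);
      } while (++v != 0);  // wraps through all values of an unsigned type
    }

    int main() {
      CheckStoreLoad<std::uint8_t, std::memory_order_relaxed,
                     std::memory_order_relaxed>();
      CheckStoreLoad<std::uint8_t, std::memory_order_acquire,
                     std::memory_order_relaxed>();
      CheckStoreLoad<std::uint8_t, std::memory_order_relaxed,
                     std::memory_order_release>();
      CheckStoreLoad<std::uint16_t, std::memory_order_relaxed,
                     std::memory_order_relaxed>();
    }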

/src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/

sanitizer_atomic.h
  22 memory_order_relaxed = 1 << 0,  (enumerator in enum __sanitizer::memory_order)
  77 return atomic_load(a, memory_order_relaxed);
  82 atomic_store(a, v, memory_order_relaxed);
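
Line 22 is the notable design point: unlike std::memory_order, the sanitizer's enumerators are distinct single bits, which lets the per-architecture implementations check an allowed set with one bitwise AND (visible in the DCHECK matches below). Lines 77 and 82 read like the bodies of relaxed convenience wrappers; a sketch of both, where everything beyond the quoted lines is a reconstruction:

    namespace __sanitizer {

    // One bit per order, so "mo & (allowed orders)" tests set membership.
    enum memory_order {
      memory_order_relaxed = 1 << 0,
      memory_order_consume = 1 << 1,
      memory_order_acquire = 1 << 2,
      memory_order_release = 1 << 3,
      memory_order_acq_rel = 1 << 4,
      memory_order_seq_cst = 1 << 5,
    };

    // Provided by the per-architecture headers.
    template <typename T>
    typename T::Type atomic_load(const volatile T *a, memory_order mo);
    template <typename T>
    void atomic_store(volatile T *a, typename T::Type v, memory_order mo);

    // Relaxed convenience wrappers (cf. lines 77 and 82 above).
    template <typename T>
    typename T::Type atomic_load_relaxed(const volatile T *a) {
      return atomic_load(a, memory_order_relaxed);
    }

    template <typename T>
    void atomic_store_relaxed(volatile T *a, typename T::Type v) {
      atomic_store(a, v, memory_order_relaxed);
    }

    }  // namespace __sanitizer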

sanitizer_allocator_stats.h
  35 v += atomic_load(&stats_[i], memory_order_relaxed);
  36 atomic_store(&stats_[i], v, memory_order_relaxed);
  40 v = atomic_load(&stats_[i], memory_order_relaxed) - v;
  41 atomic_store(&stats_[i], v, memory_order_relaxed);
  45 atomic_store(&stats_[i], v, memory_order_relaxed);
  49 return atomic_load(&stats_[i], memory_order_relaxed);
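
Note that lines 35-36 and 40-41 implement add and subtract as a relaxed load followed by a separate relaxed store, not as one atomic read-modify-write. That is only sound when each counter array has a single writer (or external locking), with concurrent readers merely tolerating slightly stale values. A sketch of the idiom using std::atomic; the class shape and names are assumptions:

    #include <atomic>
    #include <cstdint>

    // Assumed single-writer counters: the split load/store is NOT an atomic
    // increment, but it never tears, and concurrent readers see either the
    // old or the new value.
    class AllocatorStatCounters {
      std::atomic<std::uint64_t> stats_[4] = {};
     public:
      void Add(int i, std::uint64_t v) {
        v += stats_[i].load(std::memory_order_relaxed);
        stats_[i].store(v, std::memory_order_relaxed);
      }
      void Sub(int i, std::uint64_t v) {
        v = stats_[i].load(std::memory_order_relaxed) - v;
        stats_[i].store(v, std::memory_order_relaxed);
      }
      void Set(int i, std::uint64_t v) {
        stats_[i].store(v, std::memory_order_relaxed);
      }
      std::uint64_t Get(int i) const {
        return stats_[i].load(std::memory_order_relaxed);
      }
    };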

sanitizer_atomic_clang_other.h
  28 DCHECK(mo & (memory_order_relaxed | memory_order_consume
  35 if (mo == memory_order_relaxed) {
  65 DCHECK(mo & (memory_order_relaxed | memory_order_release
  71 if (mo == memory_order_relaxed) {

sanitizer_atomic_clang_x86.h
  30 DCHECK(mo & (memory_order_relaxed | memory_order_consume
  37 if (mo == memory_order_relaxed) {
  75 DCHECK(mo & (memory_order_relaxed | memory_order_release
  81 if (mo == memory_order_relaxed) {
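
The generic clang header, the x86 header, and the MSVC header further down all open the same way: a DCHECK that the requested order is in the legal set for the operation (loads accept relaxed/consume/acquire/seq_cst, stores accept relaxed/release/seq_cst), then a fast path where memory_order_relaxed becomes a plain aligned access. A compilable sketch of the load side, with DCHECK reduced to assert and the architecture-specific barrier approximated by the GCC/Clang builtin __sync_synchronize:

    #include <cassert>

    enum memory_order {
      memory_order_relaxed = 1 << 0,
      memory_order_consume = 1 << 1,
      memory_order_acquire = 1 << 2,
      memory_order_release = 1 << 3,
      memory_order_acq_rel = 1 << 4,
      memory_order_seq_cst = 1 << 5,
    };

    struct atomic_uint32_t {
      typedef unsigned Type;
      volatile Type val_dont_use;  // accessed only through the helpers
    };

    inline unsigned atomic_load(const volatile atomic_uint32_t *a,
                                memory_order mo) {
      assert(mo & (memory_order_relaxed | memory_order_consume |
                   memory_order_acquire | memory_order_seq_cst));
      unsigned v;
      if (mo == memory_order_relaxed) {
        v = a->val_dont_use;    // plain load; aligned loads assumed atomic
      } else {
        __sync_synchronize();   // stand-in for the arch-specific barrier
        v = a->val_dont_use;
        __sync_synchronize();
      }
      return v;
    }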

sanitizer_lfstack.h
  29 atomic_store(&head_, 0, memory_order_relaxed);
  33 return (atomic_load(&head_, memory_order_relaxed) & kPtrMask) == 0;
  37 u64 cmp = atomic_load(&head_, memory_order_relaxed);
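
The lock-free stack keeps its whole state in one 64-bit head word: the top-of-stack pointer in the low bits (kPtrMask) and an ABA-prevention counter above it. The relaxed load at line 37 is only the seed for a compare-exchange loop that revalidates it. A sketch assuming a 48-bit pointer field, since the real mask width is not shown here:

    #include <atomic>
    #include <cstdint>

    // T must provide a "T *next" field. The 48-bit pointer width is assumed.
    template <typename T>
    struct LFStack {
      static constexpr std::uint64_t kPtrMask = (1ULL << 48) - 1;
      static constexpr std::uint64_t kCounterInc = 1ULL << 48;

      std::atomic<std::uint64_t> head_{0};

      bool Empty() const {
        return (head_.load(std::memory_order_relaxed) & kPtrMask) == 0;
      }

      void Push(T *p) {
        std::uint64_t cmp = head_.load(std::memory_order_relaxed);  // line 37
        for (;;) {
          p->next = reinterpret_cast<T *>(cmp & kPtrMask);
          std::uint64_t xch = reinterpret_cast<std::uint64_t>(p) |
                              ((cmp & ~kPtrMask) + kCounterInc);  // bump counter
          // Release publishes p->next; on failure cmp is reloaded for us.
          if (head_.compare_exchange_weak(cmp, xch, std::memory_order_release,
                                          std::memory_order_relaxed))
            return;
        }
      }
    };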

sanitizer_mutex.h
  26 atomic_store(&state_, 0, memory_order_relaxed);
  44 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), 1);
  56 if (atomic_load(&state_, memory_order_relaxed) == 0
  101 atomic_store(&state_, kUnlocked, memory_order_relaxed);
  105 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  137 CHECK_NE(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  155 u32 cmp = atomic_load(&state_, memory_order_relaxed);
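
These matches show the two canonical uses of relaxed ordering in a spin lock: CHECKs that only sanity-test the state without synchronizing (lines 44, 105, 137), and contended paths that poll with cheap relaxed loads before retrying the acquiring operation or seeding a CAS (lines 56, 155). A sketch of the lock path with std::atomic:

    #include <atomic>

    class SpinMutex {
      std::atomic<unsigned> state_{0};  // 0 = unlocked, 1 = locked
     public:
      void Lock() {
        if (state_.exchange(1, std::memory_order_acquire) == 0)
          return;  // fast path: was unlocked
        for (;;) {
          // Spin on relaxed loads: no ordering is needed just to watch the
          // flag, and it keeps the cache line shared while we wait.
          while (state_.load(std::memory_order_relaxed) != 0) {
          }
          // The operation that actually takes the lock must be acquire.
          if (state_.exchange(1, std::memory_order_acquire) == 0)
            return;
        }
      }
      void Unlock() { state_.store(0, std::memory_order_release); }
    };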

sanitizer_atomic_clang_mips.h
  45 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
  71 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
  94 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));
  107 (memory_order_relaxed | memory_order_release | memory_order_seq_cst));

sanitizer_addrhashmap.h
  196 if (atomic_load(&b->add, memory_order_relaxed)) {
  198 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
  201 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  217 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  229 AddBucket *add = (AddBucket*)atomic_load(&b->add, memory_order_relaxed);
  233 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  257 uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  272 atomic_store(&b->add, (uptr)add, memory_order_relaxed);
  284 atomic_store(&b->add, (uptr)add1, memory_order_relaxed);
  290 CHECK_EQ(atomic_load(&c->addr, memory_order_relaxed), 0) [all...]

sanitizer_stackdepot.cc
  40 atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
  75 atomic_store(&hash_and_use_count, hash & kHashMask, memory_order_relaxed);
  92 return atomic_load(&node_->hash_and_use_count, memory_order_relaxed) &
  97 atomic_fetch_add(&node_->hash_and_use_count, 1, memory_order_relaxed) &
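
Each depot node packs the stack hash and a use count into one atomic u32, so line 97's relaxed fetch_add bumps the count while masks recover each field. The exact bit split is not visible in the matches; the 20-bit count below is an assumption:

    #include <atomic>
    #include <cstdint>

    // Assumed split: use count in the low 20 bits, hash in the high 12.
    static constexpr std::uint32_t kUseCountBits = 20;
    static constexpr std::uint32_t kUseCountMask = (1u << kUseCountBits) - 1;
    static constexpr std::uint32_t kHashMask = ~kUseCountMask;

    struct StackDepotNode {
      std::atomic<std::uint32_t> hash_and_use_count{0};

      void store_hash(std::uint32_t hash) {  // cf. line 75
        hash_and_use_count.store(hash & kHashMask, std::memory_order_relaxed);
      }
      std::uint32_t hash() const {           // cf. lines 40 and 92
        return hash_and_use_count.load(std::memory_order_relaxed) & kHashMask;
      }
      std::uint32_t inc_use_count() {        // cf. line 97
        // fetch_add(1) touches only the low bits, leaving the hash intact
        // (assuming the count never overflows its field).
        return hash_and_use_count.fetch_add(1, std::memory_order_relaxed) &
               kUseCountMask;
      }
    };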

sanitizer_tls_get_addr.cc
  50 atomic_fetch_sub(&number_of_live_dtls, 1, memory_order_relaxed);
  60 atomic_fetch_add(&number_of_live_dtls, 1, memory_order_relaxed);
  103 atomic_load(&number_of_live_dtls, memory_order_relaxed));

sanitizer_libignore.cc
  83 atomic_load(&ignored_ranges_count_, memory_order_relaxed);
  112 atomic_load(&instrumented_ranges_count_, memory_order_relaxed);

sanitizer_atomic_msvc.h
  83 DCHECK(mo & (memory_order_relaxed | memory_order_consume
  88 if (mo == memory_order_relaxed) {
  100 DCHECK(mo & (memory_order_relaxed | memory_order_release
  104 if (mo == memory_order_relaxed) {

sanitizer_persistent_allocator.h
  56 atomic_store(&region_pos, 0, memory_order_relaxed);

sanitizer_termination.cc
  73 if (atomic_fetch_add(&num_calls, 1, memory_order_relaxed) > 10) {
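
Line 73 is a recursion guard: if Die (or one of its callbacks) ends up calling Die again, a relaxed counter notices after a few rounds and terminates hard instead of looping forever. A sketch; everything but the counter idiom is scaffolding:

    #include <atomic>
    #include <cstdio>
    #include <cstdlib>

    static std::atomic<int> num_calls{0};

    void Die() {
      // Relaxed is fine: we only need an approximate "this keeps happening",
      // not any ordering with other memory.
      if (num_calls.fetch_add(1, std::memory_order_relaxed) > 10) {
        std::fputs("Die() recursed too many times, aborting\n", stderr);
        std::abort();
      }
      // ... run registered die callbacks, then exit ...
      std::exit(1);
    }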

sanitizer_allocator.cc
  100 if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
  243 return atomic_load(&allocator_may_return_null, memory_order_relaxed);
  248 memory_order_relaxed);

sanitizer_common.cc
  184 atomic_fetch_add(&g_total_mmaped, size, memory_order_relaxed) + size;
  192 atomic_fetch_sub(&g_total_mmaped, size, memory_order_relaxed);
  338 return !atomic_exchange(&in_crash_state, 1, memory_order_relaxed);
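
Lines 184 and 192 are plain relaxed accounting of the total mmap-ed bytes. Line 338 is the more interesting idiom, a one-shot latch: atomic_exchange returns the previous value, so exactly the first caller gets true and owns crash reporting, and relaxed suffices because no other data is published through the flag. A sketch (the function name is an assumption):

    #include <atomic>

    static std::atomic<unsigned> in_crash_state{0};

    // Hypothetical name; returns true for the first caller only.
    bool EnterCrashState() {
      return !in_crash_state.exchange(1, std::memory_order_relaxed);
    }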

sanitizer_stackdepotbase.h
  78 uptr cmp = atomic_load(p, memory_order_relaxed);
  119 u32 id = atomic_fetch_add(&seq[part], 1, memory_order_relaxed) + 1;
  171 uptr s = atomic_load(p, memory_order_relaxed);

sanitizer_deadlock_detector2.cc
  153 atomic_store(&m->owner, 0, memory_order_relaxed);
  196 uptr owner = atomic_load(&m->owner, memory_order_relaxed);
  275 uptr owner = atomic_load(&m->owner, memory_order_relaxed);
  287 atomic_store(&m->owner, (uptr)cb->lt, memory_order_relaxed);
  307 uptr owner = atomic_load(&m->owner, memory_order_relaxed);
  313 atomic_store(&m->owner, 0, memory_order_relaxed);

/src/sys/external/bsd/compiler_rt/dist/lib/tsan/rtl/

tsan_external.cc
  32 if (tag >= atomic_load(&used_tags, memory_order_relaxed)) return nullptr;
  51 uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);
  62 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));
  77 uptr new_tag = atomic_fetch_add(&used_tags, 1, memory_order_relaxed);
  100 CHECK_LT(tag, atomic_load(&used_tags, memory_order_relaxed));

tsan_fd.cc
  53 atomic_store(&s->rc, 1, memory_order_relaxed);
  58 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1)
  59 atomic_fetch_add(&s->rc, 1, memory_order_relaxed);
  64 if (s && atomic_load(&s->rc, memory_order_relaxed) != (u64)-1) {
  123 atomic_store(&fdctx.globsync.rc, (u64)-1, memory_order_relaxed);
  124 atomic_store(&fdctx.filesync.rc, (u64)-1, memory_order_relaxed);
  125 atomic_store(&fdctx.socksync.rc, (u64)-1, memory_order_relaxed);
  133 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
  145 FdDesc *tab = (FdDesc*)atomic_load(&fdctx.tab[l1], memory_order_relaxed);
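
tsan_fd.cc treats an rc of (u64)-1 as an "immortal" marker: the process-wide glob/file/socket sync objects get it at lines 123-125, and ref/unref skip anything carrying it. A sketch of the pattern; the acq_rel decrement in Unref is the standard refcounting requirement and is an assumption here, since only the relaxed operations show up in these matches:

    #include <atomic>
    #include <cstdint>

    struct FdSync {
      std::atomic<std::uint64_t> rc{1};
    };

    constexpr std::uint64_t kImmortal = ~std::uint64_t(0);  // (u64)-1

    void Ref(FdSync *s) {
      if (s && s->rc.load(std::memory_order_relaxed) != kImmortal)
        s->rc.fetch_add(1, std::memory_order_relaxed);  // cf. line 59
    }

    void Unref(FdSync *s) {
      if (s && s->rc.load(std::memory_order_relaxed) != kImmortal) {
        // The decrement that can free the object needs acq_rel so the last
        // user's writes happen-before the teardown (assumption, see above).
        if (s->rc.fetch_sub(1, std::memory_order_acq_rel) == 1)
          delete s;  // stand-in for the real teardown
      }
    }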

tsan_mutex.cc
  220 atomic_store(&state_, kUnlocked, memory_order_relaxed);
  224 CHECK_EQ(atomic_load(&state_, memory_order_relaxed), kUnlocked);
  236 if (atomic_load(&state_, memory_order_relaxed) == kUnlocked) {
  287 CHECK_NE(atomic_load(&state_, memory_order_relaxed), 0);

tsan_suppressions.cc
  109 atomic_fetch_add(&(*sp)->hit_count, 1, memory_order_relaxed);
  147 atomic_fetch_add(&s->hit_count, 1, memory_order_relaxed);

/src/tests/lib/libc/sync/

cpp_atomic_ops_linkable.cc
  54 std::memory_order_release, std::memory_order_relaxed);
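
Line 54 is the tail of a compare-exchange call in its two-order form: release applies if the exchange succeeds, relaxed if it fails. The test's purpose is linkability, i.e. every instantiated atomic operation must resolve (possibly against libatomic), rather than runtime behavior. A sketch of such an instantiation:

    #include <atomic>

    template <class T>
    void TestOps(std::atomic<T> &a, T v) {
      T expected = v;
      a.store(v, std::memory_order_relaxed);
      (void)a.load(std::memory_order_relaxed);
      // Two-order form, matching the quoted fragment: release on success,
      // relaxed on failure (the failure order may not be stronger).
      (void)a.compare_exchange_strong(expected, v, std::memory_order_release,
                                      std::memory_order_relaxed);
    }

    int main() {
      std::atomic<long long> a{0};
      TestOps(a, 42LL);  // 64-bit ops are the ones that may need libatomic
    }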

/src/sys/external/bsd/compiler_rt/dist/lib/asan/

asan_thread.h
  111 return !atomic_load(&stack_switching_, memory_order_relaxed) &&
  118 if (atomic_load(&stack_switching_, memory_order_relaxed))
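
asan_thread.h consults a relaxed stack_switching_ flag before trusting the thread's cached stack bounds, because during a fiber switch (__sanitizer_start_switch_fiber and friends) those bounds are briefly inconsistent. A sketch of the shape of the check at line 111; apart from the flag name and the relaxed load, the members here are assumptions:

    #include <atomic>
    #include <cstdint>

    class AsanThread {
      std::atomic<std::uint8_t> stack_switching_{0};
      std::uintptr_t stack_bottom_ = 0;
      std::uintptr_t stack_top_ = 0;
     public:
      bool AddrIsInStack(std::uintptr_t addr) const {
        // Relaxed: a racy miss during a switch is acceptable and handled by
        // the caller; no ordering with other memory is needed.
        return !stack_switching_.load(std::memory_order_relaxed) &&
               addr >= stack_bottom_ && addr < stack_top_;
      }
    };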