    Searched refs: memory_order_acquire (Results 1 - 25 of 46) sorted by relevance


  /src/sys/external/bsd/compiler_rt/dist/lib/asan/
asan_stack.cc 27 return atomic_load(&malloc_context_size, memory_order_acquire);
asan_allocator.cc 337 options->min_redzone = atomic_load(&min_redzone, memory_order_acquire);
338 options->max_redzone = atomic_load(&max_redzone, memory_order_acquire);
341 atomic_load(&alloc_dealloc_mismatch, memory_order_acquire);
355 u32 min_rz = atomic_load(&min_redzone, memory_order_acquire);
356 u32 max_rz = atomic_load(&max_redzone, memory_order_acquire);
556 memory_order_acquire)) {
634 if (atomic_load(&alloc_dealloc_mismatch, memory_order_acquire)) {
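The asan hits above read allocator options (min_redzone, max_redzone, alloc_dealloc_mismatch) with load-acquire so a reader observes values another thread published with a matching release store. A minimal sketch of that publish/read pairing, using std::atomic rather than the runtime's own __sanitizer::atomic_* wrappers; the field names are taken from the results, the rest is illustrative.

```cpp
#include <atomic>
#include <cstdint>

// Illustrative sketch of the option-publication pattern visible in
// asan_allocator.cc: the writer stores each option with release, readers
// load with acquire, so a reader that sees a new value also sees whatever
// the publisher wrote before the store.
struct AllocatorOptionsSketch {
  std::atomic<uint32_t> min_redzone{16};
  std::atomic<uint32_t> max_redzone{2048};
  std::atomic<uint8_t>  alloc_dealloc_mismatch{0};

  void Publish(uint32_t min_rz, uint32_t max_rz, bool mismatch) {
    min_redzone.store(min_rz, std::memory_order_release);
    max_redzone.store(max_rz, std::memory_order_release);
    alloc_dealloc_mismatch.store(mismatch ? 1 : 0, std::memory_order_release);
  }

  void Read(uint32_t *min_rz, uint32_t *max_rz, bool *mismatch) const {
    *min_rz   = min_redzone.load(std::memory_order_acquire);
    *max_rz   = max_redzone.load(std::memory_order_acquire);
    *mismatch = alloc_dealloc_mismatch.load(std::memory_order_acquire) != 0;
  }
};
```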
  /src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/
sanitizer_persistent_allocator.h 38 uptr cmp = atomic_load(&region_pos, memory_order_acquire);
39 uptr end = atomic_load(&region_end, memory_order_acquire);
42 memory_order_acquire))
sanitizer_mutex.h 36 return atomic_exchange(&state_, 1, memory_order_acquire) == 0;
57 && atomic_exchange(&state_, 1, memory_order_acquire) == 0)
111 memory_order_acquire))
123 u32 prev = atomic_fetch_add(&state_, kReadLock, memory_order_acquire);
158 memory_order_acquire))
169 u32 prev = atomic_load(&state_, memory_order_acquire);
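The sanitizer_mutex.h hits show the canonical spinlock idiom: TryLock exchanges 1 into the state with acquire ordering and succeeds if the previous value was 0; the matching Unlock (not matched by this search) stores 0 with release. A hedged sketch with std::atomic; the class and member names here are illustrative, not the runtime's.

```cpp
#include <atomic>

// Sketch of the exchange-acquire spinlock idiom seen in sanitizer_mutex.h.
// The acquire on a successful exchange pairs with the release store in
// Unlock(), so everything the previous holder wrote is visible here.
class SpinMutexSketch {
  std::atomic<unsigned> state_{0};

 public:
  bool TryLock() {
    return state_.exchange(1, std::memory_order_acquire) == 0;
  }

  void Lock() {
    while (!TryLock()) {
      // Spin with relaxed loads to avoid hammering the cache line.
      while (state_.load(std::memory_order_relaxed) != 0) { /* spin */ }
    }
  }

  void Unlock() {
    state_.store(0, std::memory_order_release);
  }
};
```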
sanitizer_lfstack.h 49 u64 cmp = atomic_load(&head_, memory_order_acquire);
58 memory_order_acquire))
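The sanitizer_lfstack.h hits are a lock-free stack whose pop loads the head with acquire before a compare-exchange. A simplified Treiber-stack sketch of that ordering; the real header packs a pointer and an ABA counter into one 64-bit word and manages node reuse, which this sketch omits.

```cpp
#include <atomic>

// Simplified Treiber stack in the spirit of sanitizer_lfstack.h. The ABA
// counter and node reclamation of the real code are omitted; the point is
// the acquire on the head load pairing with the release on push.
template <class T>
struct LFStackSketch {
  struct Node { T value; Node *next; };
  std::atomic<Node *> head_{nullptr};

  void Push(Node *n) {
    Node *cmp = head_.load(std::memory_order_relaxed);
    do {
      n->next = cmp;
      // Release publishes the node contents together with the link.
    } while (!head_.compare_exchange_weak(cmp, n,
                                          std::memory_order_release,
                                          std::memory_order_relaxed));
  }

  Node *Pop() {
    // Acquire orders our reads of the popped node's fields after the
    // pushing thread's release.
    Node *cmp = head_.load(std::memory_order_acquire);
    while (cmp &&
           !head_.compare_exchange_weak(cmp, cmp->next,
                                        std::memory_order_acquire,
                                        std::memory_order_acquire)) {
    }
    return cmp;
  }
};
```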
sanitizer_libignore.h 92 const uptr n = atomic_load(&ignored_ranges_count_, memory_order_acquire);
106 const uptr n = atomic_load(&instrumented_ranges_count_, memory_order_acquire);
sanitizer_atomic.h 24 memory_order_acquire = 1 << 2, enumerator in enum:__sanitizer::memory_order
sanitizer_atomic_clang_other.h 29 | memory_order_acquire | memory_order_seq_cst));
43 } else if (mo == memory_order_acquire) {
sanitizer_atomic_clang_x86.h 31 | memory_order_acquire | memory_order_seq_cst));
45 } else if (mo == memory_order_acquire) {
sanitizer_allocator_bytemap.h 88 atomic_load(&map1_[idx], memory_order_acquire));
sanitizer_rtems.cc 122 if (atomic_exchange(m, MtxLocked, memory_order_acquire) == MtxUnlocked)
124 while (atomic_exchange(m, MtxSleeping, memory_order_acquire) != MtxUnlocked) {
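The sanitizer_rtems.cc hits are the futex-style lock shape: the fast path exchanges MtxLocked with acquire, and contended callers exchange MtxSleeping until they observe MtxUnlocked. A rough sketch of that three-state pattern; the platform wait/wake calls are elided.

```cpp
#include <atomic>

// Three-state futex-style lock sketch matching the shape of the
// sanitizer_rtems.cc hits. MtxSleeping tells the unlocker that a waiter
// may need to be woken; the actual wait/wake primitives are omitted.
enum MutexStateSketch { MtxUnlocked = 0, MtxLocked = 1, MtxSleeping = 2 };

inline void LockSketch(std::atomic<int> *m) {
  if (m->exchange(MtxLocked, std::memory_order_acquire) == MtxUnlocked)
    return;  // fast path: took an uncontended lock
  while (m->exchange(MtxSleeping, std::memory_order_acquire) != MtxUnlocked) {
    // real code would block or yield here
  }
}

inline void UnlockSketch(std::atomic<int> *m) {
  if (m->exchange(MtxUnlocked, std::memory_order_release) == MtxSleeping) {
    // real code would wake a sleeper here
  }
}
```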
  /src/sys/external/bsd/compiler_rt/dist/lib/xray/
xray_init.cc 67 if (atomic_load(&XRayInitialized, memory_order_acquire))
73 if (!atomic_load(&XRayFlagsInitialized, memory_order_acquire)) {
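The xray_init.cc hits are an initialize-once handshake: the flag is checked with acquire and set with a release store only after initialization finished, so a thread that observes the flag also observes the initialized state. A hedged sketch of the ordering; the real initializer also serializes the slow path (e.g. under a lock), which this omits.

```cpp
#include <atomic>

// Init-once flag pattern as in xray_init.cc: the release store publishes
// everything done by Initialize(), the acquire load makes it visible to
// whoever later sees the flag as set. Names here are illustrative.
std::atomic<bool> Initialized{false};

void InitializeOnceSketch(void (*Initialize)()) {
  if (Initialized.load(std::memory_order_acquire))
    return;                        // already published
  Initialize();                    // set up state
  Initialized.store(true, std::memory_order_release);
}
```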
xray_interface.cc 182 memory_order_acquire))
233 memory_order_acquire))
375 memory_order_acquire)) {
388 memory_order_acquire)) {
400 memory_order_acquire)) {
451 memory_order_acquire))
xray_buffer_queue.h 234 return atomic_load(&Finalizing, memory_order_acquire);
238 return atomic_load(&Generation, memory_order_acquire);
xray_basic_logging.cc 95 if (atomic_load(&UseRealTSC, memory_order_acquire))
107 Header.CycleFrequency = atomic_load(&CycleFrequency, memory_order_acquire);
425 atomic_load(&TicksPerSec, memory_order_acquire) *
428 __xray_set_handler_arg1(atomic_load(&UseRealTSC, memory_order_acquire)
431 __xray_set_handler(atomic_load(&UseRealTSC, memory_order_acquire)
xray_buffer_queue.cc 163 if (atomic_load(&Finalizing, memory_order_acquire))
213 atomic_store(B->Buff.Extents, atomic_load(Buf.Extents, memory_order_acquire),
xray_fdr_logging.cc 253 atomic_thread_fence(memory_order_acquire);
254 auto BufferSize = atomic_load(It->Extents, memory_order_acquire);
281 if (atomic_load(&LoggingStatus, memory_order_acquire) !=
368 auto BufferExtents = atomic_load(B.Extents, memory_order_acquire);
450 auto Status = atomic_load(&LoggingStatus, memory_order_acquire);
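The xray_fdr_logging.cc hits read a buffer's Extents (bytes written by another thread) with acquire before consuming the buffer contents. A sketch of that extents handshake with illustrative names; the fence at line 253 of the real file precedes a similar acquire load.

```cpp
#include <atomic>
#include <cstddef>
#include <cstdint>

// Extents handshake in the spirit of xray_fdr_logging.cc: the writer fills
// Data and then release-stores the byte count; a reader that acquire-loads
// a count N may safely read the first N bytes.
struct BufferSketch {
  std::atomic<uint64_t> Extents{0};
  char Data[4096];
};

void ProduceSketch(BufferSketch &B, const char *In, uint64_t N) {
  for (uint64_t i = 0; i < N; ++i) B.Data[i] = In[i];
  B.Extents.store(N, std::memory_order_release);  // publish the bytes
}

size_t ConsumeSketch(const BufferSketch &B, char *Out) {
  // Acquire pairs with the writer's release store of Extents, so the first
  // N bytes of Data are visible once N is observed.
  uint64_t N = B.Extents.load(std::memory_order_acquire);
  for (uint64_t i = 0; i < N; ++i) Out[i] = B.Data[i];
  return static_cast<size_t>(N);
}
```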
xray_profile_collector.cc 116 if (!atomic_load(&CollectorInitialized, memory_order_acquire)) {
234 if (!atomic_load(&CollectorInitialized, memory_order_acquire))
  /src/sys/external/bsd/compiler_rt/dist/lib/sanitizer_common/tests/
sanitizer_atomic_test.cc 57 CheckStoreLoad<atomic_uint8_t, memory_order_acquire, memory_order_relaxed>();
63 CheckStoreLoad<atomic_uint16_t, memory_order_acquire, memory_order_relaxed>();
69 CheckStoreLoad<atomic_uint32_t, memory_order_acquire, memory_order_relaxed>();
75 CheckStoreLoad<atomic_uint64_t, memory_order_acquire, memory_order_relaxed>();
83 CheckStoreLoad<atomic_uintptr_t, memory_order_acquire, memory_order_relaxed>
  /src/sys/external/bsd/compiler_rt/dist/lib/tsan/rtl/
tsan_mutex.cc 233 memory_order_acquire))
239 memory_order_acquire)) {
262 uptr prev = atomic_fetch_add(&state_, kReadLock, memory_order_acquire);
266 prev = atomic_load(&state_, memory_order_acquire);
tsan_clock.cc 94 u32 v = atomic_load(ref, memory_order_acquire);
302 if (atomic_load(ref, memory_order_acquire) == 1)
461 u32 v = atomic_load(ref, memory_order_acquire);
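The tsan_clock.cc hits load a reference count with acquire before reusing or releasing shared clock storage, so the last owner's view is ordered after the other owners' release-decrements. The conventional form of that idiom, sketched with illustrative names; the real code manages block reuse differently.

```cpp
#include <atomic>

// Conventional shared-ownership teardown: each owner release-decrements,
// and the owner that observes the count drop to zero acquires before it
// frees the shared object, so every other owner's writes happen-before
// the delete.
struct SharedBlockSketch {
  std::atomic<unsigned> refs{1};
  int payload = 0;
};

void DropRef(SharedBlockSketch *b) {
  if (b->refs.fetch_sub(1, std::memory_order_release) == 1) {
    std::atomic_thread_fence(std::memory_order_acquire);
    delete b;  // safe: no other owner can still be writing
  }
}
```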
  /src/sys/external/bsd/compiler_rt/dist/lib/xray/tests/unit/
buffer_queue_test.cc 215 while (Counter.load(std::memory_order_acquire) != 2)
222 while (Counter.load(std::memory_order_acquire) != 0)
231 ASSERT_EQ(Counter.load(std::memory_order_acquire), 0);
  /src/sys/external/bsd/compiler_rt/dist/lib/tsan/tests/rtl/
tsan_mutex.cc 169 int *val = (int *)atomic_load(singleton, memory_order_acquire);
215 uptr v = atomic_load(&flag, memory_order_acquire);
tsan_test_util_posix.cc 335 Event* ev = (Event*)atomic_load(&impl->event, memory_order_acquire);
357 while (atomic_load(&event, memory_order_acquire) != 0)
  /src/sys/external/bsd/compiler_rt/dist/lib/lsan/
lsan_interceptors.cc 363 while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)
401 while (atomic_load(&p.tid, memory_order_acquire) != 0)
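The lsan_interceptors.cc hits spin until another thread publishes its thread id; acquire ensures that whatever that thread set up before the release store of tid is visible to the waiter. A sketch of the handshake; the struct and helper names are illustrative.

```cpp
#include <atomic>
#include <thread>

// Handshake sketch in the spirit of the lsan_interceptors.cc hits: the
// registering thread release-stores its id after finishing setup, and the
// waiter acquire-loads until it sees a non-zero value.
struct ThreadParamSketch {
  std::atomic<unsigned long> tid{0};
  void *thread_state = nullptr;  // written before tid is published
};

unsigned long WaitForTid(const ThreadParamSketch &p) {
  unsigned long tid;
  while ((tid = p.tid.load(std::memory_order_acquire)) == 0)
    std::this_thread::yield();  // avoid burning CPU while waiting
  return tid;  // p.thread_state is now safely readable
}
```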

Completed in 22 milliseconds
