#ifndef JEMALLOC_INTERNAL_LOCKEDINT_H
#define JEMALLOC_INTERNAL_LOCKEDINT_H

/*
 * On architectures that support 64-bit atomics, we use atomic updates for our
 * 64-bit values.  Otherwise, we use a plain uint64_t and synchronize
 * externally.
 */

typedef struct locked_u64_s locked_u64_t;
#ifdef JEMALLOC_ATOMIC_U64
struct locked_u64_s {
	atomic_u64_t val;
};
#else
/* Must hold the associated mutex. */
struct locked_u64_s {
	uint64_t val;
};
#endif

typedef struct locked_zu_s locked_zu_t;
struct locked_zu_s {
	atomic_zu_t val;
};

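/*
 * Mutex plumbing for the non-atomic configuration.  With 64-bit atomics
 * available there is no mutex to manage, so these macros compile away
 * (LOCKEDINT_MTX yields NULL and LOCKEDINT_MTX_INIT reports success).
 */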
#ifndef JEMALLOC_ATOMIC_U64
#  define LOCKEDINT_MTX_DECLARE(name) malloc_mutex_t name;
#  define LOCKEDINT_MTX_INIT(mu, name, rank, rank_mode)			\
    malloc_mutex_init(&(mu), name, rank, rank_mode)
#  define LOCKEDINT_MTX(mtx) (&(mtx))
#  define LOCKEDINT_MTX_LOCK(tsdn, mu) malloc_mutex_lock(tsdn, &(mu))
#  define LOCKEDINT_MTX_UNLOCK(tsdn, mu) malloc_mutex_unlock(tsdn, &(mu))
#  define LOCKEDINT_MTX_PREFORK(tsdn, mu) malloc_mutex_prefork(tsdn, &(mu))
#  define LOCKEDINT_MTX_POSTFORK_PARENT(tsdn, mu)			\
    malloc_mutex_postfork_parent(tsdn, &(mu))
#  define LOCKEDINT_MTX_POSTFORK_CHILD(tsdn, mu)			\
    malloc_mutex_postfork_child(tsdn, &(mu))
#else
#  define LOCKEDINT_MTX_DECLARE(name)
#  define LOCKEDINT_MTX(mtx) NULL
#  define LOCKEDINT_MTX_INIT(mu, name, rank, rank_mode) false
#  define LOCKEDINT_MTX_LOCK(tsdn, mu)
#  define LOCKEDINT_MTX_UNLOCK(tsdn, mu)
#  define LOCKEDINT_MTX_PREFORK(tsdn, mu)
#  define LOCKEDINT_MTX_POSTFORK_PARENT(tsdn, mu)
#  define LOCKEDINT_MTX_POSTFORK_CHILD(tsdn, mu)
#endif

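/*
 * A minimal usage sketch (stats_t and its fields are hypothetical, for
 * illustration only):
 *
 *	typedef struct stats_s {
 *		LOCKEDINT_MTX_DECLARE(mtx)
 *		locked_u64_t nrequests;
 *	} stats_t;
 *
 *	LOCKEDINT_MTX_LOCK(tsdn, stats->mtx);
 *	locked_inc_u64(tsdn, LOCKEDINT_MTX(stats->mtx), &stats->nrequests, 1);
 *	LOCKEDINT_MTX_UNLOCK(tsdn, stats->mtx);
 *
 * With 64-bit atomics, the lock/unlock lines expand to nothing and
 * LOCKEDINT_MTX yields NULL; the assertion below checks whichever contract
 * applies.
 */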
#ifdef JEMALLOC_ATOMIC_U64
#  define LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx) assert((mtx) == NULL)
#else
#  define LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx)			\
    malloc_mutex_assert_owner(tsdn, (mtx))
#endif

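/*
 * Reads the value.  In the non-atomic configuration the caller must hold the
 * associated mutex; with 64-bit atomics, mtx must be NULL (there is no mutex).
 */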
static inline uint64_t
locked_read_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p) {
	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
#ifdef JEMALLOC_ATOMIC_U64
	return atomic_load_u64(&p->val, ATOMIC_RELAXED);
#else
	return p->val;
#endif
}

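/* Adds x to the value, under the same locking contract as locked_read_u64. */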
static inline void
locked_inc_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p,
    uint64_t x) {
	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
#ifdef JEMALLOC_ATOMIC_U64
	atomic_fetch_add_u64(&p->val, x, ATOMIC_RELAXED);
#else
	p->val += x;
#endif
}

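/*
 * Subtracts x from the value, under the same locking contract as
 * locked_read_u64.  The assert in each branch catches underflow.
 */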
static inline void
locked_dec_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p,
    uint64_t x) {
	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
#ifdef JEMALLOC_ATOMIC_U64
	uint64_t r = atomic_fetch_sub_u64(&p->val, x, ATOMIC_RELAXED);
	assert(r - x <= r);
#else
	p->val -= x;
	assert(p->val + x >= p->val);
#endif
}

/* Increment and take modulus.  Returns whether the modulo made any change. */
static inline bool
locked_inc_mod_u64(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_u64_t *p,
    const uint64_t x, const uint64_t modulus) {
	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
	uint64_t before, after;
	bool overflow;
#ifdef JEMALLOC_ATOMIC_U64
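	/*
	 * CAS loop: on failure, atomic_compare_exchange_weak_u64 refreshes
	 * "before" with the value another thread just wrote, and we
	 * recompute "after" from it.
	 */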
	before = atomic_load_u64(&p->val, ATOMIC_RELAXED);
	do {
		after = before + x;
		assert(after >= before);
		overflow = (after >= modulus);
		if (overflow) {
			after %= modulus;
		}
	} while (!atomic_compare_exchange_weak_u64(&p->val, &before, after,
	    ATOMIC_RELAXED, ATOMIC_RELAXED));
#else
	before = p->val;
	after = before + x;
	overflow = (after >= modulus);
	if (overflow) {
		after %= modulus;
	}
	p->val = after;
#endif
	return overflow;
}

/*
 * Non-atomically sets *dst += src.  *dst needs external synchronization.
 * This lets us avoid the cost of a fetch_add when it's unnecessary (note that
 * the types here are atomic).
 */
static inline void
locked_inc_u64_unsynchronized(locked_u64_t *dst, uint64_t src) {
#ifdef JEMALLOC_ATOMIC_U64
	uint64_t cur_dst = atomic_load_u64(&dst->val, ATOMIC_RELAXED);
	atomic_store_u64(&dst->val, src + cur_dst, ATOMIC_RELAXED);
#else
	dst->val += src;
#endif
}

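/*
 * Reads the value without asserting any locking contract; in the non-atomic
 * configuration the caller is responsible for external synchronization.
 */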
static inline uint64_t
locked_read_u64_unsynchronized(locked_u64_t *p) {
#ifdef JEMALLOC_ATOMIC_U64
	return atomic_load_u64(&p->val, ATOMIC_RELAXED);
#else
	return p->val;
#endif
}

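/*
 * Sets the initial value; as with the other _unsynchronized operations, any
 * needed synchronization is the caller's responsibility.
 */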
static inline void
locked_init_u64_unsynchronized(locked_u64_t *p, uint64_t x) {
#ifdef JEMALLOC_ATOMIC_U64
	atomic_store_u64(&p->val, x, ATOMIC_RELAXED);
#else
	p->val = x;
#endif
}

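/*
 * Both configurations store the zu value atomically, so the read itself is
 * the same either way; the assert still enforces the locking contract.
 */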
static inline size_t
locked_read_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p) {
	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
	return atomic_load_zu(&p->val, ATOMIC_RELAXED);
}

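/*
 * Adds x.  Without 64-bit atomics this is a load/store pair instead of a
 * fetch_add: the mutex already provides mutual exclusion, so the cheaper
 * non-read-modify-write form suffices.
 */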
static inline void
locked_inc_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p,
    size_t x) {
	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
#ifdef JEMALLOC_ATOMIC_U64
	atomic_fetch_add_zu(&p->val, x, ATOMIC_RELAXED);
#else
	size_t cur = atomic_load_zu(&p->val, ATOMIC_RELAXED);
	atomic_store_zu(&p->val, cur + x, ATOMIC_RELAXED);
#endif
}

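/*
 * Subtracts x.  The atomic path asserts against underflow; the mutex path,
 * like locked_inc_zu, uses a load/store pair.
 */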
static inline void
locked_dec_zu(tsdn_t *tsdn, malloc_mutex_t *mtx, locked_zu_t *p,
    size_t x) {
	LOCKEDINT_MTX_ASSERT_INTERNAL(tsdn, mtx);
#ifdef JEMALLOC_ATOMIC_U64
	size_t r = atomic_fetch_sub_zu(&p->val, x, ATOMIC_RELAXED);
	assert(r - x <= r);
#else
	size_t cur = atomic_load_zu(&p->val, ATOMIC_RELAXED);
	atomic_store_zu(&p->val, cur - x, ATOMIC_RELAXED);
#endif
}

/* Like the _u64 variant, needs an externally synchronized *dst. */
static inline void
locked_inc_zu_unsynchronized(locked_zu_t *dst, size_t src) {
	size_t cur_dst = atomic_load_zu(&dst->val, ATOMIC_RELAXED);
	atomic_store_zu(&dst->val, src + cur_dst, ATOMIC_RELAXED);
}

/*
 * Unlike the _u64 variant, this is safe to call with no lock held, since the
 * underlying value is always stored atomically.
 */
static inline size_t
locked_read_atomic_zu(locked_zu_t *p) {
	return atomic_load_zu(&p->val, ATOMIC_RELAXED);
}

#endif /* JEMALLOC_INTERNAL_LOCKEDINT_H */