Home | History | Annotate | Line # | Download | only in internal
      1  1.1  christos #ifndef JEMALLOC_INTERNAL_PAI_H
      2  1.1  christos #define JEMALLOC_INTERNAL_PAI_H
      3  1.1  christos 
      4  1.1  christos /* An interface for page allocation. */
      5  1.1  christos 
/*
 * pai_t is a vtable-style interface: each backend fills in these hooks and
 * callers dispatch through the pai_* inline wrappers below.  Every hook takes
 * the pai_t itself as `self` so implementations can recover their containing
 * state, and reports via *deferred_work_generated whether the call produced
 * background work for later processing.
 */
typedef struct pai_s pai_t;
struct pai_s {
	/* Returns NULL on failure. */
	edata_t *(*alloc)(tsdn_t *tsdn, pai_t *self, size_t size,
	    size_t alignment, bool zero, bool guarded, bool frequent_reuse,
	    bool *deferred_work_generated);
	/*
	 * Returns the number of extents added to the list (which may be fewer
	 * than requested, in case of OOM).  The list should already be
	 * initialized.  The only alignment guarantee is page-alignment, and
	 * the results are not necessarily zeroed.
	 */
	size_t (*alloc_batch)(tsdn_t *tsdn, pai_t *self, size_t size,
	    size_t nallocs, edata_list_active_t *results,
	    bool *deferred_work_generated);
	/*
	 * Grow edata from old_size to new_size in place.
	 * NOTE(review): presumably returns true on failure, per the usual
	 * jemalloc bool-error convention — confirm against implementations.
	 */
	bool (*expand)(tsdn_t *tsdn, pai_t *self, edata_t *edata,
	    size_t old_size, size_t new_size, bool zero,
	    bool *deferred_work_generated);
	/* Shrink edata from old_size to new_size in place (same convention). */
	bool (*shrink)(tsdn_t *tsdn, pai_t *self, edata_t *edata,
	    size_t old_size, size_t new_size, bool *deferred_work_generated);
	/* Return edata to the implementation it was allocated from. */
	void (*dalloc)(tsdn_t *tsdn, pai_t *self, edata_t *edata,
	    bool *deferred_work_generated);
	/* This function empties out list as a side-effect of being called. */
	void (*dalloc_batch)(tsdn_t *tsdn, pai_t *self,
	    edata_list_active_t *list, bool *deferred_work_generated);
	/* Time (units per implementation) until deferred work should run. */
	uint64_t (*time_until_deferred_work)(tsdn_t *tsdn, pai_t *self);
};
     33  1.1  christos 
     34  1.1  christos /*
     35  1.1  christos  * These are just simple convenience functions to avoid having to reference the
     36  1.1  christos  * same pai_t twice on every invocation.
     37  1.1  christos  */
     38  1.1  christos 
     39  1.1  christos static inline edata_t *
     40  1.1  christos pai_alloc(tsdn_t *tsdn, pai_t *self, size_t size, size_t alignment,
     41  1.1  christos     bool zero, bool guarded, bool frequent_reuse,
     42  1.1  christos     bool *deferred_work_generated) {
     43  1.1  christos 	return self->alloc(tsdn, self, size, alignment, zero, guarded,
     44  1.1  christos 	    frequent_reuse, deferred_work_generated);
     45  1.1  christos }
     46  1.1  christos 
     47  1.1  christos static inline size_t
     48  1.1  christos pai_alloc_batch(tsdn_t *tsdn, pai_t *self, size_t size, size_t nallocs,
     49  1.1  christos     edata_list_active_t *results, bool *deferred_work_generated) {
     50  1.1  christos 	return self->alloc_batch(tsdn, self, size, nallocs, results,
     51  1.1  christos 	    deferred_work_generated);
     52  1.1  christos }
     53  1.1  christos 
     54  1.1  christos static inline bool
     55  1.1  christos pai_expand(tsdn_t *tsdn, pai_t *self, edata_t *edata, size_t old_size,
     56  1.1  christos     size_t new_size, bool zero, bool *deferred_work_generated) {
     57  1.1  christos 	return self->expand(tsdn, self, edata, old_size, new_size, zero,
     58  1.1  christos 	    deferred_work_generated);
     59  1.1  christos }
     60  1.1  christos 
     61  1.1  christos static inline bool
     62  1.1  christos pai_shrink(tsdn_t *tsdn, pai_t *self, edata_t *edata, size_t old_size,
     63  1.1  christos     size_t new_size, bool *deferred_work_generated) {
     64  1.1  christos 	return self->shrink(tsdn, self, edata, old_size, new_size,
     65  1.1  christos 	    deferred_work_generated);
     66  1.1  christos }
     67  1.1  christos 
     68  1.1  christos static inline void
     69  1.1  christos pai_dalloc(tsdn_t *tsdn, pai_t *self, edata_t *edata,
     70  1.1  christos     bool *deferred_work_generated) {
     71  1.1  christos 	self->dalloc(tsdn, self, edata, deferred_work_generated);
     72  1.1  christos }
     73  1.1  christos 
     74  1.1  christos static inline void
     75  1.1  christos pai_dalloc_batch(tsdn_t *tsdn, pai_t *self, edata_list_active_t *list,
     76  1.1  christos     bool *deferred_work_generated) {
     77  1.1  christos 	self->dalloc_batch(tsdn, self, list, deferred_work_generated);
     78  1.1  christos }
     79  1.1  christos 
     80  1.1  christos static inline uint64_t
     81  1.1  christos pai_time_until_deferred_work(tsdn_t *tsdn, pai_t *self) {
     82  1.1  christos 	return self->time_until_deferred_work(tsdn, self);
     83  1.1  christos }
     84  1.1  christos 
/*
 * An implementation of batch allocation that simply calls alloc once for
 * each item in the list.  Suitable as a pai_t alloc_batch hook for backends
 * with no native batching; returns the number of successful allocations.
 */
size_t pai_alloc_batch_default(tsdn_t *tsdn, pai_t *self, size_t size,
    size_t nallocs, edata_list_active_t *results, bool *deferred_work_generated);
/* Ditto, for dalloc: calls dalloc once per extent, emptying list. */
void pai_dalloc_batch_default(tsdn_t *tsdn, pai_t *self,
    edata_list_active_t *list, bool *deferred_work_generated);
     94  1.1  christos 
     95  1.1  christos #endif /* JEMALLOC_INTERNAL_PAI_H */
     96