NOVA User-Level Environment  Version testbox/changed-memory-timing-317-g320d8b5
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
dlmalloc.c File Reference
#include "dlmalloc-config.h"
#include <stddef.h>
#include <stdlib.h>
#include <pthread.h>

Classes

struct  mallinfo
struct  malloc_chunk
struct  malloc_tree_chunk
struct  malloc_segment
struct  malloc_state
struct  malloc_params

Macros

#define DLMALLOC_VERSION   20805
#define MAX_SIZE_T   (~(size_t)0)
#define ONLY_MSPACES   0
#define MSPACES   0
#define FOOTERS   0
#define ABORT   abort()
#define ABORT_ON_ASSERT_FAILURE   1
#define PROCEED_ON_ERROR   0
#define INSECURE   0
#define MALLOC_INSPECT_ALL   0
#define MMAP_CLEARS   1
#define MORECORE_CONTIGUOUS   0
#define DEFAULT_TRIM_THRESHOLD   ((size_t)2U * (size_t)1024U * (size_t)1024U)
#define DEFAULT_MMAP_THRESHOLD   ((size_t)256U * (size_t)1024U)
#define MAX_RELEASE_CHECK_RATE   4095
#define USE_BUILTIN_FFS   0
#define USE_DEV_RANDOM   0
#define NO_MALLINFO   0
#define MALLINFO_FIELD_TYPE   size_t
#define NO_MALLOC_STATS   0
#define NO_SEGMENT_TRAVERSAL   0
#define M_TRIM_THRESHOLD   (-1)
#define M_GRANULARITY   (-2)
#define M_MMAP_THRESHOLD   (-3)
#define _STRUCT_MALLINFO
#define STRUCT_MALLINFO_DECLARED   1
#define NOINLINE
#define FORCEINLINE
#define malloc_getpagesize   ((size_t)4096U)
#define SIZE_T_SIZE   (sizeof(size_t))
#define SIZE_T_BITSIZE   (sizeof(size_t) << 3)
#define SIZE_T_ZERO   ((size_t)0)
#define SIZE_T_ONE   ((size_t)1)
#define SIZE_T_TWO   ((size_t)2)
#define SIZE_T_FOUR   ((size_t)4)
#define TWO_SIZE_T_SIZES   (SIZE_T_SIZE<<1)
#define FOUR_SIZE_T_SIZES   (SIZE_T_SIZE<<2)
#define SIX_SIZE_T_SIZES   (FOUR_SIZE_T_SIZES+TWO_SIZE_T_SIZES)
#define HALF_MAX_SIZE_T   (MAX_SIZE_T / 2U)
#define CHUNK_ALIGN_MASK   (MALLOC_ALIGNMENT - SIZE_T_ONE)
#define is_aligned(A)   (((size_t)((A)) & (CHUNK_ALIGN_MASK)) == 0)
#define align_offset(A)
#define MFAIL   ((void*)(MAX_SIZE_T))
#define CMFAIL   ((char*)(MFAIL)) /* defined for convenience */
#define MUNMAP_DEFAULT(a, s)   munmap((a), (s))
#define MMAP_PROT   (PROT_READ|PROT_WRITE)
#define MMAP_FLAGS   (MAP_PRIVATE|MAP_ANONYMOUS)
#define MMAP_DEFAULT(s)   mmap(0, (s), MMAP_PROT, MMAP_FLAGS, -1, 0)
#define DIRECT_MMAP_DEFAULT(s)   MMAP_DEFAULT(s)
#define CALL_MORECORE(S)   MFAIL
 Define CALL_MORECORE.
#define USE_MMAP_BIT   (SIZE_T_ONE)
 Define CALL_MMAP/CALL_MUNMAP/CALL_DIRECT_MMAP.
#define CALL_MMAP(s)   MMAP_DEFAULT(s)
#define CALL_MUNMAP(a, s)   MUNMAP_DEFAULT((a), (s))
#define CALL_DIRECT_MMAP(s)   DIRECT_MMAP_DEFAULT(s)
#define CALL_MREMAP(addr, osz, nsz, mv)   MFAIL
 Define CALL_MREMAP.
#define USE_NONCONTIGUOUS_BIT   (4U)
#define EXTERN_BIT   (8U)
#define MLOCK_T   cap_sel
#define INITIAL_LOCK(lk)   (semaphore_init(lk, 1), 0)
#define DESTROY_LOCK(lk)   (semaphore_destroy(lk), 0)
#define ACQUIRE_LOCK(lk)   (semaphore_down(lk), 0)
#define RELEASE_LOCK(lk)   (semaphore_up(lk), 0)
#define USE_LOCK_BIT   (2U)
#define ACQUIRE_MALLOC_GLOBAL_LOCK()   ACQUIRE_LOCK(&malloc_global_mutex);
#define RELEASE_MALLOC_GLOBAL_LOCK()   RELEASE_LOCK(&malloc_global_mutex);
#define MCHUNK_SIZE   (sizeof(mchunk))
#define CHUNK_OVERHEAD   (SIZE_T_SIZE)
#define MMAP_CHUNK_OVERHEAD   (TWO_SIZE_T_SIZES)
#define MMAP_FOOT_PAD   (FOUR_SIZE_T_SIZES)
#define MIN_CHUNK_SIZE   ((MCHUNK_SIZE + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define chunk2mem(p)   ((void*)((char*)(p) + TWO_SIZE_T_SIZES))
#define mem2chunk(mem)   ((mchunkptr)((char*)(mem) - TWO_SIZE_T_SIZES))
#define align_as_chunk(A)   (mchunkptr)((A) + align_offset(chunk2mem(A)))
#define MAX_REQUEST   ((-MIN_CHUNK_SIZE) << 2)
#define MIN_REQUEST   (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)
#define pad_request(req)   (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define request2size(req)   (((req) < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(req))
#define PINUSE_BIT   (SIZE_T_ONE)
#define CINUSE_BIT   (SIZE_T_TWO)
#define FLAG4_BIT   (SIZE_T_FOUR)
#define INUSE_BITS   (PINUSE_BIT|CINUSE_BIT)
#define FLAG_BITS   (PINUSE_BIT|CINUSE_BIT|FLAG4_BIT)
#define FENCEPOST_HEAD   (INUSE_BITS|SIZE_T_SIZE)
#define cinuse(p)   ((p)->head & CINUSE_BIT)
#define pinuse(p)   ((p)->head & PINUSE_BIT)
#define flag4inuse(p)   ((p)->head & FLAG4_BIT)
#define is_inuse(p)   (((p)->head & INUSE_BITS) != PINUSE_BIT)
#define is_mmapped(p)   (((p)->head & INUSE_BITS) == 0)
#define chunksize(p)   ((p)->head & ~(FLAG_BITS))
#define clear_pinuse(p)   ((p)->head &= ~PINUSE_BIT)
#define set_flag4(p)   ((p)->head |= FLAG4_BIT)
#define clear_flag4(p)   ((p)->head &= ~FLAG4_BIT)
#define chunk_plus_offset(p, s)   ((mchunkptr)(((char*)(p)) + (s)))
#define chunk_minus_offset(p, s)   ((mchunkptr)(((char*)(p)) - (s)))
#define next_chunk(p)   ((mchunkptr)( ((char*)(p)) + ((p)->head & ~FLAG_BITS)))
#define prev_chunk(p)   ((mchunkptr)( ((char*)(p)) - ((p)->prev_foot) ))
#define next_pinuse(p)   ((next_chunk(p)->head) & PINUSE_BIT)
#define get_foot(p, s)   (((mchunkptr)((char*)(p) + (s)))->prev_foot)
#define set_foot(p, s)   (((mchunkptr)((char*)(p) + (s)))->prev_foot = (s))
#define set_size_and_pinuse_of_free_chunk(p, s)   ((p)->head = (s|PINUSE_BIT), set_foot(p, s))
#define set_free_with_pinuse(p, s, n)   (clear_pinuse(n), set_size_and_pinuse_of_free_chunk(p, s))
#define overhead_for(p)   (is_mmapped(p)? MMAP_CHUNK_OVERHEAD : CHUNK_OVERHEAD)
#define calloc_must_clear(p)   (!is_mmapped(p))
#define leftmost_child(t)   ((t)->child[0] != 0? (t)->child[0] : (t)->child[1])
#define is_mmapped_segment(S)   ((S)->sflags & USE_MMAP_BIT)
#define is_extern_segment(S)   ((S)->sflags & EXTERN_BIT)
#define NSMALLBINS   (32U)
#define NTREEBINS   (32U)
#define SMALLBIN_SHIFT   (3U)
#define SMALLBIN_WIDTH   (SIZE_T_ONE << SMALLBIN_SHIFT)
#define TREEBIN_SHIFT   (8U)
#define MIN_LARGE_SIZE   (SIZE_T_ONE << TREEBIN_SHIFT)
#define MAX_SMALL_SIZE   (MIN_LARGE_SIZE - SIZE_T_ONE)
#define MAX_SMALL_REQUEST   (MAX_SMALL_SIZE - CHUNK_ALIGN_MASK - CHUNK_OVERHEAD)
#define ensure_initialization()   (void)(mparams.magic != 0 || init_mparams())
#define gm   (&_gm_)
#define is_global(M)   ((M) == &_gm_)
#define is_initialized(M)   ((M)->top != 0)
#define use_lock(M)   ((M)->mflags & USE_LOCK_BIT)
#define enable_lock(M)   ((M)->mflags |= USE_LOCK_BIT)
#define disable_lock(M)   ((M)->mflags &= ~USE_LOCK_BIT)
#define use_mmap(M)   ((M)->mflags & USE_MMAP_BIT)
#define enable_mmap(M)   ((M)->mflags |= USE_MMAP_BIT)
#define disable_mmap(M)   ((M)->mflags &= ~USE_MMAP_BIT)
#define use_noncontiguous(M)   ((M)->mflags & USE_NONCONTIGUOUS_BIT)
#define disable_contiguous(M)   ((M)->mflags |= USE_NONCONTIGUOUS_BIT)
#define set_lock(M, L)
#define page_align(S)   (((S) + (mparams.page_size - SIZE_T_ONE)) & ~(mparams.page_size - SIZE_T_ONE))
#define granularity_align(S)
#define mmap_align(S)   page_align(S)
#define SYS_ALLOC_PADDING   (TOP_FOOT_SIZE + MALLOC_ALIGNMENT)
#define is_page_aligned(S)   (((size_t)(S) & (mparams.page_size - SIZE_T_ONE)) == 0)
#define is_granularity_aligned(S)   (((size_t)(S) & (mparams.granularity - SIZE_T_ONE)) == 0)
#define segment_holds(S, A)   ((char*)(A) >= S->base && (char*)(A) < S->base + S->size)
#define should_trim(M, s)   ((s) > (M)->trim_check)
#define TOP_FOOT_SIZE   (align_offset(chunk2mem(0))+pad_request(sizeof(struct malloc_segment))+MIN_CHUNK_SIZE)
#define PREACTION(M)   ((use_lock(M))? ACQUIRE_LOCK(&(M)->mutex) : 0)
#define POSTACTION(M)   { if (use_lock(M)) RELEASE_LOCK(&(M)->mutex); }
#define CORRUPTION_ERROR_ACTION(m)   ABORT
#define USAGE_ERROR_ACTION(m, p)   ABORT
#define check_free_chunk(M, P)
#define check_inuse_chunk(M, P)
#define check_malloced_chunk(M, P, N)
#define check_mmapped_chunk(M, P)
#define check_malloc_state(M)
#define check_top_chunk(M, P)
#define is_small(s)   (((s) >> SMALLBIN_SHIFT) < NSMALLBINS)
#define small_index(s)   (bindex_t)((s) >> SMALLBIN_SHIFT)
#define small_index2size(i)   ((i) << SMALLBIN_SHIFT)
#define MIN_SMALL_INDEX   (small_index(MIN_CHUNK_SIZE))
#define smallbin_at(M, i)   ((sbinptr)((char*)&((M)->smallbins[(i)<<1])))
#define treebin_at(M, i)   (&((M)->treebins[i]))
#define compute_tree_index(S, I)
#define bit_for_tree_index(i)   (i == NTREEBINS-1)? (SIZE_T_BITSIZE-1) : (((i) >> 1) + TREEBIN_SHIFT - 2)
#define leftshift_for_tree_index(i)
#define minsize_for_tree_index(i)
#define idx2bit(i)   ((binmap_t)(1) << (i))
#define mark_smallmap(M, i)   ((M)->smallmap |= idx2bit(i))
#define clear_smallmap(M, i)   ((M)->smallmap &= ~idx2bit(i))
#define smallmap_is_marked(M, i)   ((M)->smallmap & idx2bit(i))
#define mark_treemap(M, i)   ((M)->treemap |= idx2bit(i))
#define clear_treemap(M, i)   ((M)->treemap &= ~idx2bit(i))
#define treemap_is_marked(M, i)   ((M)->treemap & idx2bit(i))
#define least_bit(x)   ((x) & -(x))
#define left_bits(x)   ((x<<1) | -(x<<1))
#define same_or_left_bits(x)   ((x) | -(x))
#define compute_bit2idx(X, I)
#define ok_address(M, a)   ((char*)(a) >= (M)->least_addr)
#define ok_next(p, n)   ((char*)(p) < (char*)(n))
#define ok_inuse(p)   is_inuse(p)
#define ok_pinuse(p)   pinuse(p)
#define ok_magic(M)   (1)
#define RTCHECK(e)   (e)
#define mark_inuse_foot(M, p, s)
#define set_inuse(M, p, s)
#define set_inuse_and_pinuse(M, p, s)
#define set_size_and_pinuse_of_inuse_chunk(M, p, s)   ((p)->head = (s|PINUSE_BIT|CINUSE_BIT))
#define insert_small_chunk(M, P, S)
#define unlink_small_chunk(M, P, S)
#define unlink_first_small_chunk(M, B, P, I)
#define replace_dv(M, P, S)
#define insert_large_chunk(M, X, S)
#define unlink_large_chunk(M, X)
#define insert_chunk(M, P, S)
#define unlink_chunk(M, P, S)
#define internal_malloc(m, b)   dlmalloc(b)
#define internal_free(m, mem)   dlfree(mem)
#define fm   gm

Typedefs

typedef struct malloc_chunk mchunk
typedef struct malloc_chunk* mchunkptr
typedef struct malloc_chunk* sbinptr
typedef unsigned int bindex_t
typedef unsigned int binmap_t
typedef unsigned int flag_t
typedef struct malloc_tree_chunk tchunk
typedef struct malloc_tree_chunk* tchunkptr
typedef struct malloc_tree_chunk* tbinptr
typedef struct malloc_segment msegment
typedef struct malloc_segment* msegmentptr
typedef struct malloc_state* mstate

Functions

DLMALLOC_EXPORT void * dlmalloc (size_t)
DLMALLOC_EXPORT void dlfree (void *)
DLMALLOC_EXPORT void * dlcalloc (size_t, size_t)
DLMALLOC_EXPORT void * dlrealloc (void *, size_t)
DLMALLOC_EXPORT void * dlrealloc_in_place (void *, size_t)
DLMALLOC_EXPORT void * dlmemalign (size_t, size_t)
DLMALLOC_EXPORT int dlposix_memalign (void **, size_t, size_t)
DLMALLOC_EXPORT void * dlvalloc (size_t)
DLMALLOC_EXPORT int dlmallopt (int, int)
DLMALLOC_EXPORT size_t dlmalloc_footprint (void)
DLMALLOC_EXPORT size_t dlmalloc_max_footprint (void)
DLMALLOC_EXPORT size_t dlmalloc_footprint_limit ()
DLMALLOC_EXPORT size_t dlmalloc_set_footprint_limit (size_t bytes)
DLMALLOC_EXPORT struct mallinfo dlmallinfo (void)
DLMALLOC_EXPORT void ** dlindependent_calloc (size_t, size_t, void **)
DLMALLOC_EXPORT void ** dlindependent_comalloc (size_t, size_t *, void **)
DLMALLOC_EXPORT size_t dlbulk_free (void **, size_t n_elements)
DLMALLOC_EXPORT void * dlpvalloc (size_t)
DLMALLOC_EXPORT int dlmalloc_trim (size_t)
DLMALLOC_EXPORT void dlmalloc_stats (void)
size_t dlmalloc_usable_size (void *)
void dlmalloc_init_locks (void)
void ** dlindependent_calloc (size_t n_elements, size_t elem_size, void *chunks[])
void ** dlindependent_comalloc (size_t n_elements, size_t sizes[], void *chunks[])
size_t dlbulk_free (void *array[], size_t nelem)

Macro Definition Documentation

#define _STRUCT_MALLINFO
#define ABORT   abort()
#define ABORT_ON_ASSERT_FAILURE   1
#define ACQUIRE_LOCK (   lk)    (semaphore_down(lk), 0)
#define ACQUIRE_MALLOC_GLOBAL_LOCK ( )    ACQUIRE_LOCK(&malloc_global_mutex);
#define align_as_chunk (   A)    (mchunkptr)((A) + align_offset(chunk2mem(A)))
#define align_offset (   A)
Value:
((((size_t)(A) & CHUNK_ALIGN_MASK) == 0)? 0 :\
((MALLOC_ALIGNMENT - ((size_t)(A) & CHUNK_ALIGN_MASK)) & CHUNK_ALIGN_MASK))
#define bit_for_tree_index (   i)    (i == NTREEBINS-1)? (SIZE_T_BITSIZE-1) : (((i) >> 1) + TREEBIN_SHIFT - 2)
#define CALL_DIRECT_MMAP (   s)    DIRECT_MMAP_DEFAULT(s)
#define CALL_MMAP (   s)    MMAP_DEFAULT(s)
#define CALL_MORECORE (   S)    MFAIL

Define CALL_MORECORE.

#define CALL_MREMAP (   addr,
  osz,
  nsz,
  mv 
)    MFAIL

Define CALL_MREMAP.

#define CALL_MUNMAP (   a,
  s 
)    MUNMAP_DEFAULT((a), (s))
#define calloc_must_clear (   p)    (!is_mmapped(p))
#define check_free_chunk (   M,
  P 
)
#define check_inuse_chunk (   M,
  P 
)
#define check_malloc_state (   M)
#define check_malloced_chunk (   M,
  P,
  N 
)
#define check_mmapped_chunk (   M,
  P 
)
#define check_top_chunk (   M,
  P 
)
#define chunk2mem (   p)    ((void*)((char*)(p) + TWO_SIZE_T_SIZES))
#define CHUNK_ALIGN_MASK   (MALLOC_ALIGNMENT - SIZE_T_ONE)
#define chunk_minus_offset (   p,
  s 
)    ((mchunkptr)(((char*)(p)) - (s)))
#define CHUNK_OVERHEAD   (SIZE_T_SIZE)
#define chunk_plus_offset (   p,
  s 
)    ((mchunkptr)(((char*)(p)) + (s)))
#define chunksize (   p)    ((p)->head & ~(FLAG_BITS))
#define cinuse (   p)    ((p)->head & CINUSE_BIT)
#define CINUSE_BIT   (SIZE_T_TWO)
#define clear_flag4 (   p)    ((p)->head &= ~FLAG4_BIT)
#define clear_pinuse (   p)    ((p)->head &= ~PINUSE_BIT)
#define clear_smallmap (   M,
  i 
)    ((M)->smallmap &= ~idx2bit(i))
#define clear_treemap (   M,
  i 
)    ((M)->treemap &= ~idx2bit(i))
#define CMFAIL   ((char*)(MFAIL)) /* defined for convenience */
#define compute_bit2idx (   X,
  I 
)
Value:
{\
unsigned int Y = X - 1;\
unsigned int K = Y >> (16-4) & 16;\
unsigned int N = K; Y >>= K;\
N += K = Y >> (8-3) & 8; Y >>= K;\
N += K = Y >> (4-2) & 4; Y >>= K;\
N += K = Y >> (2-1) & 2; Y >>= K;\
N += K = Y >> (1-0) & 1; Y >>= K;\
I = (bindex_t)(N + Y);\
}
#define compute_tree_index (   S,
  I 
)
Value:
{\
size_t X = S >> TREEBIN_SHIFT;\
if (X == 0)\
I = 0;\
else if (X > 0xFFFF)\
I = NTREEBINS-1;\
else {\
unsigned int Y = (unsigned int)X;\
unsigned int N = ((Y - 0x100) >> 16) & 8;\
unsigned int K = (((Y <<= N) - 0x1000) >> 16) & 4;\
N += K;\
N += K = (((Y <<= K) - 0x4000) >> 16) & 2;\
K = 14 - N + ((Y <<= K) >> 15);\
I = (K << 1) + ((S >> (K + (TREEBIN_SHIFT-1)) & 1));\
}\
}
#define CORRUPTION_ERROR_ACTION (   m)    ABORT
#define DEFAULT_MMAP_THRESHOLD   ((size_t)256U * (size_t)1024U)
#define DEFAULT_TRIM_THRESHOLD   ((size_t)2U * (size_t)1024U * (size_t)1024U)
#define DESTROY_LOCK (   lk)    (semaphore_destroy(lk), 0)
#define DIRECT_MMAP_DEFAULT (   s)    MMAP_DEFAULT(s)
#define disable_contiguous (   M)    ((M)->mflags |= USE_NONCONTIGUOUS_BIT)
#define disable_lock (   M)    ((M)->mflags &= ~USE_LOCK_BIT)
#define disable_mmap (   M)    ((M)->mflags &= ~USE_MMAP_BIT)
#define DLMALLOC_VERSION   20805
#define enable_lock (   M)    ((M)->mflags |= USE_LOCK_BIT)
#define enable_mmap (   M)    ((M)->mflags |= USE_MMAP_BIT)
#define ensure_initialization ( )    (void)(mparams.magic != 0 || init_mparams())
#define EXTERN_BIT   (8U)
#define FENCEPOST_HEAD   (INUSE_BITS|SIZE_T_SIZE)
#define FLAG4_BIT   (SIZE_T_FOUR)
#define flag4inuse (   p)    ((p)->head & FLAG4_BIT)
#define FLAG_BITS   (PINUSE_BIT|CINUSE_BIT|FLAG4_BIT)
#define fm   gm
#define FOOTERS   0
#define FORCEINLINE
#define FOUR_SIZE_T_SIZES   (SIZE_T_SIZE<<2)
#define get_foot (   p,
  s 
)    (((mchunkptr)((char*)(p) + (s)))->prev_foot)
#define gm   (&_gm_)
#define granularity_align (   S)
Value:
(((S) + (mparams.granularity - SIZE_T_ONE))\
& ~(mparams.granularity - SIZE_T_ONE))
#define HALF_MAX_SIZE_T   (MAX_SIZE_T / 2U)
#define idx2bit (   i)    ((binmap_t)(1) << (i))
#define INITIAL_LOCK (   lk)    (semaphore_init(lk, 1), 0)
#define INSECURE   0
#define insert_chunk (   M,
  P,
  S 
)
Value:
if (is_small(S)) insert_small_chunk(M, P, S)\
else { tchunkptr TP = (tchunkptr)(P); insert_large_chunk(M, TP, S); }
#define insert_large_chunk (   M,
  X,
  S 
)
#define insert_small_chunk (   M,
  P,
  S 
)
Value:
{\
bindex_t I = small_index(S);\
mchunkptr B = smallbin_at(M, I);\
mchunkptr F = B;\
assert(S >= MIN_CHUNK_SIZE);\
if (!smallmap_is_marked(M, I))\
mark_smallmap(M, I);\
else if (RTCHECK(ok_address(M, B->fd)))\
F = B->fd;\
else {\
CORRUPTION_ERROR_ACTION(M);\
}\
B->fd = P;\
F->bk = P;\
P->fd = F;\
P->bk = B;\
}
#define internal_free (   m,
  mem 
)    dlfree(mem)
#define internal_malloc (   m,
  b 
)    dlmalloc(b)
#define INUSE_BITS   (PINUSE_BIT|CINUSE_BIT)
#define is_aligned (   A)    (((size_t)((A)) & (CHUNK_ALIGN_MASK)) == 0)
#define is_extern_segment (   S)    ((S)->sflags & EXTERN_BIT)
#define is_global (   M)    ((M) == &_gm_)
#define is_granularity_aligned (   S)    (((size_t)(S) & (mparams.granularity - SIZE_T_ONE)) == 0)
#define is_initialized (   M)    ((M)->top != 0)
#define is_inuse (   p)    (((p)->head & INUSE_BITS) != PINUSE_BIT)
#define is_mmapped (   p)    (((p)->head & INUSE_BITS) == 0)
#define is_mmapped_segment (   S)    ((S)->sflags & USE_MMAP_BIT)
#define is_page_aligned (   S)    (((size_t)(S) & (mparams.page_size - SIZE_T_ONE)) == 0)
#define is_small (   s)    (((s) >> SMALLBIN_SHIFT) < NSMALLBINS)
#define least_bit (   x)    ((x) & -(x))
#define left_bits (   x)    ((x<<1) | -(x<<1))
#define leftmost_child (   t)    ((t)->child[0] != 0? (t)->child[0] : (t)->child[1])
#define leftshift_for_tree_index (   i)
Value:
((i == NTREEBINS-1)? 0 : \
((SIZE_T_BITSIZE-SIZE_T_ONE) - (((i) >> 1) + TREEBIN_SHIFT - 2)))
#define M_GRANULARITY   (-2)
#define M_MMAP_THRESHOLD   (-3)
#define M_TRIM_THRESHOLD   (-1)
#define MALLINFO_FIELD_TYPE   size_t
#define malloc_getpagesize   ((size_t)4096U)
#define MALLOC_INSPECT_ALL   0
#define mark_inuse_foot (   M,
  p,
  s 
)
#define mark_smallmap (   M,
  i 
)    ((M)->smallmap |= idx2bit(i))
#define mark_treemap (   M,
  i 
)    ((M)->treemap |= idx2bit(i))
#define MAX_RELEASE_CHECK_RATE   4095
#define MAX_REQUEST   ((-MIN_CHUNK_SIZE) << 2)
#define MAX_SIZE_T   (~(size_t)0)
#define MAX_SMALL_REQUEST   (MAX_SMALL_SIZE - CHUNK_ALIGN_MASK - CHUNK_OVERHEAD)
#define MAX_SMALL_SIZE   (MIN_LARGE_SIZE - SIZE_T_ONE)
#define MCHUNK_SIZE   (sizeof(mchunk))
#define mem2chunk (   mem)    ((mchunkptr)((char*)(mem) - TWO_SIZE_T_SIZES))
#define MFAIL   ((void*)(MAX_SIZE_T))
#define MIN_CHUNK_SIZE   ((MCHUNK_SIZE + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define MIN_LARGE_SIZE   (SIZE_T_ONE << TREEBIN_SHIFT)
#define MIN_REQUEST   (MIN_CHUNK_SIZE - CHUNK_OVERHEAD - SIZE_T_ONE)
#define MIN_SMALL_INDEX   (small_index(MIN_CHUNK_SIZE))
#define minsize_for_tree_index (   i)
Value:
((SIZE_T_ONE << (((i) >> 1) + TREEBIN_SHIFT)) | \
(((size_t)((i) & SIZE_T_ONE)) << (((i) >> 1) + TREEBIN_SHIFT - 1)))
#define MLOCK_T   cap_sel
#define mmap_align (   S)    page_align(S)
#define MMAP_CHUNK_OVERHEAD   (TWO_SIZE_T_SIZES)
#define MMAP_CLEARS   1
#define MMAP_DEFAULT (   s)    mmap(0, (s), MMAP_PROT, MMAP_FLAGS, -1, 0)
#define MMAP_FLAGS   (MAP_PRIVATE|MAP_ANONYMOUS)
#define MMAP_FOOT_PAD   (FOUR_SIZE_T_SIZES)
#define MMAP_PROT   (PROT_READ|PROT_WRITE)
#define MORECORE_CONTIGUOUS   0
#define MSPACES   0
#define MUNMAP_DEFAULT (   a,
  s 
)    munmap((a), (s))
#define next_chunk (   p)    ((mchunkptr)( ((char*)(p)) + ((p)->head & ~FLAG_BITS)))
#define next_pinuse (   p)    ((next_chunk(p)->head) & PINUSE_BIT)
#define NO_MALLINFO   0
#define NO_MALLOC_STATS   0
#define NO_SEGMENT_TRAVERSAL   0
#define NOINLINE
#define NSMALLBINS   (32U)
#define NTREEBINS   (32U)
#define ok_address (   M,
  a 
)    ((char*)(a) >= (M)->least_addr)
#define ok_inuse (   p)    is_inuse(p)
#define ok_magic (   M)    (1)
#define ok_next (   p,
  n 
)    ((char*)(p) < (char*)(n))
#define ok_pinuse (   p)    pinuse(p)
#define ONLY_MSPACES   0
#define overhead_for (   p)    (is_mmapped(p)? MMAP_CHUNK_OVERHEAD : CHUNK_OVERHEAD)
#define pad_request (   req)    (((req) + CHUNK_OVERHEAD + CHUNK_ALIGN_MASK) & ~CHUNK_ALIGN_MASK)
#define page_align (   S)    (((S) + (mparams.page_size - SIZE_T_ONE)) & ~(mparams.page_size - SIZE_T_ONE))
#define pinuse (   p)    ((p)->head & PINUSE_BIT)
#define PINUSE_BIT   (SIZE_T_ONE)
#define POSTACTION (   M)    { if (use_lock(M)) RELEASE_LOCK(&(M)->mutex); }
#define PREACTION (   M)    ((use_lock(M))? ACQUIRE_LOCK(&(M)->mutex) : 0)
#define prev_chunk (   p)    ((mchunkptr)( ((char*)(p)) - ((p)->prev_foot) ))
#define PROCEED_ON_ERROR   0
#define RELEASE_LOCK (   lk)    (semaphore_up(lk), 0)
#define RELEASE_MALLOC_GLOBAL_LOCK ( )    RELEASE_LOCK(&malloc_global_mutex);
#define replace_dv (   M,
  P,
  S 
)
Value:
{\
size_t DVS = M->dvsize;\
assert(is_small(DVS));\
if (DVS != 0) {\
mchunkptr DV = M->dv;\
insert_small_chunk(M, DV, DVS);\
}\
M->dvsize = S;\
M->dv = P;\
}
#define request2size (   req)    (((req) < MIN_REQUEST)? MIN_CHUNK_SIZE : pad_request(req))
#define RTCHECK (   e)    (e)
#define same_or_left_bits (   x)    ((x) | -(x))
#define segment_holds (   S,
  A 
)    ((char*)(A) >= S->base && (char*)(A) < S->base + S->size)
#define set_flag4 (   p)    ((p)->head |= FLAG4_BIT)
#define set_foot (   p,
  s 
)    (((mchunkptr)((char*)(p) + (s)))->prev_foot = (s))
#define set_free_with_pinuse (   p,
  s,
  n 
)    (clear_pinuse(n), set_size_and_pinuse_of_free_chunk(p, s))
#define set_inuse (   M,
  p,
  s 
)
Value:
((p)->head = (((p)->head & PINUSE_BIT)|s|CINUSE_BIT),\
((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)
#define set_inuse_and_pinuse (   M,
  p,
  s 
)
Value:
((p)->head = (s|PINUSE_BIT|CINUSE_BIT),\
((mchunkptr)(((char*)(p)) + (s)))->head |= PINUSE_BIT)
#define set_lock (   M,
  L 
)
Value:
((M)->mflags = (L)?\
((M)->mflags | USE_LOCK_BIT) :\
((M)->mflags & ~USE_LOCK_BIT))
#define set_size_and_pinuse_of_free_chunk (   p,
  s 
)    ((p)->head = (s|PINUSE_BIT), set_foot(p, s))
#define set_size_and_pinuse_of_inuse_chunk (   M,
  p,
  s 
)    ((p)->head = (s|PINUSE_BIT|CINUSE_BIT))
#define should_trim (   M,
  s 
)    ((s) > (M)->trim_check)
#define SIX_SIZE_T_SIZES   (FOUR_SIZE_T_SIZES+TWO_SIZE_T_SIZES)
#define SIZE_T_BITSIZE   (sizeof(size_t) << 3)
#define SIZE_T_FOUR   ((size_t)4)
#define SIZE_T_ONE   ((size_t)1)
#define SIZE_T_SIZE   (sizeof(size_t))
#define SIZE_T_TWO   ((size_t)2)
#define SIZE_T_ZERO   ((size_t)0)
#define small_index (   s)    (bindex_t)((s) >> SMALLBIN_SHIFT)
#define small_index2size (   i)    ((i) << SMALLBIN_SHIFT)
#define smallbin_at (   M,
  i 
)    ((sbinptr)((char*)&((M)->smallbins[(i)<<1])))
#define SMALLBIN_SHIFT   (3U)
#define SMALLBIN_WIDTH   (SIZE_T_ONE << SMALLBIN_SHIFT)
#define smallmap_is_marked (   M,
  i 
)    ((M)->smallmap & idx2bit(i))
#define STRUCT_MALLINFO_DECLARED   1
#define SYS_ALLOC_PADDING   (TOP_FOOT_SIZE + MALLOC_ALIGNMENT)
#define TOP_FOOT_SIZE   (align_offset(chunk2mem(0))+pad_request(sizeof(struct malloc_segment))+MIN_CHUNK_SIZE)
#define treebin_at (   M,
  i 
)    (&((M)->treebins[i]))
#define TREEBIN_SHIFT   (8U)
#define treemap_is_marked (   M,
  i 
)    ((M)->treemap & idx2bit(i))
#define TWO_SIZE_T_SIZES   (SIZE_T_SIZE<<1)
#define unlink_chunk (   M,
  P,
  S 
)
Value:
if (is_small(S)) unlink_small_chunk(M, P, S)\
else { tchunkptr TP = (tchunkptr)(P); unlink_large_chunk(M, TP); }
#define unlink_first_small_chunk (   M,
  B,
  P,
  I 
)
Value:
{\
mchunkptr F = P->fd;\
assert(P != B);\
assert(P != F);\
assert(chunksize(P) == small_index2size(I));\
if (B == F) {\
clear_smallmap(M, I);\
}\
else if (RTCHECK(ok_address(M, F) && F->bk == P)) {\
F->bk = B;\
B->fd = F;\
}\
else {\
CORRUPTION_ERROR_ACTION(M);\
}\
}
#define unlink_large_chunk (   M,
  X 
)
#define unlink_small_chunk (   M,
  P,
  S 
)
Value:
{\
mchunkptr F = P->fd;\
mchunkptr B = P->bk;\
bindex_t I = small_index(S);\
assert(P != B);\
assert(P != F);\
assert(chunksize(P) == small_index2size(I));\
if (RTCHECK(F == smallbin_at(M,I) || (ok_address(M, F) && F->bk == P))) { \
if (B == F) {\
clear_smallmap(M, I);\
}\
else if (RTCHECK(B == smallbin_at(M,I) ||\
(ok_address(M, B) && B->fd == P))) {\
F->bk = B;\
B->fd = F;\
}\
else {\
CORRUPTION_ERROR_ACTION(M);\
}\
}\
else {\
CORRUPTION_ERROR_ACTION(M);\
}\
}
#define USAGE_ERROR_ACTION (   m,
  p 
)    ABORT
#define USE_BUILTIN_FFS   0
#define USE_DEV_RANDOM   0
#define use_lock (   M)    ((M)->mflags & USE_LOCK_BIT)
#define USE_LOCK_BIT   (2U)
#define use_mmap (   M)    ((M)->mflags & USE_MMAP_BIT)
#define USE_MMAP_BIT   (SIZE_T_ONE)

Define CALL_MMAP/CALL_MUNMAP/CALL_DIRECT_MMAP.

#define use_noncontiguous (   M)    ((M)->mflags & USE_NONCONTIGUOUS_BIT)
#define USE_NONCONTIGUOUS_BIT   (4U)

Typedef Documentation

typedef unsigned int bindex_t
typedef unsigned int binmap_t
typedef unsigned int flag_t
typedef struct malloc_chunk mchunk
typedef struct malloc_chunk* mchunkptr
typedef struct malloc_segment msegment
typedef struct malloc_segment* msegmentptr
typedef struct malloc_state* mstate
typedef struct malloc_chunk* sbinptr
typedef struct malloc_tree_chunk* tbinptr
typedef struct malloc_tree_chunk tchunk
typedef struct malloc_tree_chunk* tchunkptr

Function Documentation

DLMALLOC_EXPORT size_t dlbulk_free ( void **  ,
size_t  n_elements 
)
size_t dlbulk_free ( void *  array[],
size_t  nelem 
)
void * dlcalloc ( size_t  n_elements,
size_t  elem_size 
)
DLMALLOC_EXPORT void dlfree ( void *  mem)
DLMALLOC_EXPORT void** dlindependent_calloc ( size_t  ,
size_t  ,
void **   
)
void** dlindependent_calloc ( size_t  n_elements,
size_t  elem_size,
void *  chunks[] 
)
DLMALLOC_EXPORT void** dlindependent_comalloc ( size_t  ,
size_t *  ,
void **   
)
void** dlindependent_comalloc ( size_t  n_elements,
size_t  sizes[],
void *  chunks[] 
)
struct mallinfo dlmallinfo ( void  )
read
DLMALLOC_EXPORT void* dlmalloc ( size_t  bytes)
size_t dlmalloc_footprint ( void  )
size_t dlmalloc_footprint_limit ( void  )
void dlmalloc_init_locks ( void  )
size_t dlmalloc_max_footprint ( void  )
size_t dlmalloc_set_footprint_limit ( size_t  bytes)
void dlmalloc_stats ( void  )
int dlmalloc_trim ( size_t  pad)
size_t dlmalloc_usable_size ( void *  mem)
int dlmallopt ( int  param_number,
int  value 
)
DLMALLOC_EXPORT void* dlmemalign ( size_t  alignment,
size_t  bytes 
)
int dlposix_memalign ( void **  pp,
size_t  alignment,
size_t  bytes 
)
void * dlpvalloc ( size_t  bytes)
DLMALLOC_EXPORT void* dlrealloc ( void *  oldmem,
size_t  bytes 
)
void * dlrealloc_in_place ( void *  oldmem,
size_t  bytes 
)
void * dlvalloc ( size_t  bytes)