#include <l4/cxx/arith>
#include <l4/cxx/minmax>
#include <l4/cxx/iostream>
#include <l4/sys/consts.h>

namespace cxx {

// Standard list-based allocator.
class List_alloc
{
private:
  friend class List_alloc_sanity_guard;

  struct Mem_block
  {
    Mem_block *next;
    unsigned long size;
  };

  Mem_block *_first;
  inline void check_overlap(void *, unsigned long);
  inline void sanity_check_list(char const *, char const *);
  inline void merge();

public:
  // Initializes an empty list allocator; memory is handed to it via free().
  List_alloc() : _first(0) {}
  // Return a free memory block to the allocator.
  inline void free(void *block, unsigned long size, bool initial_free = false);

  // Alloc a memory block.
  inline void *alloc(unsigned long size, unsigned align);

  // Allocate a memory block of min <= size <= *max.
  inline void *alloc_max(unsigned long min, unsigned long *max, unsigned align,
                         unsigned granularity);

  // Get the amount of available memory.
  inline unsigned long avail();

  // Dump the current free list via the given debug sink.
  template <typename DBG>
  void dump_free_list(DBG &out);
};
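A minimal usage sketch (assuming the usual <l4/cxx/list_alloc> header path; the arena and the sizes below are made up for illustration, since the allocator only manages memory handed to it via free()):

#include <l4/cxx/list_alloc>

// Hypothetical backing store owned by the caller.
static char arena[64 * 1024] __attribute__((aligned(16)));

void list_alloc_example()
{
  cxx::List_alloc a;

  // Seed the allocator; initial_free = true lets it trim the region
  // to its internal alignment requirements.
  a.free(arena, sizeof(arena), true);

  // 4 KiB block, aligned to 4 KiB (align is the alignment in bytes,
  // as used by the almask computation further down in this file).
  void *p = a.alloc(4096, 4096);
  if (p)
    a.free(p, 4096);   // blocks are returned together with their size

  unsigned long remaining = a.avail();
  (void)remaining;
}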
#if !defined (CXX_LIST_ALLOC_SANITY)

// Sanity checking disabled: the guard and the check functions are no-ops.
class List_alloc_sanity_guard
{
public:
  List_alloc_sanity_guard(List_alloc *, char const *)
  {}
};

void
List_alloc::check_overlap(void *, unsigned long)
{}

void
List_alloc::sanity_check_list(char const *, char const *)
{}

#else
// Sanity checking enabled: the guard verifies list consistency on entry
// and exit of every checked operation.
class List_alloc_sanity_guard
{
private:
  List_alloc *a;
  char const *func;

public:
  List_alloc_sanity_guard(List_alloc *a, char const *func)
    : a(a), func(func)
  { a->sanity_check_list(func, "entry"); }

  ~List_alloc_sanity_guard()
  { a->sanity_check_list(func, "exit"); }
};
void
List_alloc::check_overlap(void *b, unsigned long s)
{
  unsigned long const mb_align
    = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;

  if ((unsigned long)b & mb_align)
    {
      L4::cerr << "List_alloc(FATAL): trying to free unaligned memory: "
               << b << " align=" << arith::Ld<sizeof(Mem_block)>::value << "\n";
    }

  Mem_block *c = _first;
  for (; c; c = c->next)
    {
      // [x_s, x_e): block being freed, [b_s, b_e): block already on the list
      unsigned long x_s = (unsigned long)b;
      unsigned long x_e = x_s + s;
      unsigned long b_s = (unsigned long)c;
      unsigned long b_e = b_s + c->size;

      if (   (x_s >= b_s && x_s < b_e)
          || (x_e > b_s && x_e <= b_e)
          || (b_s >= x_s && b_s < x_e)
          || (b_e > x_s && b_e <= x_e))
        {
          L4::cerr << "List_alloc(FATAL): trying to free memory that "
                      "is already free: \n  ["
                   << (void *)x_s << '-' << (void *)x_e << ") overlaps ["
                   << (void *)b_s << '-' << (void *)b_e << ")\n";
        }
    }
}
void
List_alloc::sanity_check_list(char const *func, char const *info)
{
  Mem_block *c = _first;
  for (; c; c = c->next)
    {
      if (!c->next)
        continue;

      // free blocks must be sorted by address ...
      if (c >= c->next)
        {
          L4::cerr << "List_alloc(FATAL): " << func << '(' << info
                   << "): list order violation\n";
        }

      // ... and must not overlap their successor
      if (((unsigned long)c) + c->size > (unsigned long)c->next)
        {
          L4::cerr << "List_alloc(FATAL): " << func << '(' << info
                   << "): list order violation\n";
        }
    }
}

#endif
void
List_alloc::merge()
{
  List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);

  Mem_block *c = _first;
  while (c && c->next)
    {
      unsigned long f_start = (unsigned long)c;
      unsigned long f_end   = f_start + c->size;
      unsigned long n_start = (unsigned long)c->next;

      // adjacent free blocks: coalesce and re-check the merged block
      if (f_end == n_start)
        {
          c->size += c->next->size;
          c->next = c->next->next;
          continue;
        }

      c = c->next;
    }
}
void
List_alloc::free(void *block, unsigned long size, bool initial_free)
{
  List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);

  unsigned long const mb_align
    = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;

  if (initial_free)
    {
      // enforce alignment constraints on the initial memory region
      unsigned long nblock = ((unsigned long)block + mb_align) & ~mb_align;
      size = (size - (nblock - (unsigned long)block)) & ~mb_align;
      block = (void *)nblock;
    }
  else
    // round the size up to the minimum aligned size
    size = (size + mb_align) & ~mb_align;

  check_overlap(block, size);

  // insert the block into the address-sorted free list
  Mem_block **c = &_first;
  Mem_block *next = 0;

  if (*c)
    {
      while (*c && *c < block)
        c = &(*c)->next;

      next = *c;
    }

  *c = (Mem_block *)block;
  (*c)->next = next;
  (*c)->size = size;

  // coalesce with neighboring free blocks
  merge();
}
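The rounding in free() is plain mask arithmetic on mb_align. A small worked example, assuming a target where sizeof(Mem_block) is 8 bytes and mb_align is therefore 7:

unsigned long const mb_align = (1UL << 3) - 1;            // 7

// initial_free path: round the start up, shrink the size by the cut-off
// prefix, then round the size down to a Mem_block multiple
unsigned long block = 0x1003, size = 0x105;
unsigned long nblock = (block + mb_align) & ~mb_align;    // 0x1008
size = (size - (nblock - block)) & ~mb_align;             // 0x100

// regular free path: only the size is rounded up
unsigned long sz = 0x3f;
sz = (sz + mb_align) & ~mb_align;                         // 0x40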
void *
List_alloc::alloc_max(unsigned long min, unsigned long *max, unsigned align,
                      unsigned granularity)
{
  List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);

  unsigned char const mb_bits = arith::Ld<sizeof(Mem_block)>::value;
  unsigned long const mb_align = (1UL << mb_bits) - 1;

  // round min up and *max down to Mem_block granularity, then truncate
  // *max to the requested granularity
  min = l4_round_size(min, mb_bits);
  *max = l4_trunc_size(*max, mb_bits);
  *max = *max & ~(granularity - 1UL);

  if (min > *max)
    return 0;

  unsigned long almask = align ? (align - 1) : 0;
  // the minimum alignment is given by the size of a Mem_block
  if (almask < mb_align)
    almask = mb_align;

  Mem_block **c = &_first;
  Mem_block **fit = 0;
  unsigned long max_fit = 0;

  for (; *c; c = &(*c)->next)
    {
      // address of the free memory block
      unsigned long n_start = (unsigned long)(*c);

      // block too small, next
      if ((*c)->size < min)
        continue;

      // aligned start address within the free block
      unsigned long a_start = (n_start + almask) & ~almask;
      // aligned start lies behind the block, next
      if (a_start - n_start >= (*c)->size)
        continue;

      // remaining size after the alignment padding, rounded down
      // to the granularity
      unsigned long r_size = (*c)->size - a_start + n_start;
      r_size &= ~(granularity - 1UL);
      if (r_size < min)
        continue;

      if (r_size >= *max)
        {
          // block covers the maximum, we are done
          fit = c;
          max_fit = *max;
          break;
        }

      // better fit than the last one, remember it
      if (r_size > max_fit)
        {
          fit = c;
          max_fit = r_size;
        }
    }

  if (!fit)
    return 0;

  unsigned long n_start = (unsigned long)(*fit);
  unsigned long a_start = (n_start + almask) & ~almask;
  unsigned long r_size = (*fit)->size - a_start + n_start;

  if (a_start > n_start)
    {
      // keep the alignment padding as a shrunk free block
      (*fit)->size -= r_size;
      fit = &(*fit)->next;
    }
  else
    // the whole block is consumed, unlink it
    *fit = (*fit)->next;

  *max = max_fit;
  if (r_size == max_fit)
    return (void *)a_start;

  // put the unused tail back onto the free list
  Mem_block *m = (Mem_block *)(a_start + max_fit);
  m->next = *fit;
  m->size = r_size - max_fit;
  *fit = m;
  return (void *)a_start;
}
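alloc_max() hands out the largest chunk it finds with min <= size <= *max and reports the granted size back through *max. A hedged usage sketch; the function name and the concrete page-sized values are illustrative only:

#include <l4/cxx/list_alloc>

// Grab as much contiguous memory as possible, at least one page,
// in page-sized granules.
void *grab_chunk(cxx::List_alloc &a, unsigned long *got)
{
  *got = 1UL << 20;                              // ask for up to 1 MiB
  void *chunk = a.alloc_max(4096, got, 4096, 4096);
  // on success, *got holds the size that was actually allocated
  return chunk;
}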
void *
List_alloc::alloc(unsigned long size, unsigned align)
{
  List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);

  unsigned long const mb_align
    = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;

  // round the size up to the minimum aligned size
  size = (size + mb_align) & ~mb_align;

  unsigned long almask = align ? (align - 1) : 0;
  // the minimum alignment is given by the size of a Mem_block
  if (almask < mb_align)
    almask = mb_align;

  Mem_block **c = &_first;

  for (; *c; c = &(*c)->next)
    {
      // address of the free memory block
      unsigned long n_start = (unsigned long)(*c);

      // block too small, next
      if ((*c)->size < size)
        continue;

      // aligned start address within the free block
      unsigned long a_start = (n_start + almask) & ~almask;
      // aligned start lies behind the block, next
      if (a_start - n_start >= (*c)->size)
        continue;

      // remaining size after the alignment padding
      unsigned long r_size = (*c)->size - a_start + n_start;
      if (r_size < size)
        continue;

      if (a_start > n_start)
        {
          // keep the alignment padding as a shrunk free block and
          // continue with its next pointer
          (*c)->size -= r_size;
          c = &(*c)->next;
        }
      else
        // drop the block, c remains the next pointer of the previous one
        *c = (*c)->next;

      // exact fit: the whole remaining space is allocated
      if (r_size == size)
        return (void *)a_start;

      // put the unused tail behind the allocation back onto the free list
      Mem_block *m = (Mem_block *)(a_start + size);
      m->size = r_size - size;
      m->next = *c;
      *c = m;
      return (void *)a_start;
    }

  return 0;
}
unsigned long
List_alloc::avail()
{
  List_alloc_sanity_guard __attribute__((unused)) guard(this, __FUNCTION__);

  // sum up the sizes of all blocks on the free list
  Mem_block *c = _first;
  unsigned long a = 0;
  while (c)
    {
      a += c->size;
      c = c->next;
    }

  return a;
}
template <typename DBG>
void
List_alloc::dump_free_list(DBG &out)
{
  Mem_block *c = _first;
  while (c)
    {
      unsigned sz;
      char const *unit;

      if (c->size < 1024)
        {
          sz = c->size;
          unit = "Byte";
        }
      else if (c->size < 1 << 20)
        {
          sz = c->size >> 10;
          unit = "kB";
        }
      else
        {
          sz = c->size >> 20;
          unit = "MB";
        }

      out.printf("%12p - %12p (%u %s)\n", c, (char *)c + c->size - 1, sz, unit);

      c = c->next;
    }
}

} // namespace cxx
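dump_free_list() only requires its DBG argument to provide a printf-style member. A minimal sketch of such a sink; the stdio-backed Dbg type here is purely hypothetical:

#include <cstdarg>
#include <cstdio>

struct Dbg
{
  void printf(char const *fmt, ...)
  {
    va_list args;
    va_start(args, fmt);
    std::vprintf(fmt, args);
    va_end(args);
  }
};

// usage:
//   Dbg out;
//   allocator.dump_free_list(out);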