RTEMS
heap.c
Go to the documentation of this file.
1 
9 /*
10  * COPYRIGHT (c) 1989-2009.
11  * On-Line Applications Research Corporation (OAR).
12  *
13  * Copyright (c) 2009, 2010 embedded brains GmbH.
14  *
15  * The license and distribution terms for this file may be
16  * found in the file LICENSE in this distribution or at
17  * http://www.rtems.org/license/LICENSE.
18  */
19 
20 #ifdef HAVE_CONFIG_H
21 #include "config.h"
22 #endif
23 
24 #include <rtems/score/heapimpl.h>
25 #include <rtems/score/threadimpl.h>
26 #include <rtems/score/interr.h>
27 
28 #include <string.h>
29 
30 #if CPU_ALIGNMENT == 0 || CPU_ALIGNMENT % 2 != 0
31  #error "invalid CPU_ALIGNMENT value"
32 #endif
33 
34 /*
35  * _Heap_Initialize
36  *
37  * This kernel routine initializes a heap.
38  *
39  * Input parameters:
40  * heap - pointer to heap header
41  * area_begin - starting address of heap
42  * size - size of heap
43  * page_size - allocatable unit of memory
44  *
45  * Output parameters:
46  * returns - maximum memory available if RTEMS_SUCCESSFUL
47  * 0 - otherwise
48  *
49  * This is what a heap looks like in memory immediately after initialization:
50  *
51  *
52  * +--------------------------------+ <- begin = area_begin
53  * | unused space due to alignment |
54  * | size < page_size |
55  * 0 +--------------------------------+ <- first block
56  * | prev_size = page_size |
57  * 4 +--------------------------------+
58  * | size = size0 | 1 |
59  * 8 +---------------------+----------+ <- aligned on page_size
60  * | next = HEAP_TAIL | |
61  * 12 +---------------------+ |
62  * | prev = HEAP_HEAD | memory |
63  * +---------------------+ |
64  * | available |
65  * | |
66  * | for allocation |
67  * | |
68  * size0 +--------------------------------+ <- last dummy block
69  * | prev_size = size0 |
70  * +4 +--------------------------------+
71  * | size = page_size | 0 | <- prev block is free
72  * +8 +--------------------------------+ <- aligned on page_size
73  * | unused space due to alignment |
74  * | size < page_size |
75  * +--------------------------------+ <- end = begin + size
76  *
77  * Below is what a heap looks like after first allocation of SIZE bytes using
78  * _Heap_allocate(). BSIZE stands for SIZE + 4 aligned up on 'page_size'
79  * boundary.
80  * [NOTE: If allocation were performed by _Heap_Allocate_aligned(), the
81  * block size BSIZE is defined differently, and previously free block will
82  * be split so that upper part of it will become used block (see
83  * 'heapallocatealigned.c' for details).]
84  *
85  * +--------------------------------+ <- begin = area_begin
86  * | unused space due to alignment |
87  * | size < page_size |
88  * 0 +--------------------------------+ <- used block
89  * | prev_size = page_size |
90  * 4 +--------------------------------+
91  * | size = BSIZE | 1 | <- prev block is used
92  * 8 +--------------------------------+ <- aligned on page_size
93  * | . | Pointer returned to the user
94  * | . | is 8 for _Heap_Allocate()
95  * | . | and is in range
96  * 8 + | user-accessible | [8,8+page_size) for
97  * page_size +- - - - - -+ _Heap_Allocate_aligned()
98  * | area |
99  * | . |
100  * BSIZE +- - - - - . - - - - -+ <- free block
101  * | . |
102  * BSIZE +4 +--------------------------------+
103  * | size = S = size0 - BSIZE | 1 | <- prev block is used
104  * BSIZE +8 +-------------------+------------+ <- aligned on page_size
105  * | next = HEAP_TAIL | |
106  * BSIZE +12 +-------------------+ |
107  * | prev = HEAP_HEAD | memory |
108  * +-------------------+ |
109  * | . available |
110  * | . |
111  * | . for |
112  * | . |
113  * BSIZE +S+0 +-------------------+ allocation + <- last dummy block
114  * | prev_size = S | |
115  * +S+4 +-------------------+------------+
116  * | size = page_size | 0 | <- prev block is free
117  * +S+8 +--------------------------------+ <- aligned on page_size
118  * | unused space due to alignment |
119  * | size < page_size |
120  * +--------------------------------+ <- end = begin + size
121  *
122  */
123 
124 #ifdef HEAP_PROTECTION
  /*
   * Default heap protection initializer for a block.
   *
   * Stamps the begin and end protector words with their magic values so that
   * a later check can detect writes outside the allocated area, and records
   * the executing thread for error diagnosis.
   *
   * NOTE(review): the heap parameter is unused here; it is presumably kept to
   * match the Protection handler function signature — confirm against
   * heapimpl.h.
   */
  static void _Heap_Protection_block_initialize_default(
    Heap_Control *heap,
    Heap_Block *block
  )
  {
    block->Protection_begin.protector [0] = HEAP_BEGIN_PROTECTOR_0;
    block->Protection_begin.protector [1] = HEAP_BEGIN_PROTECTOR_1;
    /* Not yet on the delayed free chain */
    block->Protection_begin.next_delayed_free_block = NULL;
    /* Remember which task touched the block last to aid error diagnosis */
    block->Protection_begin.task = _Thread_Get_executing();
    block->Protection_begin.tag = NULL;
    block->Protection_end.protector [0] = HEAP_END_PROTECTOR_0;
    block->Protection_end.protector [1] = HEAP_END_PROTECTOR_1;
  }
138 
  /*
   * Default heap protection check for a block.
   *
   * Verifies that all four protector words still hold their magic values and
   * reports a HEAP_ERROR_BROKEN_PROTECTOR error otherwise (an overwrite of
   * the protector area indicates a buffer overrun or underrun).
   */
  static void _Heap_Protection_block_check_default(
    Heap_Control *heap,
    Heap_Block *block
  )
  {
    if (
      block->Protection_begin.protector [0] != HEAP_BEGIN_PROTECTOR_0
        || block->Protection_begin.protector [1] != HEAP_BEGIN_PROTECTOR_1
        || block->Protection_end.protector [0] != HEAP_END_PROTECTOR_0
        || block->Protection_end.protector [1] != HEAP_END_PROTECTOR_1
    ) {
      _Heap_Protection_block_error( heap, block, HEAP_ERROR_BROKEN_PROTECTOR );
    }
  }
153 
  /*
   * Default heap protection error handler.
   *
   * Terminates the system with fatal source RTEMS_FATAL_SOURCE_HEAP.  The
   * error context is a local variable; passing its address is safe because
   * _Terminate() does not return.
   */
  static void _Heap_Protection_block_error_default(
    Heap_Control *heap,
    Heap_Block *block,
    Heap_Error_reason reason
  )
  {
    Heap_Error_context error_context = {
      .heap = heap,
      .block = block,
      .reason = reason
    };

    _Terminate( RTEMS_FATAL_SOURCE_HEAP, (uintptr_t) &error_context );
  }
168 #endif
169 
171  uintptr_t heap_area_begin,
172  uintptr_t heap_area_size,
173  uintptr_t page_size,
174  uintptr_t min_block_size,
175  Heap_Block **first_block_ptr,
176  Heap_Block **last_block_ptr
177 )
178 {
179  uintptr_t const heap_area_end = heap_area_begin + heap_area_size;
180  uintptr_t const alloc_area_begin =
181  _Heap_Align_up( heap_area_begin + HEAP_BLOCK_HEADER_SIZE, page_size );
182  uintptr_t const first_block_begin =
183  alloc_area_begin - HEAP_BLOCK_HEADER_SIZE;
184  uintptr_t const overhead =
185  HEAP_BLOCK_HEADER_SIZE + (first_block_begin - heap_area_begin);
186  uintptr_t const first_block_size =
187  _Heap_Align_down( heap_area_size - overhead, page_size );
188  Heap_Block *const first_block = (Heap_Block *) first_block_begin;
189  Heap_Block *const last_block =
190  _Heap_Block_at( first_block, first_block_size );
191 
192  if (
193  heap_area_end < heap_area_begin
194  || heap_area_size <= overhead
195  || first_block_size < min_block_size
196  ) {
197  /* Invalid area or area too small */
198  return false;
199  }
200 
201  *first_block_ptr = first_block;
202  *last_block_ptr = last_block;
203 
204  return true;
205 }
206 
208  Heap_Control *heap,
209  void *heap_area_begin_ptr,
210  uintptr_t heap_area_size,
211  uintptr_t page_size
212 )
213 {
214  Heap_Statistics *const stats = &heap->stats;
215  uintptr_t const heap_area_begin = (uintptr_t) heap_area_begin_ptr;
216  uintptr_t const heap_area_end = heap_area_begin + heap_area_size;
217  uintptr_t first_block_begin = 0;
218  uintptr_t first_block_size = 0;
219  uintptr_t last_block_begin = 0;
220  uintptr_t min_block_size = 0;
221  bool area_ok = false;
222  Heap_Block *first_block = NULL;
223  Heap_Block *last_block = NULL;
224 
225  if ( page_size == 0 ) {
226  page_size = CPU_ALIGNMENT;
227  } else {
228  page_size = _Heap_Align_up( page_size, CPU_ALIGNMENT );
229 
230  if ( page_size < CPU_ALIGNMENT ) {
231  /* Integer overflow */
232  return 0;
233  }
234  }
235 
236  min_block_size = _Heap_Min_block_size( page_size );
237 
239  heap_area_begin,
240  heap_area_size,
241  page_size,
242  min_block_size,
243  &first_block,
244  &last_block
245  );
246  if ( !area_ok ) {
247  return 0;
248  }
249 
250  memset(heap, 0, sizeof(*heap));
251 
252  #ifdef HEAP_PROTECTION
253  heap->Protection.block_initialize = _Heap_Protection_block_initialize_default;
254  heap->Protection.block_check = _Heap_Protection_block_check_default;
255  heap->Protection.block_error = _Heap_Protection_block_error_default;
256  #endif
257 
258  first_block_begin = (uintptr_t) first_block;
259  last_block_begin = (uintptr_t) last_block;
260  first_block_size = last_block_begin - first_block_begin;
261 
262  /* First block */
263  first_block->prev_size = heap_area_end;
264  first_block->size_and_flag = first_block_size | HEAP_PREV_BLOCK_USED;
265  first_block->next = _Heap_Free_list_tail( heap );
266  first_block->prev = _Heap_Free_list_head( heap );
267  _Heap_Protection_block_initialize( heap, first_block );
268 
269  /* Heap control */
270  heap->page_size = page_size;
271  heap->min_block_size = min_block_size;
272  heap->area_begin = heap_area_begin;
273  heap->area_end = heap_area_end;
274  heap->first_block = first_block;
275  heap->last_block = last_block;
276  _Heap_Free_list_head( heap )->next = first_block;
277  _Heap_Free_list_tail( heap )->prev = first_block;
278 
279  /* Last block */
280  last_block->prev_size = first_block_size;
281  last_block->size_and_flag = 0;
283  _Heap_Protection_block_initialize( heap, last_block );
284 
285  /* Statistics */
286  stats->size = first_block_size;
287  stats->free_size = first_block_size;
288  stats->min_free_size = first_block_size;
289  stats->free_blocks = 1;
290  stats->max_free_blocks = 1;
291 
293 
294  _HAssert( _Heap_Is_aligned( heap->page_size, CPU_ALIGNMENT ) );
295  _HAssert( _Heap_Is_aligned( heap->min_block_size, page_size ) );
296  _HAssert(
297  _Heap_Is_aligned( _Heap_Alloc_area_of_block( first_block ), page_size )
298  );
299  _HAssert(
300  _Heap_Is_aligned( _Heap_Alloc_area_of_block( last_block ), page_size )
301  );
302 
303  return first_block_size;
304 }
305 
/*
 * Splits block so that its first part carries an allocation of alloc_size
 * bytes.  If the remainder is large enough it becomes a new free block
 * (coalesced with next_block when that one is free); otherwise the whole
 * block is consumed by the allocation.
 */
static void _Heap_Block_split(
  Heap_Control *heap,
  Heap_Block *block,
  Heap_Block *next_block,
  Heap_Block *free_list_anchor,
  uintptr_t alloc_size
)
{
  Heap_Statistics *const stats = &heap->stats;

  uintptr_t const page_size = heap->page_size;
  uintptr_t const min_block_size = heap->min_block_size;
  uintptr_t const min_alloc_size = min_block_size - HEAP_BLOCK_HEADER_SIZE;

  uintptr_t const block_size = _Heap_Block_size( block );

  /* Size consumed by the allocation, and its page aligned block size */
  uintptr_t const used_size =
    _Heap_Max( alloc_size, min_alloc_size ) + HEAP_BLOCK_HEADER_SIZE;
  uintptr_t const used_block_size = _Heap_Align_up( used_size, page_size );

  /*
   * HEAP_ALLOC_BONUS accounts for the next block's prev_size field, which
   * may be used by the allocation of the previous block.
   */
  uintptr_t const free_size = block_size + HEAP_ALLOC_BONUS - used_size;
  uintptr_t const free_size_limit = min_block_size + HEAP_ALLOC_BONUS;

  _HAssert( used_size <= block_size + HEAP_ALLOC_BONUS );
  _HAssert( used_size + free_size == block_size + HEAP_ALLOC_BONUS );

  if ( free_size >= free_size_limit ) {
    /* Remainder is big enough to form a free block of its own */
    Heap_Block *const free_block = _Heap_Block_at( block, used_block_size );
    uintptr_t free_block_size = block_size - used_block_size;
    uintptr_t const next_block_size = _Heap_Block_size( next_block );
    Heap_Block *const next_next_block =
      _Heap_Block_at( next_block, next_block_size );

    _HAssert( used_block_size + free_block_size == block_size );

    _Heap_Block_set_size( block, used_block_size );

    /* Statistics */
    stats->free_size += free_block_size;

    /* next_next_block's prev-used flag tells whether next_block is used */
    if ( _Heap_Is_prev_used( next_next_block ) ) {
      /* next_block is used: link the new free block into the free list */
      _Heap_Free_list_insert_after( free_list_anchor, free_block );

      /* Statistics */
      ++stats->free_blocks;
    } else {
      /* next_block is free: coalesce it into the new free block */
      _Heap_Free_list_replace( next_block, free_block );

      free_block_size += next_block_size;

      /* The block behind the (now larger) free block */
      next_block = _Heap_Block_at( free_block, free_block_size );
    }

    free_block->size_and_flag = free_block_size | HEAP_PREV_BLOCK_USED;

    next_block->prev_size = free_block_size;
    next_block->size_and_flag &= ~HEAP_PREV_BLOCK_USED;

    _Heap_Protection_block_initialize( heap, free_block );
  } else {
    /* Remainder too small to split off: the entire block is used */
    next_block->size_and_flag |= HEAP_PREV_BLOCK_USED;
  }
}
369 
/*
 * Allocates from the low end of block: the block itself becomes the used
 * block and _Heap_Block_split() carves off any sufficiently large free
 * remainder at its high end.  Returns the used block.
 */
static Heap_Block *_Heap_Block_allocate_from_begin(
  Heap_Control *heap,
  Heap_Block *block,
  Heap_Block *next_block,
  Heap_Block *free_list_anchor,
  uintptr_t alloc_size
)
{
  _Heap_Block_split( heap, block, next_block, free_list_anchor, alloc_size );

  return block;
}
382 
/*
 * Allocates from the high end of block: a new used block starts at the
 * page boundary corresponding to alloc_begin, and the lower remainder of
 * block stays (or becomes) free — coalesced with the previous block when
 * that one is free.  Returns the new used block.
 */
static Heap_Block *_Heap_Block_allocate_from_end(
  Heap_Control *heap,
  Heap_Block *block,
  Heap_Block *next_block,
  Heap_Block *free_list_anchor,
  uintptr_t alloc_begin,
  uintptr_t alloc_size
)
{
  Heap_Statistics *const stats = &heap->stats;

  /* Block whose allocation area contains alloc_begin */
  Heap_Block *const new_block =
    _Heap_Block_of_alloc_area( alloc_begin, heap->page_size );
  uintptr_t const new_block_begin = (uintptr_t) new_block;
  uintptr_t const new_block_size = (uintptr_t) next_block - new_block_begin;
  /* Size of the lower remainder that stays free */
  uintptr_t block_size_adjusted = (uintptr_t) new_block - (uintptr_t) block;

  _HAssert( block_size_adjusted >= heap->min_block_size );
  _HAssert( new_block_size >= heap->min_block_size );

  /* Statistics: the lower remainder is returned to the free pool */
  stats->free_size += block_size_adjusted;

  if ( _Heap_Is_prev_used( block ) ) {
    /* Previous block is used: the remainder becomes a new free block */
    _Heap_Free_list_insert_after( free_list_anchor, block );

    free_list_anchor = block;

    /* Statistics */
    ++stats->free_blocks;
  } else {
    /* Previous block is free: merge the remainder into it */
    Heap_Block *const prev_block = _Heap_Prev_block( block );
    uintptr_t const prev_block_size = _Heap_Block_size( prev_block );

    block = prev_block;
    block_size_adjusted += prev_block_size;
  }

  block->size_and_flag = block_size_adjusted | HEAP_PREV_BLOCK_USED;

  new_block->prev_size = block_size_adjusted;
  new_block->size_and_flag = new_block_size;

  /* Carve off any free remainder above the allocation */
  _Heap_Block_split( heap, new_block, next_block, free_list_anchor, alloc_size );

  return new_block;
}
430 
432  Heap_Control *heap,
433  Heap_Block *block,
434  uintptr_t alloc_begin,
435  uintptr_t alloc_size
436 )
437 {
438  Heap_Statistics *const stats = &heap->stats;
439 
440  uintptr_t const alloc_area_begin = _Heap_Alloc_area_of_block( block );
441  uintptr_t const alloc_area_offset = alloc_begin - alloc_area_begin;
442  uintptr_t const block_size = _Heap_Block_size( block );
443  Heap_Block *const next_block = _Heap_Block_at( block, block_size );
444 
445  Heap_Block *free_list_anchor = NULL;
446 
447  _HAssert( alloc_area_begin <= alloc_begin );
448 
449  if ( _Heap_Is_prev_used( next_block ) ) {
450  free_list_anchor = _Heap_Free_list_head( heap );
451  } else {
452  free_list_anchor = block->prev;
453 
454  _Heap_Free_list_remove( block );
455 
456  /* Statistics */
457  --stats->free_blocks;
458  ++stats->used_blocks;
459  stats->free_size -= block_size;
460  }
461 
462  if ( alloc_area_offset < heap->page_size ) {
463  alloc_size += alloc_area_offset;
464 
465  block = _Heap_Block_allocate_from_begin(
466  heap,
467  block,
468  next_block,
469  free_list_anchor,
470  alloc_size
471  );
472  } else {
473  block = _Heap_Block_allocate_from_end(
474  heap,
475  block,
476  next_block,
477  free_list_anchor,
478  alloc_begin,
479  alloc_size
480  );
481  }
482 
483  /* Statistics */
484  if ( stats->min_free_size > stats->free_size ) {
485  stats->min_free_size = stats->free_size;
486  }
487 
488  _Heap_Protection_block_initialize( heap, block );
489 
490  return block;
491 }
Constants and Prototypes Related to the Internal Error Handler.
Run-time heap statistics.
Definition: heapinfo.h:40
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Alloc_area_of_block(const Heap_Block *block)
Returns the first address in the block without the heap header.
Definition: heapimpl.h:635
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Max(uintptr_t a, uintptr_t b)
Returns the bigger one of the two arguments.
Definition: heapimpl.h:796
uintptr_t size_and_flag
Contains the size of the current block and a flag which indicates if the previous block is free or us...
Definition: heap.h:289
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Prev_block(const Heap_Block *block)
Returns the address of the previous block.
Definition: heapimpl.h:621
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Block_at(const Heap_Block *block, uintptr_t offset)
Returns the block which is offset away from block.
Definition: heapimpl.h:606
void _Terminate(Internal_errors_Source the_source, Internal_errors_t the_error) RTEMS_NO_RETURN
Initiates system termination.
Definition: interr.c:31
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Block_of_alloc_area(uintptr_t alloc_begin, uintptr_t page_size)
Returns the starting address of the block corresponding to the allocatable area.
Definition: heapimpl.h:650
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Align_up(uintptr_t value, uintptr_t alignment)
Aligns the value to a given alignment, rounding up.
Definition: heap.h:413
There is an unexpected value in the heap block protector area.
Definition: heap.h:146
RTEMS_INLINE_ROUTINE void _Heap_Free_list_remove(Heap_Block *block)
Removes the block from the free list.
Definition: heapimpl.h:497
Heap Handler Implementation.
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Free_list_tail(Heap_Control *heap)
Returns the tail of the free list of the heap.
Definition: heapimpl.h:463
RTEMS_INLINE_ROUTINE void _Heap_Block_set_size(Heap_Block *block, uintptr_t size)
Sets the block size.
Definition: heapimpl.h:677
Fatal source for heap errors.
Definition: interr.h:147
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Min_block_size(uintptr_t page_size)
Returns the minimal Heap Block size for the given page_size.
Definition: heap.h:434
static __inline__ struct _Thread_Control * _Thread_Get_executing(void)
Returns the thread control block of the executing thread.
Definition: percpu.h:878
RTEMS_INLINE_ROUTINE void _Heap_Protection_set_delayed_free_fraction(Heap_Control *heap, uintptr_t fraction)
Sets the fraction of delayed free blocks that is actually freed during memory shortage.
Definition: heapimpl.h:431
Description for free or used blocks.
Definition: heap.h:256
RTEMS_INLINE_ROUTINE bool _Heap_Is_prev_used(const Heap_Block *block)
Returns if the previous heap block is used.
Definition: heapimpl.h:696
uint32_t max_free_blocks
Maximum number of free blocks ever.
Definition: heapinfo.h:84
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Free_list_head(Heap_Control *heap)
Returns the head of the free list of the heap.
Definition: heapimpl.h:451
uint32_t used_blocks
Current number of used blocks.
Definition: heapinfo.h:89
#define CPU_ALIGNMENT
Definition: cpu.h:742
RTEMS_INLINE_ROUTINE bool _Heap_Is_aligned(uintptr_t value, uintptr_t alignment)
Checks if the value is aligned to the given alignment.
Definition: heapimpl.h:574
Control block used to manage a heap.
Definition: heap.h:318
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Align_down(uintptr_t value, uintptr_t alignment)
Returns the aligned value, truncating.
Definition: heapimpl.h:590
uintptr_t free_size
Current free size in bytes.
Definition: heapinfo.h:67
uintptr_t _Heap_Initialize(Heap_Control *heap, void *heap_area_begin_ptr, uintptr_t heap_area_size, uintptr_t page_size)
Initializes the heap control block.
Definition: heap.c:207
RTEMS_INLINE_ROUTINE void _Heap_Free_list_replace(Heap_Block *old_block, Heap_Block *new_block)
Replaces one block in the free list by another.
Definition: heapimpl.h:512
uintptr_t size
Size of the allocatable area in bytes.
Definition: heapinfo.h:60
uintptr_t min_free_size
Minimum free size ever in bytes.
Definition: heapinfo.h:74
Heap_Error_reason
The heap error reason.
Definition: heap.h:142
Heap_Block * next
Pointer to the next free block or part of the allocated area.
Definition: heap.h:304
#define HEAP_ALLOC_BONUS
Size of the part at the block begin which may be used for allocation in charge of the previous block...
Definition: heapimpl.h:42
Context of a heap error.
Definition: heap.h:176
uintptr_t prev_size
Size of the previous block or part of the allocated area of the previous block.
Definition: heap.h:270
uint32_t free_blocks
Current number of free blocks.
Definition: heapinfo.h:79
Inlined Routines from the Thread Handler.
Heap_Block * _Heap_Block_allocate(Heap_Control *heap, Heap_Block *block, uintptr_t alloc_begin, uintptr_t alloc_size)
Allocates the memory area. starting at alloc_begin of size alloc_size bytes in the block block...
Definition: heap.c:431
RTEMS_INLINE_ROUTINE void _Heap_Free_list_insert_after(Heap_Block *block_before, Heap_Block *new_block)
Inserts a block after an existing block in the free list.
Definition: heapimpl.h:533
Heap_Control * heap
The heap of the block.
Definition: heap.h:180
Heap_Block * prev
Pointer to the previous free block or part of the allocated area.
Definition: heap.h:312
#define HEAP_PREV_BLOCK_USED
See also Heap_Block::size_and_flag.
Definition: heapimpl.h:36
#define HEAP_BLOCK_HEADER_SIZE
The block header consists of the two size fields (Heap_Block::prev_size and Heap_Block::size_and_flag...
Definition: heap.h:250
RTEMS_INLINE_ROUTINE void _Heap_Set_last_block_size(Heap_Control *heap)
Sets the size of the last block for the heap.
Definition: heapimpl.h:765
bool _Heap_Get_first_and_last_block(uintptr_t heap_area_begin, uintptr_t heap_area_size, uintptr_t page_size, uintptr_t min_block_size, Heap_Block **first_block_ptr, Heap_Block **last_block_ptr)
Gets the first and last block for the heap area.
Definition: heap.c:170
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Block_size(const Heap_Block *block)
Returns the block size.
Definition: heapimpl.h:666