/*
 * NOTE(review): this whole region is extraction residue -- the original
 * file's line numbers are fused into the text and interior lines are
 * missing (the embedded numbering jumps, e.g. 132 -> 134), so every
 * definition here is an incomplete fragment.  All original tokens are
 * kept byte-identical below; only comments are added.
 */
/* Build-time guard: CPU_ALIGNMENT must be nonzero and even, otherwise
   the heap's alignment arithmetic cannot work.  The fragment then opens
   the HEAP_PROTECTION-only default block-initialize hook (its parameter
   list, lines 126-129, is missing from this extract). */
30 #if CPU_ALIGNMENT == 0 || CPU_ALIGNMENT % 2 != 0 31 #error "invalid CPU_ALIGNMENT value" 124 #ifdef HEAP_PROTECTION 125 static void _Heap_Protection_block_initialize_default(
/* Stamp known protector patterns at both ends of the block header so a
   later check can detect overwrites. */
130 block->Protection_begin.protector [0] = HEAP_BEGIN_PROTECTOR_0;
131 block->Protection_begin.protector [1] = HEAP_BEGIN_PROTECTOR_1;
/* A freshly initialized block has no delayed free pending. */
132 block->Protection_begin.next_delayed_free_block = NULL;
/* Line 133 is missing from this extract -- presumably it initializes
   Protection_begin.task (cf. the _Thread_Get_executing() entry in the
   index below); confirm against the original file. */
134 block->Protection_begin.tag = NULL;
135 block->Protection_end.protector [0] = HEAP_END_PROTECTOR_0;
136 block->Protection_end.protector [1] = HEAP_END_PROTECTOR_1;
/* Default HEAP_PROTECTION check hook: verifies that the four protector
   words written by _Heap_Protection_block_initialize_default are still
   intact.  The parameter list (lines 140-144) and the action taken when
   the comparison fails (lines 149-153) are missing from this extract --
   presumably the error hook is invoked; confirm against the original. */
139 static void _Heap_Protection_block_check_default(
/* A mismatch in any begin or end protector word indicates the block
   header was overwritten. */
145 block->Protection_begin.protector [0] != HEAP_BEGIN_PROTECTOR_0
146 || block->Protection_begin.protector [1] != HEAP_BEGIN_PROTECTOR_1
147 || block->Protection_end.protector [0] != HEAP_END_PROTECTOR_0
148 || block->Protection_end.protector [1] != HEAP_END_PROTECTOR_1
/* Default HEAP_PROTECTION error hook.  Only the header line survives in
   this extract; its body (line 155 onward) is missing -- per the index
   below it presumably terminates the system via _Terminate(); confirm
   against the original file. */
154 static void _Heap_Protection_block_error_default(
/* _Heap_Get_first_and_last_block (fragment): computes the first and last
   block for a prospective heap area and validates that the area is big
   enough.  The function header (around line 170), parameter lines 173 and
   175-178, and the right-hand sides of several initializers (lines 181,
   183, 185, 187-192) are missing from this extract. */
171 uintptr_t heap_area_begin,
172 uintptr_t heap_area_size,
174 uintptr_t min_block_size,
/* End address of the candidate heap area; also used below to detect
   address-arithmetic wrap-around. */
179 uintptr_t
const heap_area_end = heap_area_begin + heap_area_size;
/* The initializer expressions of the next four locals were lost in
   extraction; only the declarations remain. */
180 uintptr_t
const alloc_area_begin =
182 uintptr_t
const first_block_begin =
184 uintptr_t
const overhead =
186 uintptr_t
const first_block_size =
/* Reject the area if the end address overflowed (wrapped below begin),
   the size does not exceed the fixed overhead, or the first block would
   be smaller than the minimum block size. */
193 heap_area_end < heap_area_begin
194 || heap_area_size <= overhead
195 || first_block_size < min_block_size
/* On success, report the computed first and last blocks to the caller
   through the output pointers. */
201 *first_block_ptr = first_block;
202 *last_block_ptr = last_block;
/* _Heap_Initialize (fragment): sets up the heap control block over the
   given memory area and returns the allocatable size (0 presumably
   signals failure -- confirm; the failure paths around lines 226-249 and
   261-266 are missing from this extract). */
209 void *heap_area_begin_ptr,
210 uintptr_t heap_area_size,
/* Work with the area boundaries as integers for alignment arithmetic. */
215 uintptr_t
const heap_area_begin = (uintptr_t) heap_area_begin_ptr;
216 uintptr_t
const heap_area_end = heap_area_begin + heap_area_size;
217 uintptr_t first_block_begin = 0;
218 uintptr_t first_block_size = 0;
219 uintptr_t last_block_begin = 0;
220 uintptr_t min_block_size = 0;
221 bool area_ok =
false;
/* A zero page size is handled specially (the branch body, lines 226
   onward, is missing -- presumably a default page size is substituted;
   confirm against the original). */
225 if ( page_size == 0 ) {
/* Start from a zeroed control block, then install the default
   HEAP_PROTECTION hooks defined above. */
250 memset(heap, 0,
sizeof(*heap));
252 #ifdef HEAP_PROTECTION 253 heap->Protection.block_initialize = _Heap_Protection_block_initialize_default;
254 heap->Protection.block_check = _Heap_Protection_block_check_default;
255 heap->Protection.block_error = _Heap_Protection_block_error_default;
/* Derive the first block's size from the span between the first and
   last block addresses. */
258 first_block_begin = (uintptr_t) first_block;
259 last_block_begin = (uintptr_t) last_block;
260 first_block_size = last_block_begin - first_block_begin;
267 _Heap_Protection_block_initialize( heap, first_block );
/* Record the heap geometry in the control block. */
270 heap->page_size = page_size;
271 heap->min_block_size = min_block_size;
272 heap->area_begin = heap_area_begin;
273 heap->area_end = heap_area_end;
274 heap->first_block = first_block;
275 heap->last_block = last_block;
/* The last block's prev_size records the first block's size. */
280 last_block->
prev_size = first_block_size;
283 _Heap_Protection_block_initialize( heap, last_block );
/* Statistics: the allocatable size equals the first block's size, which
   is also the return value. */
286 stats->
size = first_block_size;
303 return first_block_size;
/* _Heap_Block_split (fragment): splits a block so the leading part
   satisfies the allocation and the trailing part, if large enough,
   becomes a free block.  The parameter list (lines 307-315), the
   free_size/free_size_limit computations (lines 325-331), and the
   merge/insert logic between the surviving lines are missing from this
   extract. */
306 static void _Heap_Block_split(
316 uintptr_t
const page_size = heap->page_size;
317 uintptr_t
const min_block_size = heap->min_block_size;
/* The used part of the block is rounded up to a page-size multiple.
   The right-hand side of used_size (line 323) is missing. */
322 uintptr_t
const used_size =
324 uintptr_t
const used_block_size =
_Heap_Align_up( used_size, page_size );
/* Only split when the remainder is worth keeping as a free block. */
332 if ( free_size >= free_size_limit ) {
334 uintptr_t free_block_size = block_size - used_block_size;
339 _HAssert( used_block_size + free_block_size == block_size );
/* Presumably coalescing with a free successor block -- the surrounding
   condition (lines 340-353) is missing; confirm against the original. */
354 free_block_size += next_block_size;
364 _Heap_Protection_block_initialize( heap, free_block );
/* _Heap_Block_allocate_from_begin (fragment): allocates from the start
   of the block and delegates the trailing remainder to
   _Heap_Block_split.  The parameter list (lines 371-377) and the return
   (lines 379-380) are missing from this extract. */
370 static Heap_Block *_Heap_Block_allocate_from_begin(
378 _Heap_Block_split( heap, block, next_block, free_list_anchor, alloc_size );
/* _Heap_Block_allocate_from_end (fragment): carves the allocation out of
   the end of the block, leaving the leading part free.  Lines 384-387,
   389-395, and several lines between the survivors are missing from this
   extract. */
383 static Heap_Block *_Heap_Block_allocate_from_end(
388 uintptr_t alloc_begin,
/* new_block is the block that will carry the allocation; the leading
   remainder keeps the original block address. */
396 uintptr_t
const new_block_begin = (uintptr_t) new_block;
397 uintptr_t
const new_block_size = (uintptr_t) next_block - new_block_begin;
398 uintptr_t block_size_adjusted = (uintptr_t) new_block - (uintptr_t) block;
/* Both resulting parts must respect the heap's minimum block size. */
400 _HAssert( block_size_adjusted >= heap->min_block_size );
401 _HAssert( new_block_size >= heap->min_block_size );
/* The leading remainder becomes the free-list anchor for the split
   below (the surrounding branch, lines 402-408, is missing). */
409 free_list_anchor = block;
/* Presumably coalescing with a free predecessor -- the enclosing
   condition (lines 410-417) is missing; confirm against the original. */
418 block_size_adjusted += prev_block_size;
/* Record the leading part's size in the new block's prev_size field. */
423 new_block->
prev_size = block_size_adjusted;
426 _Heap_Block_split( heap, new_block, next_block, free_list_anchor, alloc_size );
/* _Heap_Block_allocate (fragment): entry point that allocates alloc_size
   bytes starting at alloc_begin inside the given block, choosing between
   the from-begin and from-end strategies.  The function header (around
   line 430), the argument lists of the two strategy calls (after lines
   465/473), and the return are missing from this extract. */
434 uintptr_t alloc_begin,
/* Offset of the requested start address within the block's allocatable
   area; the initializer of alloc_area_begin (lines 442-446) is missing. */
441 uintptr_t
const alloc_area_offset = alloc_begin - alloc_area_begin;
447 _HAssert( alloc_area_begin <= alloc_begin );
/* When the block is free, anchor the free-list update at its
   predecessor (the enclosing condition, lines 448-451, is missing). */
452 free_list_anchor = block->
prev;
/* A start offset smaller than one page is absorbed into the allocation
   and served from the block's beginning; otherwise allocate from the
   end of the block. */
462 if ( alloc_area_offset < heap->page_size ) {
463 alloc_size += alloc_area_offset;
465 block = _Heap_Block_allocate_from_begin(
473 block = _Heap_Block_allocate_from_end(
488 _Heap_Protection_block_initialize( heap, block );
Constants and Prototypes Related to the Internal Error Handler.
Run-time heap statistics.
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Alloc_area_of_block(const Heap_Block *block)
Returns the first address in the block without the heap header.
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Max(uintptr_t a, uintptr_t b)
Returns the bigger one of the two arguments.
uintptr_t size_and_flag
Contains the size of the current block and a flag which indicates if the previous block is free or used.
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Prev_block(const Heap_Block *block)
Returns the address of the previous block.
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Block_at(const Heap_Block *block, uintptr_t offset)
Returns the block which is offset away from block.
void _Terminate(Internal_errors_Source the_source, Internal_errors_t the_error) RTEMS_NO_RETURN
Initiates system termination.
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Block_of_alloc_area(uintptr_t alloc_begin, uintptr_t page_size)
Returns the starting address of the block corresponding to the allocatable area.
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Align_up(uintptr_t value, uintptr_t alignment)
Aligns the value to a given alignment, rounding up.
There is an unexpected value in the heap block protector area.
RTEMS_INLINE_ROUTINE void _Heap_Free_list_remove(Heap_Block *block)
Removes the block from the free list.
Heap Handler Implementation.
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Free_list_tail(Heap_Control *heap)
Returns the tail of the free list of the heap.
RTEMS_INLINE_ROUTINE void _Heap_Block_set_size(Heap_Block *block, uintptr_t size)
Sets the block size.
Fatal source for heap errors.
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Min_block_size(uintptr_t page_size)
Returns the minimal Heap Block size for the given page_size.
static __inline__ struct _Thread_Control * _Thread_Get_executing(void)
Returns the thread control block of the executing thread.
RTEMS_INLINE_ROUTINE void _Heap_Protection_set_delayed_free_fraction(Heap_Control *heap, uintptr_t fraction)
Sets the fraction of delayed free blocks that is actually freed during memory shortage.
Description for free or used blocks.
RTEMS_INLINE_ROUTINE bool _Heap_Is_prev_used(const Heap_Block *block)
Returns if the previous heap block is used.
uint32_t max_free_blocks
Maximum number of free blocks ever.
RTEMS_INLINE_ROUTINE Heap_Block * _Heap_Free_list_head(Heap_Control *heap)
Returns the head of the free list of the heap.
uint32_t used_blocks
Current number of used blocks.
RTEMS_INLINE_ROUTINE bool _Heap_Is_aligned(uintptr_t value, uintptr_t alignment)
Checks if the value is aligned to the given alignment.
Control block used to manage a heap.
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Align_down(uintptr_t value, uintptr_t alignment)
Returns the aligned value, truncating.
uintptr_t free_size
Current free size in bytes.
uintptr_t _Heap_Initialize(Heap_Control *heap, void *heap_area_begin_ptr, uintptr_t heap_area_size, uintptr_t page_size)
Initializes the heap control block.
RTEMS_INLINE_ROUTINE void _Heap_Free_list_replace(Heap_Block *old_block, Heap_Block *new_block)
Replaces one block in the free list by another.
uintptr_t size
Size of the allocatable area in bytes.
uintptr_t min_free_size
Minimum free size ever in bytes.
Heap_Error_reason
The heap error reason.
Heap_Block * next
Pointer to the next free block or part of the allocated area.
#define HEAP_ALLOC_BONUS
Size of the part at the block begin which may be used for allocation in charge of the previous block.
uintptr_t prev_size
Size of the previous block or part of the allocated area of the previous block.
uint32_t free_blocks
Current number of free blocks.
Inlined Routines from the Thread Handler.
Heap_Block * _Heap_Block_allocate(Heap_Control *heap, Heap_Block *block, uintptr_t alloc_begin, uintptr_t alloc_size)
Allocates the memory area starting at alloc_begin of size alloc_size bytes in the block.
RTEMS_INLINE_ROUTINE void _Heap_Free_list_insert_after(Heap_Block *block_before, Heap_Block *new_block)
Inserts a block after an existing block in the free list.
Heap_Control * heap
The heap of the block.
Heap_Block * prev
Pointer to the previous free block or part of the allocated area.
#define HEAP_PREV_BLOCK_USED
See also Heap_Block::size_and_flag.
#define HEAP_BLOCK_HEADER_SIZE
The block header consists of the two size fields (Heap_Block::prev_size and Heap_Block::size_and_flag).
RTEMS_INLINE_ROUTINE void _Heap_Set_last_block_size(Heap_Control *heap)
Sets the size of the last block for the heap.
bool _Heap_Get_first_and_last_block(uintptr_t heap_area_begin, uintptr_t heap_area_size, uintptr_t page_size, uintptr_t min_block_size, Heap_Block **first_block_ptr, Heap_Block **last_block_ptr)
Gets the first and last block for the heap area.
RTEMS_INLINE_ROUTINE uintptr_t _Heap_Block_size(const Heap_Block *block)
Returns the block size.