heap/multi_heap: added initial implementation of aligned alloc function

Felipe Neves 2019-11-13 09:24:08 +08:00
parent a05c00c2b3
commit d26ddaa644
3 changed files with 91 additions and 0 deletions

View file

@@ -29,6 +29,17 @@ extern "C" {
/** @brief Opaque handle to a registered heap */
typedef struct multi_heap_info *multi_heap_handle_t;
/**
 * @brief Allocate a chunk of memory with a specific alignment
 *
 * @param heap Handle to a registered heap.
 * @param size Size in bytes of the memory chunk.
 * @param alignment How the returned pointer must be aligned, in bytes. Must be a power of two.
 *
 * @return Pointer to the allocated memory, or NULL on failure.
 */
void *multi_heap_aligned_alloc(multi_heap_handle_t heap, size_t size, size_t alignment);
/** @brief malloc() a buffer in a given heap
 *
 * Semantics are the same as standard malloc(), only the returned buffer will be allocated in the specified heap.
@@ -40,6 +51,14 @@ typedef struct multi_heap_info *multi_heap_handle_t;
 */
void *multi_heap_malloc(multi_heap_handle_t heap, size_t size);
/** @brief free() a buffer allocated via multi_heap_aligned_alloc() in a given heap.
 *
 * @param heap Handle to a registered heap.
 * @param p NULL, or a pointer previously returned from multi_heap_aligned_alloc() for the same heap.
 */
void multi_heap_aligned_free(multi_heap_handle_t heap, void *p);
/** @brief free() a buffer in a given heap.
 *
 * Semantics are the same as standard free(), only the argument 'p' must be NULL or have been allocated in the specified heap.

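As a usage sketch of the two new functions (illustrative only: the heap handle is assumed to come from multi_heap_register(), and the helper name is hypothetical, not part of this change):

#include "multi_heap.h"

/* Hypothetical example; `heap` is assumed to be a handle previously obtained from multi_heap_register() */
void aligned_alloc_example(multi_heap_handle_t heap)
{
    /* Request 128 bytes aligned to a 64-byte boundary; the alignment must be a power of two */
    void *buf = multi_heap_aligned_alloc(heap, 128, 64);
    if (buf != NULL) {
        /* ... use buf ... */
        /* Release with the matching aligned free, which uses the offset word stored
           below the returned pointer to find the underlying block */
        multi_heap_aligned_free(heap, buf);
    }
}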
View file

@@ -23,6 +23,7 @@ typedef const struct heap_block *multi_heap_block_handle_t;
If heap poisoning is enabled, wrapper functions call each of these.
*/
void *multi_heap_malloc_impl(multi_heap_handle_t heap, size_t size);
void multi_heap_free_impl(multi_heap_handle_t heap, void *p);
void *multi_heap_realloc_impl(multi_heap_handle_t heap, void *p, size_t size);

View file

@@ -45,6 +45,9 @@
#define HEAD_CANARY_PATTERN 0xABBA1234
#define TAIL_CANARY_PATTERN 0xBAAD5678
#define ALIGN_UP(num, align) (((num) + ((align) - 1)) & ~((align) - 1))
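/* Illustrative note (not from the original change): ALIGN_UP(num, align) rounds num up to the
   next multiple of align, which must be a power of two for the mask trick to work, e.g.
   ALIGN_UP(0x1003, 8) == 0x1008 and ALIGN_UP(0x1008, 8) == 0x1008. */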
typedef struct {
    uint32_t head_canary;
    MULTI_HEAP_BLOCK_OWNER
@@ -182,6 +185,49 @@ static bool verify_fill_pattern(void *data, size_t size, bool print_errors, bool
}
#endif
void *multi_heap_aligned_alloc(multi_heap_handle_t heap, size_t size, size_t alignment)
{
    if (heap == NULL) {
        return NULL;
    }

    if (!size) {
        return NULL;
    }

    //Alignment must be a non-zero power of two:
    if (!alignment || (alignment & (alignment - 1)) != 0) {
        return NULL;
    }

    //Overhead: offset word, worst-case alignment padding and the poison head/tail
    size_t overhead = sizeof(uint32_t) + (alignment - 1) + POISON_OVERHEAD;
    if (size > SIZE_MAX - overhead) {
        return NULL;
    }

    multi_heap_internal_lock(heap);
    poison_head_t *head = multi_heap_malloc_impl(heap, size + overhead);
    if (head == NULL) {
        multi_heap_internal_unlock(heap);
        return NULL;
    }

    uint8_t *data = poison_allocated_region(head, size);
    (void)data; //only inspected when SLOW heap poisoning is enabled
#ifdef SLOW
    /* check everything we got back is FREE_FILL_PATTERN & swap for MALLOC_FILL_PATTERN */
    bool ret = verify_fill_pattern(data, size, true, true, true);
    assert(ret);
#endif

    //Align the address handed back to the caller,
    //and store the offset back to the block head so it can be recovered on free
    void *ptr = (void *)ALIGN_UP((uintptr_t)head + sizeof(uint32_t) + POISON_OVERHEAD, alignment);
    *((uint32_t *)ptr - 1) = (uint32_t)((uintptr_t)ptr - (uintptr_t)head);

    multi_heap_internal_unlock(heap);
    return ptr;
}
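/*
 * Illustrative note on the scheme above: the caller's pointer is the first alignment-aligned
 * address at or above head + sizeof(uint32_t) + POISON_OVERHEAD, and the uint32_t stored just
 * below it holds (ptr - head), which multi_heap_aligned_free() reads back to locate the block.
 * Worked example with assumed values (head == 0x3FFB1006, alignment == 16, POISON_OVERHEAD == 12):
 * ptr = ALIGN_UP(0x3FFB1006 + 4 + 12, 16) = 0x3FFB1020, and the stored offset is 0x1A.
 */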
void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
{
    if(size > SIZE_MAX - POISON_OVERHEAD) {
@@ -203,6 +249,31 @@ void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
    return data;
}
void multi_heap_aligned_free(multi_heap_handle_t heap, void *p)
{
    if (p == NULL) {
        return;
    }

    multi_heap_internal_lock(heap);

    //Recover the underlying block from the offset stored just below the aligned pointer
    uint32_t offset = *((uint32_t *)p - 1);
    void *block_head = (void *)((uint8_t *)p - (offset - POISON_OVERHEAD));
    poison_head_t *head = verify_allocated_region(block_head, true);
    assert(head != NULL);

#ifdef SLOW
    /* replace everything with FREE_FILL_PATTERN, including the poison head/tail */
    memset(head, FREE_FILL_PATTERN, head->alloc_size + POISON_OVERHEAD);
#endif

    multi_heap_free_impl(heap, head);
    multi_heap_internal_unlock(heap);
}
void multi_heap_free(multi_heap_handle_t heap, void *p)
{
    if (p == NULL) {