multi_heap: Add heap poisoning features

parent 5222428dde
commit 5c417963eb

8 changed files with 546 additions and 35 deletions
@@ -84,6 +84,7 @@ SECTIONS
     *(.iram1 .iram1.*)
     *libfreertos.a:(.literal .text .literal.* .text.*)
     *libheap.a:multi_heap.o(.literal .text .literal.* .text.*)
+    *libheap.a:multi_heap_poisoning.o(.literal .text .literal.* .text.*)
     *libesp32.a:panic.o(.literal .text .literal.* .text.*)
     *libesp32.a:core_dump.o(.literal .text .literal.* .text.*)
     *libapp_trace.a:(.literal .text .literal.* .text.*)
@@ -116,6 +117,7 @@ SECTIONS
     *libphy.a:(.rodata .rodata.*)
     *libapp_trace.a:(.rodata .rodata.*)
     *libheap.a:multi_heap.o(.rodata .rodata.*)
+    *libheap.a:multi_heap_poisoning.o(.rodata .rodata.*)
     _data_end = ABSOLUTE(.);
     . = ALIGN(4);
   } >dram0_0_seg
components/heap/Kconfig (new file, 30 lines)
@@ -0,0 +1,30 @@
menu "Heap memory debugging"

choice HEAP_CORRUPTION_DETECTION
    prompt "Heap corruption detection"
    default HEAP_POISONING_DISABLED
    help
        Enable heap poisoning features to detect heap corruption caused by out-of-bounds access to heap memory.

        "Basic" heap corruption detection disables poisoning, but in Debug mode an assertion will trigger if an
        application overwrites the heap's internal block headers and corrupts the heap structure.

        "Light impact" detection "poisons" memory allocated from the heap with 4-byte head and tail "canaries". If an
        application overruns its bounds at all, these canaries will be compromised. This option increases memory usage;
        each allocated buffer will use an extra 9-12 bytes from the heap.

        "Comprehensive" detection incorporates the "light impact" detection features plus additional checks for
        uninitialised-access and use-after-free bugs. All freshly allocated memory is set to the pattern 0xce, and all
        freed memory is set to the pattern 0xfe. These options have a noticeable additional performance impact.

        To check the integrity of all heap memory at runtime, see the function heap_caps_check_integrity().

config HEAP_POISONING_DISABLED
    bool "Basic (no poisoning)"
config HEAP_POISONING_LIGHT
    bool "Light impact"
config HEAP_POISONING_COMPREHENSIVE
    bool "Comprehensive"
endchoice

endmenu
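The "light impact" canaries described in the help text can be exercised directly against a standalone multi_heap instance. Below is a minimal host-side sketch (not part of the commit), assuming the heap component is built with poisoning enabled (CONFIG_HEAP_POISONING_LIGHT on target, or the host-test configuration where poisoning is always on): a one-byte overrun clobbers the tail canary and multi_heap_check() reports it.

    #include <stdio.h>
    #include <stdint.h>
    #include <string.h>
    #include "multi_heap.h"

    static uint8_t heap_mem[1024];

    int main(void)
    {
        multi_heap_handle_t heap = multi_heap_register(heap_mem, sizeof(heap_mem));

        uint8_t *buf = multi_heap_malloc(heap, 32);
        memset(buf, 0xAA, 32);                                     /* in-bounds writes leave the heap valid */
        printf("in bounds: %d\n", multi_heap_check(heap, true));   /* expect 1 */

        buf[32] = 0xAA;                                            /* one byte past the end hits the tail canary */
        printf("overrun:   %d\n", multi_heap_check(heap, true));   /* expect 0, plus an error print */

        return 0;
    }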
@@ -19,11 +19,43 @@
 #include <stddef.h>
 #include <stdio.h>
 #include <multi_heap.h>
+#include "multi_heap_internal.h"
 
 /* Note: Keep platform-specific parts in this header, this source
    file should depend on libc only */
 #include "multi_heap_platform.h"
 
+/* Defines compile-time configuration macros */
+#include "multi_heap_config.h"
+
+#ifndef MULTI_HEAP_POISONING
+/* if no heap poisoning, public API aliases directly to these implementations */
+void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
+    __attribute__((alias("multi_heap_malloc_impl")));
+
+void multi_heap_free(multi_heap_handle_t heap, void *p)
+    __attribute__((alias("multi_heap_free_impl")));
+
+void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
+    __attribute__((alias("multi_heap_realloc_impl")));
+
+size_t multi_heap_get_allocated_size(multi_heap_handle_t heap, void *p)
+    __attribute__((alias("multi_heap_get_allocated_size_impl")));
+
+multi_heap_handle_t multi_heap_register(void *start, size_t size)
+    __attribute__((alias("multi_heap_register_impl")));
+
+void multi_heap_get_info(multi_heap_handle_t heap, multi_heap_info_t *info)
+    __attribute__((alias("multi_heap_get_info_impl")));
+
+size_t multi_heap_free_size(multi_heap_handle_t heap)
+    __attribute__((alias("multi_heap_free_size_impl")));
+
+size_t multi_heap_minimum_free_size(multi_heap_handle_t heap)
+    __attribute__((alias("multi_heap_minimum_free_size_impl")));
+
+#endif
+
 #define ALIGN(X) ((X) & ~(sizeof(void *)-1))
 #define ALIGN_UP(X) ALIGN((X)+sizeof(void *)-1)
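When poisoning is disabled, the block above links each public multi_heap_* name directly to its _impl counterpart, so there is no wrapper call at all. A minimal standalone illustration of the GCC/ELF alias attribute used here (hypothetical names, not part of the commit):

    #include <stdio.h>

    /* the "real" implementation */
    int fast_path_impl(int x)
    {
        return x * 2;
    }

    /* 'fast_path' becomes a second name for the same symbol, so callers of the
       public name jump straight into fast_path_impl with no wrapper in between */
    int fast_path(int x) __attribute__((alias("fast_path_impl")));

    int main(void)
    {
        printf("%d\n", fast_path(21));   /* prints 42 */
        return 0;
    }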
@@ -194,6 +226,11 @@ static heap_block_t *merge_adjacent(heap_t *heap, heap_block_t *a, heap_block_t
         heap->free_bytes += sizeof(a->header);
     }
 
+#ifdef MULTI_HEAP_POISONING_SLOW
+    /* b's former block header needs to be replaced with a fill pattern */
+    multi_heap_internal_poison_fill_region(b, sizeof(heap_block_t), free);
+#endif
+
     return a;
 }
 
@@ -235,16 +272,16 @@ static void split_if_necessary(heap_t *heap, heap_block_t *block, size_t size, h
         heap->free_bytes += block_data_size(new_block);
 }
 
-size_t multi_heap_get_allocated_size(multi_heap_handle_t heap, void *p)
+size_t multi_heap_get_allocated_size_impl(multi_heap_handle_t heap, void *p)
 {
     heap_block_t *pb = get_block(p);
 
     assert_valid_block(heap, pb);
     assert(!is_free(pb));
     return block_data_size(pb);
 }
 
-multi_heap_handle_t multi_heap_register(void *start, size_t size)
+multi_heap_handle_t multi_heap_register_impl(void *start, size_t size)
 {
     heap_t *heap = (heap_t *)ALIGN_UP((intptr_t)start);
     uintptr_t end = ALIGN((uintptr_t)start + size);
@@ -285,7 +322,7 @@ void multi_heap_set_lock(multi_heap_handle_t heap, void *lock)
     heap->lock = lock;
 }
 
-void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
+void *multi_heap_malloc_impl(multi_heap_handle_t heap, size_t size)
 {
     heap_block_t *best_block = NULL;
     heap_block_t *prev_free = NULL;
@@ -336,7 +373,7 @@ void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
     return best_block->data;
 }
 
-void multi_heap_free(multi_heap_handle_t heap, void *p)
+void multi_heap_free_impl(multi_heap_handle_t heap, void *p)
 {
     heap_block_t *pb = get_block(p);
 
@@ -378,7 +415,7 @@ void multi_heap_free(multi_heap_handle_t heap, void *p)
 }
 
 
-void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
+void *multi_heap_realloc_impl(multi_heap_handle_t heap, void *p, size_t size)
 {
     heap_block_t *pb = get_block(p);
     void *result;
@@ -387,14 +424,16 @@ void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
     assert(heap != NULL);
 
     if (p == NULL) {
-        return multi_heap_malloc(heap, size);
+        return multi_heap_malloc_impl(heap, size);
     }
 
     assert_valid_block(heap, pb);
     assert(!is_free(pb) && "realloc arg should be allocated");
 
     if (size == 0) {
-        multi_heap_free(heap, p);
+        /* note: calling multi_heap_free_impl() here as we've already been
+           through any poison-unwrapping */
+        multi_heap_free_impl(heap, p);
         return NULL;
     }
 
@@ -449,10 +488,13 @@ void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
 
     if (result == NULL) {
         // Need to allocate elsewhere and copy data over
-        result = multi_heap_malloc(heap, size);
+        //
+        // (Calling _impl versions here as we've already been through any
+        // unwrapping for heap poisoning features.)
+        result = multi_heap_malloc_impl(heap, size);
         if (result != NULL) {
             memcpy(result, pb->data, block_data_size(pb));
-            multi_heap_free(heap, pb->data);
+            multi_heap_free_impl(heap, pb->data);
         }
     }
 
@@ -511,10 +553,26 @@ bool multi_heap_check(multi_heap_handle_t heap, bool print_errors)
                 total_free_bytes += block_data_size(b);
             }
         }
-    }
+
+#ifdef MULTI_HEAP_POISONING
+        if (!is_last_block(b)) {
+            /* For slow heap poisoning, any block should contain correct poisoning patterns and/or fills */
+            bool poison_ok;
+            if (is_free(b) && b != heap->last_block) {
+                uint32_t block_len = (intptr_t)get_next_block(b) - (intptr_t)b - sizeof(heap_block_t);
+                poison_ok = multi_heap_internal_check_block_poisoning(&b[1], block_len, true, print_errors);
+            }
+            else {
+                poison_ok = multi_heap_internal_check_block_poisoning(b->data, block_data_size(b), false, print_errors);
+            }
+            valid = poison_ok && valid;
+        }
+#endif
+
+    } /* for(heap_block_t b = ... */
 
     if (prev != heap->last_block) {
-        FAIL_PRINT("CORRUPT HEAP: Ended at %p not %p\n", prev, heap->last_block);
+        FAIL_PRINT("CORRUPT HEAP: Last block %p not %p\n", prev, heap->last_block);
     }
     if (!is_free(heap->last_block)) {
         FAIL_PRINT("CORRUPT HEAP: Expected prev block %p to be free\n", heap->last_block);
@@ -547,7 +605,7 @@ void multi_heap_dump(multi_heap_handle_t heap)
     MULTI_HEAP_UNLOCK(heap->lock);
 }
 
-size_t multi_heap_free_size(multi_heap_handle_t heap)
+size_t multi_heap_free_size_impl(multi_heap_handle_t heap)
 {
     if (heap == NULL) {
         return 0;
@@ -555,7 +613,7 @@ size_t multi_heap_free_size(multi_heap_handle_t heap)
     return heap->free_bytes;
 }
 
-size_t multi_heap_minimum_free_size(multi_heap_handle_t heap)
+size_t multi_heap_minimum_free_size_impl(multi_heap_handle_t heap)
 {
     if (heap == NULL) {
         return 0;
@@ -563,7 +621,7 @@ size_t multi_heap_minimum_free_size(multi_heap_handle_t heap)
     return heap->minimum_free_bytes;
 }
 
-void multi_heap_get_info(multi_heap_handle_t heap, multi_heap_info_t *info)
+void multi_heap_get_info_impl(multi_heap_handle_t heap, multi_heap_info_t *info)
 {
     memset(info, 0, sizeof(multi_heap_info_t));
 
components/heap/multi_heap_config.h (new file, 37 lines)
@@ -0,0 +1,37 @@
// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

#ifdef ESP_PLATFORM

#include "sdkconfig.h"

/* Configuration macros for multi-heap */

#ifdef CONFIG_HEAP_POISONING_LIGHT
#define MULTI_HEAP_POISONING
#endif

#ifdef CONFIG_HEAP_POISONING_COMPREHENSIVE
#define MULTI_HEAP_POISONING
#define MULTI_HEAP_POISONING_SLOW
#endif

#else /* !ESP_PLATFORM */

/* Host-side tests, enable full poisoning */
#define MULTI_HEAP_POISONING
#define MULTI_HEAP_POISONING_SLOW

#endif
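The choice made in the Kconfig menu reaches C code only through the two macros this header defines. A small sketch (illustrative, not part of the commit) of how dependent code can branch on the selected level, assuming the header is on the include path and, on target, sdkconfig.h defines one of the CONFIG_HEAP_POISONING_* options:

    #include <stdio.h>
    #include "multi_heap_config.h"

    int main(void)
    {
    #if defined(MULTI_HEAP_POISONING_SLOW)
        puts("comprehensive: canaries plus 0xce/0xfe fill patterns");
    #elif defined(MULTI_HEAP_POISONING)
        puts("light impact: head/tail canaries only");
    #else
        puts("basic: no poisoning, heap structure asserts only");
    #endif
        return 0;
    }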
components/heap/multi_heap_internal.h (new file, 40 lines)
@@ -0,0 +1,40 @@
// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once

/* Internal definitions for the "implementation" of the multi_heap API,
   as defined in multi_heap.c.

   If heap poisoning is disabled, these are aliased directly to the public API.

   If heap poisoning is enabled, wrapper functions call each of these.
*/
void *multi_heap_malloc_impl(multi_heap_handle_t heap, size_t size);
void multi_heap_free_impl(multi_heap_handle_t heap, void *p);
void *multi_heap_realloc_impl(multi_heap_handle_t heap, void *p, size_t size);
multi_heap_handle_t multi_heap_register_impl(void *start, size_t size);
void multi_heap_get_info_impl(multi_heap_handle_t heap, multi_heap_info_t *info);
size_t multi_heap_free_size_impl(multi_heap_handle_t heap);
size_t multi_heap_minimum_free_size_impl(multi_heap_handle_t heap);
size_t multi_heap_get_allocated_size_impl(multi_heap_handle_t heap, void *p);

/* Some internal functions for heap poisoning use */

/* Check an allocated block's poison bytes are correct. Called by multi_heap_check(). */
bool multi_heap_internal_check_block_poisoning(void *start, size_t size, bool is_free, bool print_errors);

/* Fill a region of memory with the free or malloced pattern.
   Called when merging blocks, to overwrite the old block header.
*/
void multi_heap_internal_poison_fill_region(void *start, size_t size, bool is_free);
components/heap/multi_heap_poisoning.c (new file, 338 lines)
@@ -0,0 +1,338 @@
// Copyright 2015-2016 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdint.h>
#include <stdlib.h>
#include <stdbool.h>
#include <assert.h>
#include <string.h>
#include <stddef.h>
#include <stdio.h>
#include <sys/param.h>
#include <multi_heap.h>
#include "multi_heap_internal.h"

/* Note: Keep platform-specific parts in this header, this source
   file should depend on libc only */
#include "multi_heap_platform.h"

/* Defines compile-time configuration macros */
#include "multi_heap_config.h"

#ifdef MULTI_HEAP_POISONING

/* Alias MULTI_HEAP_POISONING_SLOW to SLOW for better readability */
#ifdef SLOW
#error "external header has defined SLOW"
#endif
#ifdef MULTI_HEAP_POISONING_SLOW
#define SLOW 1
#endif

#define MALLOC_FILL_PATTERN 0xce
#define FREE_FILL_PATTERN 0xfe

#define HEAD_CANARY_PATTERN 0xABBA1234
#define TAIL_CANARY_PATTERN 0xBAAD5678

typedef struct {
    uint32_t head_canary;
    size_t alloc_size;
} poison_head_t;

typedef struct {
    uint32_t tail_canary;
} poison_tail_t;

#define POISON_OVERHEAD (sizeof(poison_head_t) + sizeof(poison_tail_t))
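/* (Editorial aside, not part of the commit.) With 32-bit pointers the on-heap layout of one
   poisoned allocation of N data bytes is:

       [ head_canary (4) | alloc_size (4) ][ N data bytes ][ tail_canary (4) ]

   so POISON_OVERHEAD evaluates to 12 bytes here, which is roughly the "extra 9-12 bytes per
   allocated buffer" quoted in the Kconfig help; the exact cost depends on how the requested
   size interacts with the heap's 4-byte alignment. */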
/* Given a "poisoned" region with pre-data header 'head', and actual data size 'alloc_size', fill in the head and tail
   region checks.

   Returns the pointer to the actual usable data buffer (ie after 'head')
*/
static uint8_t *poison_allocated_region(poison_head_t *head, size_t alloc_size)
{
    uint8_t *data = (uint8_t *)(&head[1]); /* start of data ie 'real' allocated buffer */
    poison_tail_t *tail = (poison_tail_t *)(data + alloc_size);
    head->alloc_size = alloc_size;
    head->head_canary = HEAD_CANARY_PATTERN;

    uint32_t tail_canary = TAIL_CANARY_PATTERN;
    if ((intptr_t)tail % sizeof(void *) == 0) {
        tail->tail_canary = tail_canary;
    } else {
        /* unaligned tail_canary */
        memcpy(&tail->tail_canary, &tail_canary, sizeof(uint32_t));
    }

    return data;
}

/* Given a pointer to some allocated data, check the head & tail poison structures (before & after it) that were
   previously injected by poison_allocated_region().

   Returns a pointer to the poison header structure, or NULL if the poison structures are corrupt.
*/
static poison_head_t *verify_allocated_region(void *data, bool print_errors)
{
    poison_head_t *head = (poison_head_t *)((intptr_t)data - sizeof(poison_head_t));
    poison_tail_t *tail = (poison_tail_t *)((intptr_t)data + head->alloc_size);

    /* check if the beginning of the data was overwritten */
    if (head->head_canary != HEAD_CANARY_PATTERN) {
        if (print_errors) {
            printf("CORRUPT HEAP: Bad head at %p. Expected 0x%08x got 0x%08x\n", &head->head_canary,
                   HEAD_CANARY_PATTERN, head->head_canary);
        }
        return NULL;
    }

    /* check if the end of the data was overrun */
    uint32_t canary;
    if ((intptr_t)tail % sizeof(void *) == 0) {
        canary = tail->tail_canary;
    } else {
        /* tail is unaligned */
        memcpy(&canary, &tail->tail_canary, sizeof(canary));
    }
    if (canary != TAIL_CANARY_PATTERN) {
        if (print_errors) {
            printf("CORRUPT HEAP: Bad tail at %p. Expected 0x%08x got 0x%08x\n", &tail->tail_canary,
                   TAIL_CANARY_PATTERN, canary);
        }
        return NULL;
    }

    return head;
}
#ifdef SLOW
/* Go through a region that should have the specified fill byte 'pattern',
   verify it.

   if expect_free is true, expect FREE_FILL_PATTERN otherwise MALLOC_FILL_PATTERN.

   if swap_pattern is true, swap patterns in the buffer (ie replace MALLOC_FILL_PATTERN with FREE_FILL_PATTERN, and vice versa.)

   Returns true if verification checks out.
*/
static bool verify_fill_pattern(void *data, size_t size, bool print_errors, bool expect_free, bool swap_pattern)
{
    const uint32_t FREE_FILL_WORD = (FREE_FILL_PATTERN << 24) | (FREE_FILL_PATTERN << 16) | (FREE_FILL_PATTERN << 8) | FREE_FILL_PATTERN;
    const uint32_t MALLOC_FILL_WORD = (MALLOC_FILL_PATTERN << 24) | (MALLOC_FILL_PATTERN << 16) | (MALLOC_FILL_PATTERN << 8) | MALLOC_FILL_PATTERN;

    const uint32_t EXPECT_WORD = expect_free ? FREE_FILL_WORD : MALLOC_FILL_WORD;
    const uint32_t REPLACE_WORD = expect_free ? MALLOC_FILL_WORD : FREE_FILL_WORD;
    bool valid = true;

    /* Use 4-byte operations as much as possible */
    if ((intptr_t)data % 4 == 0) {
        uint32_t *p = data;
        while (size >= 4) {
            if (*p != EXPECT_WORD) {
                if (print_errors) {
                    printf("Invalid data at %p. Expected 0x%08x got 0x%08x\n", p, EXPECT_WORD, *p);
                }
                valid = false;
            }
            if (swap_pattern) {
                *p = REPLACE_WORD;
            }
            p++;
            size -= 4;
        }
        data = p;
    }

    uint8_t *p = data;
    for (int i = 0; i < size; i++) {
        if (p[i] != (uint8_t)EXPECT_WORD) {
            if (print_errors) {
                printf("Invalid data at %p. Expected 0x%02x got 0x%02x\n", &p[i], (uint8_t)EXPECT_WORD, p[i]);
            }
            valid = false;
        }
        if (swap_pattern) {
            p[i] = (uint8_t)REPLACE_WORD;
        }
    }
    return valid;
}
#endif
void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
{
    poison_head_t *head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
    if (head == NULL) {
        return NULL;
    }
    uint8_t *data = poison_allocated_region(head, size);
#ifdef SLOW
    /* check everything we got back is FREE_FILL_PATTERN & swap for MALLOC_FILL_PATTERN */
    assert( verify_fill_pattern(data, size, true, true, true) );
#endif

    return data;
}

void multi_heap_free(multi_heap_handle_t heap, void *p)
{
    if (p == NULL) {
        return;
    }
    poison_head_t *head = verify_allocated_region(p, true);
    assert(head != NULL);

#ifdef SLOW
    /* replace everything with FREE_FILL_PATTERN, including the poison head/tail */
    memset(head, FREE_FILL_PATTERN,
           head->alloc_size + POISON_OVERHEAD);
#endif
    multi_heap_free_impl(heap, head);
}

void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
{
    poison_head_t *head = NULL;

    if (p == NULL) {
        return multi_heap_malloc(heap, size);
    }
    if (size == 0) {
        multi_heap_free(heap, p);
        return NULL;
    }

    /* p != NULL, size != 0 */
    head = verify_allocated_region(p, true);
    assert(head != NULL);

#ifndef SLOW
    poison_head_t *new_head = multi_heap_realloc_impl(heap, head, size + POISON_OVERHEAD);
    if (new_head == NULL) { // new allocation failed, everything stays as-is
        return NULL;
    }
    return poison_allocated_region(new_head, size);
#else // SLOW
    /* When slow poisoning is enabled, it becomes very fiddly to try and correctly fill memory when reallocing in place
       (where the buffer may be moved (including to an overlapping address with the old buffer), grown, or shrunk in
       place.)

       For now we just malloc a new buffer, copy, and free. :|
    */
    size_t orig_alloc_size = head->alloc_size;

    poison_head_t *new_head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
    if (new_head == NULL) {
        return NULL;
    }
    void *new_data = poison_allocated_region(new_head, size);
    memcpy(new_data, p, MIN(size, orig_alloc_size));
    multi_heap_free(heap, p);
    return new_data;
#endif
}
size_t multi_heap_get_allocated_size(multi_heap_handle_t heap, void *p)
{
    poison_head_t *head = verify_allocated_region(p, true);
    assert(head != NULL);
    size_t result = multi_heap_get_allocated_size_impl(heap, head);
    if (result > 0) {
        return result - POISON_OVERHEAD;
    }
    return 0;
}

multi_heap_handle_t multi_heap_register(void *start, size_t size)
{
    if (start != NULL) {
        memset(start, FREE_FILL_PATTERN, size);
    }
    return multi_heap_register_impl(start, size);
}

static inline void subtract_poison_overhead(size_t *arg) {
    if (*arg > POISON_OVERHEAD) {
        *arg -= POISON_OVERHEAD;
    } else {
        *arg = 0;
    }
}

void multi_heap_get_info(multi_heap_handle_t heap, multi_heap_info_t *info)
{
    multi_heap_get_info_impl(heap, info);
    /* don't count the heap poison head & tail overhead in the allocated bytes size */
    info->total_allocated_bytes -= info->allocated_blocks * POISON_OVERHEAD;
    /* trim largest_free_block to account for poison overhead */
    subtract_poison_overhead(&info->largest_free_block);
    /* similarly, trim total_free_bytes so there's no suggestion that
       a block this big may be available. */
    subtract_poison_overhead(&info->total_free_bytes);
    subtract_poison_overhead(&info->minimum_free_bytes);
}

size_t multi_heap_free_size(multi_heap_handle_t heap)
{
    size_t r = multi_heap_free_size_impl(heap);
    subtract_poison_overhead(&r);
    return r;
}

size_t multi_heap_minimum_free_size(multi_heap_handle_t heap)
{
    size_t r = multi_heap_minimum_free_size_impl(heap);
    subtract_poison_overhead(&r);
    return r;
}
/* Internal hooks used by multi_heap to manage poisoning, while keeping some modularity */

bool multi_heap_internal_check_block_poisoning(void *start, size_t size, bool is_free, bool print_errors)
{
    if (is_free) {
#ifdef SLOW
        return verify_fill_pattern(start, size, print_errors, true, false);
#else
        return true; /* can only verify empty blocks in SLOW mode */
#endif
    } else {
        void *data = (void *)((intptr_t)start + sizeof(poison_head_t));
        poison_head_t *head = verify_allocated_region(data, print_errors);
        if (head != NULL && head->alloc_size > size - POISON_OVERHEAD) {
            /* block can be bigger than alloc_size, for reasons of alignment & fragmentation,
               but block can never be smaller than head->alloc_size... */
            if (print_errors) {
                printf("CORRUPT HEAP: Size at %p expected <=0x%08x got 0x%08x\n", &head->alloc_size,
                       size - POISON_OVERHEAD, head->alloc_size);
            }
            return false;
        }
        return head != NULL;
    }
}

void multi_heap_internal_poison_fill_region(void *start, size_t size, bool is_free)
{
    memset(start, is_free ? FREE_FILL_PATTERN : MALLOC_FILL_PATTERN, size);
}

#else // !MULTI_HEAP_POISONING

#ifdef MULTI_HEAP_POISONING_SLOW
#error "MULTI_HEAP_POISONING_SLOW requires MULTI_HEAP_POISONING"
#endif

#endif // MULTI_HEAP_POISONING
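Comprehensive ("slow") poisoning adds the 0xce/0xfe fill patterns on top of the canaries, which is what lets multi_heap_check() catch stray writes into freed memory. A minimal host-side sketch of that behaviour (not part of the commit; assumes a build where MULTI_HEAP_POISONING_SLOW is defined, such as the host-test configuration above, and the use-after-free write is deliberate undefined behaviour for demonstration only):

    #include <stdio.h>
    #include <stdint.h>
    #include "multi_heap.h"

    static uint8_t heap_mem[512];

    int main(void)
    {
        multi_heap_handle_t heap = multi_heap_register(heap_mem, sizeof(heap_mem));

        uint8_t *p = multi_heap_malloc(heap, 16);   /* buffer comes back filled with 0xce */
        multi_heap_free(heap, p);                   /* freed region is refilled with 0xfe */

        p[0] = 0x42;   /* use-after-free: dirties the free fill pattern */

        /* multi_heap_check() verifies the 0xfe fill of every free block, so the
           stray write is reported and the check returns false */
        printf("heap ok after use-after-free: %d\n", multi_heap_check(heap, true));
        return 0;
    }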
@@ -3,6 +3,7 @@ all: $(TEST_PROGRAM)
 
 SOURCE_FILES = $(abspath \
 	../multi_heap.c \
+	../multi_heap_poisoning.c \
 	test_multi_heap.cpp \
 	main.cpp \
 	)
 
@@ -12,7 +13,7 @@ INCLUDE_FLAGS = -I../include -I../../../tools/catch
 GCOV ?= gcov
 
 CPPFLAGS += $(INCLUDE_FLAGS) -D CONFIG_LOG_DEFAULT_LEVEL -g -fstack-protector-all -m32
-CFLAGS += -fprofile-arcs -ftest-coverage
+CFLAGS += -Wall -Werror -fprofile-arcs -ftest-coverage
 CXXFLAGS += -std=c++11 -Wall -Werror -fprofile-arcs -ftest-coverage
 LDFLAGS += -lstdc++ -fprofile-arcs -ftest-coverage -m32
@@ -1,7 +1,10 @@
 #include "catch.hpp"
 #include "multi_heap.h"
 
+#include "../multi_heap_config.h"
+
 #include <string.h>
+#include <assert.h>
 
 /* Insurance against accidentally using libc heap functions in tests */
 #undef free
@@ -25,11 +28,7 @@ TEST_CASE("multi_heap simple allocations", "[multi_heap]")
     multi_heap_dump(heap);
     printf("*********************\n");
 
-    void *buf = multi_heap_malloc(heap, test_alloc_size);
-    printf("First malloc:\n");
-    multi_heap_dump(heap);
-    printf("*********************\n");
-
+    uint8_t *buf = (uint8_t *)multi_heap_malloc(heap, test_alloc_size);
 
     printf("small_heap %p buf %p\n", small_heap, buf);
     REQUIRE( buf != NULL );
@@ -50,7 +49,7 @@ TEST_CASE("multi_heap simple allocations", "[multi_heap]")
     printf("*********************\n");
 
     /* Now there should be space for another allocation */
-    buf = multi_heap_malloc(heap, test_alloc_size);
+    buf = (uint8_t *)multi_heap_malloc(heap, test_alloc_size);
     REQUIRE( buf != NULL );
     multi_heap_free(heap, buf);
 
@@ -60,15 +59,10 @@ TEST_CASE("multi_heap simple allocations", "[multi_heap]")
 
 TEST_CASE("multi_heap fragmentation", "[multi_heap]")
 {
-    uint8_t small_heap[200];
+    uint8_t small_heap[256];
     multi_heap_handle_t heap = multi_heap_register(small_heap, sizeof(small_heap));
 
-    /* allocate enough that we can't fit 6 alloc_size blocks in the heap (due to
-       per-allocation block overhead. This calculation works for 32-bit pointers,
-       probably needs tweaking for 64-bit. */
-    size_t alloc_size = ((multi_heap_free_size(heap)) / 6) & ~(sizeof(void *) - 1);
-
-    printf("alloc_size %zu\n", alloc_size);
+    const size_t alloc_size = 24;
 
     void *p[4];
     for (int i = 0; i < 4; i++) {
@@ -116,6 +110,8 @@ TEST_CASE("multi_heap many random allocations", "[multi_heap]")
     uint8_t big_heap[1024];
     const int NUM_POINTERS = 64;
 
+    printf("Running multi-allocation test...\n");
+
     void *p[NUM_POINTERS] = { 0 };
     size_t s[NUM_POINTERS] = { 0 };
     multi_heap_handle_t heap = multi_heap_register(big_heap, sizeof(big_heap));
@@ -139,16 +135,17 @@ TEST_CASE("multi_heap many random allocations", "[multi_heap]")
             */
            size_t new_size = rand() % 1024;
            void *new_p = multi_heap_realloc(heap, p[n], new_size);
+           printf("realloc %p -> %p (%zu -> %zu)\n", p[n], new_p, s[n], new_size);
+           multi_heap_check(heap, true);
            if (new_size == 0 || new_p != NULL) {
                p[n] = new_p;
+               s[n] = new_size;
                if (new_size > 0) {
                    REQUIRE( p[n] >= big_heap );
                    REQUIRE( p[n] < big_heap + sizeof(big_heap) );
+                   memset(p[n], n, new_size);
                }
-               s[n] = new_size;
-               memset(p[n], n, s[n]);
            }
-           REQUIRE( multi_heap_check(heap, true) );
            continue;
        }
 
@@ -157,10 +154,11 @@ TEST_CASE("multi_heap many random allocations", "[multi_heap]")
            /* Verify pre-existing contents of p[n] */
            uint8_t compare[s[n]];
            memset(compare, n, s[n]);
-           REQUIRE( memcmp(compare, p[n], s[n]) == 0 );
+           /*REQUIRE*/assert( memcmp(compare, p[n], s[n]) == 0 );
        }
-       //printf("free %zu bytes %p\n", s[n], p[n]);
+       REQUIRE( multi_heap_check(heap, true) );
        multi_heap_free(heap, p[n]);
+       printf("freed %p (%zu)\n", p[n], s[n]);
        if (!multi_heap_check(heap, true)) {
            printf("FAILED iteration %d after freeing %p\n", i, p[n]);
            multi_heap_dump(heap);
@@ -169,7 +167,9 @@ TEST_CASE("multi_heap many random allocations", "[multi_heap]")
        }
 
        s[n] = rand() % 1024;
+       REQUIRE( multi_heap_check(heap, true) );
        p[n] = multi_heap_malloc(heap, s[n]);
+       printf("malloc %p (%zu)\n", p[n], s[n]);
        if (p[n] != NULL) {
            REQUIRE( p[n] >= big_heap );
            REQUIRE( p[n] < big_heap + sizeof(big_heap) );
@@ -294,7 +294,7 @@ TEST_CASE("multi_heap minimum-size allocations", "[multi_heap]")
 TEST_CASE("multi_heap_realloc()", "[multi_heap]")
 {
     const uint32_t PATTERN = 0xABABDADA;
-    uint8_t small_heap[256];
+    uint8_t small_heap[300];
     multi_heap_handle_t heap = multi_heap_register(small_heap, sizeof(small_heap));
 
     uint32_t *a = (uint32_t *)multi_heap_malloc(heap, 64);
@@ -311,6 +311,10 @@ TEST_CASE("multi_heap_realloc()", "[multi_heap]")
     REQUIRE( c > b ); /* 'a' moves, 'c' takes the block after 'b' */
     REQUIRE( *c == PATTERN );
 
+#ifndef MULTI_HEAP_POISONING_SLOW
+    // "Slow" poisoning implementation doesn't reallocate in place, so these
+    // tests will fail...
+
     uint32_t *d = (uint32_t *)multi_heap_realloc(heap, c, 36);
     REQUIRE( multi_heap_check(heap, true) );
     REQUIRE( c == d ); /* 'c' block should be shrunk in-place */
@@ -333,6 +337,7 @@ TEST_CASE("multi_heap_realloc()", "[multi_heap]")
     g = (uint32_t *)multi_heap_realloc(heap, e, 128);
     REQUIRE( multi_heap_check(heap, true) );
     REQUIRE( e == g ); /* 'g' extends 'e' in place, into the space formerly held by 'f' */
+#endif
 }
 
 TEST_CASE("corrupt heap block", "[multi_heap]")