Merge branch 'bugfix/heap_int_oflow_checks_v3.1' into 'release/v3.1'

Backport heap int overflow checks to v3.1

See merge request idf/esp-idf!4582

Commit 9f2ed0f58d
9 changed files with 122 additions and 32 deletions

@@ -34,7 +34,6 @@ PROVIDE ( cache_sram_mmu_set_rom = 0x400097f4 );
/* This is static function, but can be used, not generated by script*/
PROVIDE ( calc_rtc_memory_crc = 0x40008170 );
PROVIDE ( calloc = 0x4000bee4 );
PROVIDE ( _calloc_r = 0x4000bbf8 );
PROVIDE ( __clear_cache = 0x40063860 );
PROVIDE ( _close_r = 0x4000bd3c );
PROVIDE ( co_default_bdaddr = 0x3ffae704 );

@@ -33,26 +33,21 @@ possible. This should optimize the amount of RAM accessible to the code without

/*
This takes a memory chunk in a region that can be addressed as both DRAM as well as IRAM. It will convert it to
IRAM in such a way that it can be later freed. It assumes both the address as wel as the length to be word-aligned.
IRAM in such a way that it can be later freed. It assumes both the address as well as the length to be word-aligned.
It returns a region that's 1 word smaller than the region given because it stores the original Dram address there.

In theory, we can also make this work by prepending a struct that looks similar to the block link struct used by the
heap allocator itself, which will allow inspection tools relying on any block returned from any sort of malloc to
have such a block in front of it, work. We may do this later, if/when there is demand for it. For now, a simple
pointer is used.
*/
IRAM_ATTR static void *dram_alloc_to_iram_addr(void *addr, size_t len)
{
    uint32_t dstart = (int)addr; //First word
    uint32_t dend = ((int)addr) + len - 4; //Last word
    assert(dstart >= SOC_DIRAM_DRAM_LOW);
    assert(dend <= SOC_DIRAM_DRAM_HIGH);
    uintptr_t dstart = (uintptr_t)addr; //First word
    uintptr_t dend = dstart + len - 4; //Last word
    assert(esp_ptr_in_diram_dram((void *)dstart));
    assert(esp_ptr_in_diram_dram((void *)dend));
    assert((dstart & 3) == 0);
    assert((dend & 3) == 0);
    uint32_t istart = SOC_DIRAM_IRAM_LOW + (SOC_DIRAM_DRAM_HIGH - dend);
    uint32_t *iptr = (uint32_t *)istart;
    *iptr = dstart;
    return (void *)(iptr + 1);
    return iptr + 1;
}

bool heap_caps_match(const heap_t *heap, uint32_t caps)
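The comment above describes an address inversion between the D/IRAM alias regions: the lowest DRAM address corresponds to the highest IRAM address and vice versa, and the original DRAM start is stashed in the first IRAM word. Below is a minimal standalone sketch of that arithmetic; it mirrors the formula in the diff but is not part of it, and the SOC_* values are illustrative placeholders rather than the authoritative constants from soc.h.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

/* Illustrative placeholder ranges; the real SOC_DIRAM_* constants live in soc.h. */
#define SOC_DIRAM_DRAM_LOW   0x3FFE0000u
#define SOC_DIRAM_DRAM_HIGH  0x40000000u
#define SOC_DIRAM_IRAM_LOW   0x400A0000u

/* Forward mapping as in dram_alloc_to_iram_addr(): the last DRAM word of the
 * chunk maps to the first IRAM word of the alias, so the block is mirrored. */
static uintptr_t dram_chunk_to_iram_start(uintptr_t dstart, size_t len)
{
    uintptr_t dend = dstart + len - 4;                        /* last word of the chunk */
    return SOC_DIRAM_IRAM_LOW + (SOC_DIRAM_DRAM_HIGH - dend); /* first word of the IRAM alias */
}

int main(void)
{
    /* A chunk whose last word sits at the very top of the DRAM range... */
    uintptr_t dstart = SOC_DIRAM_DRAM_HIGH - 64;
    uintptr_t istart = dram_chunk_to_iram_start(dstart, 64);
    /* ...comes out at the very bottom of the IRAM alias: the regions are mirrored. */
    assert(istart == SOC_DIRAM_IRAM_LOW + 4);
    return 0;
}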

@@ -67,6 +62,12 @@ IRAM_ATTR void *heap_caps_malloc( size_t size, uint32_t caps )
{
    void *ret = NULL;

    if (size > HEAP_SIZE_MAX) {
        // Avoids int overflow when adding small numbers to size, or
        // calculating 'end' from start+size, by limiting 'size' to the possible range
        return NULL;
    }

    if (caps & MALLOC_CAP_EXEC) {
        //MALLOC_CAP_EXEC forces an alloc from IRAM. There is a region which has both this as well as the following
        //caps, but the following caps are not possible for IRAM. Thus, the combination is impossible and we return

@@ -75,9 +76,17 @@ IRAM_ATTR void *heap_caps_malloc( size_t size, uint32_t caps )
        if ((caps & MALLOC_CAP_8BIT) || (caps & MALLOC_CAP_DMA)) {
            return NULL;
        }
        //If any, EXEC memory should be 32-bit aligned, so round up to the next multiple of 4.
        size = (size + 3) & (~3);

        caps |= MALLOC_CAP_32BIT; // IRAM is 32-bit accessible RAM
    }

    if (caps & MALLOC_CAP_32BIT) {
        /* 32-bit accessible RAM should allocated in 4 byte aligned sizes
         * (Future versions of ESP-IDF should possibly fail if an invalid size is requested)
         */
        size = (size + 3) & (~3); // int overflow checked above
    }

    for (int prio = 0; prio < SOC_MEMORY_TYPE_NO_PRIOS; prio++) {
        //Iterate over heaps and check capabilities at this priority
        heap_t *heap;
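The "int overflow checked above" comments refer to the HEAP_SIZE_MAX guard added at the top of heap_caps_malloc(): once size is capped, the word-alignment round-up and the later size + 4 cannot wrap. A minimal sketch of the wraparound the guard prevents, assuming a 32-bit size_t as on the ESP32 (not IDF code):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    uint32_t size = UINT32_MAX - 1;               /* e.g. malloc(SIZE_MAX - 1) on a 32-bit target */
    uint32_t rounded = (size + 3) & ~(uint32_t)3; /* wraps around: yields 0 instead of a huge value */
    printf("%u rounds to %u\n", (unsigned)size, (unsigned)rounded); /* prints: 4294967294 rounds to 0 */
    return 0;
}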

@@ -90,13 +99,13 @@ IRAM_ATTR void *heap_caps_malloc( size_t size, uint32_t caps )
                //doesn't cover, see if they're available in other prios.
                if ((get_all_caps(heap) & caps) == caps) {
                    //This heap can satisfy all the requested capabilities. See if we can grab some memory using it.
                    if ((caps & MALLOC_CAP_EXEC) && heap->start >= SOC_DIRAM_DRAM_LOW && heap->start < SOC_DIRAM_DRAM_HIGH) {
                    if ((caps & MALLOC_CAP_EXEC) && esp_ptr_in_diram_dram((void *)heap->start)) {
                        //This is special, insofar that what we're going to get back is a DRAM address. If so,
                        //we need to 'invert' it (lowest address in DRAM == highest address in IRAM and vice-versa) and
                        //add a pointer to the DRAM equivalent before the address we're going to return.
                        ret = multi_heap_malloc(heap->heap, size + 4);
                        ret = multi_heap_malloc(heap->heap, size + 4); // int overflow checked above
                        if (ret != NULL) {
                            return dram_alloc_to_iram_addr(ret, size + 4);
                            return dram_alloc_to_iram_addr(ret, size + 4); // int overflow checked above
                        }
                    } else {
                        //Just try to alloc, nothing special.

@@ -243,13 +252,11 @@ IRAM_ATTR static heap_t *find_containing_heap(void *ptr )

IRAM_ATTR void heap_caps_free( void *ptr)
{
    intptr_t p = (intptr_t)ptr;

    if (ptr == NULL) {
        return;
    }

    if ((p >= SOC_DIRAM_IRAM_LOW) && (p <= SOC_DIRAM_IRAM_HIGH)) {
    if (esp_ptr_in_diram_iram(ptr)) {
        //Memory allocated here is actually allocated in the DRAM alias region and
        //cannot be de-allocated as usual. dram_alloc_to_iram_addr stores a pointer to
        //the equivalent DRAM address, though; free that.
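The comment describes the free path for these IRAM-alias blocks: the word immediately before the pointer handed out by dram_alloc_to_iram_addr() holds the original DRAM address, and that is what actually gets freed. A minimal sketch of that recovery step, using a hypothetical helper name and plain free() rather than the IDF internals:

#include <stdint.h>
#include <stdlib.h>

/* Hypothetical helper (not the IDF implementation): recover the DRAM address
 * stored one word before an IRAM-alias pointer and free that instead. */
static void free_iram_alias_block(void *iram_ptr)
{
    uint32_t *word_before = ((uint32_t *)iram_ptr) - 1; /* stored DRAM start address */
    void *dram_ptr = (void *)(uintptr_t)*word_before;
    free(dram_ptr);
}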

@@ -273,6 +280,10 @@ IRAM_ATTR void *heap_caps_realloc( void *ptr, size_t size, int caps)
        return NULL;
    }

    if (size > HEAP_SIZE_MAX) {
        return NULL;
    }

    heap_t *heap = find_containing_heap(ptr);

    assert(heap != NULL && "realloc() pointer is outside heap areas");

@@ -305,12 +316,18 @@ IRAM_ATTR void *heap_caps_realloc( void *ptr, size_t size, int caps)

IRAM_ATTR void *heap_caps_calloc( size_t n, size_t size, uint32_t caps)
{
    void *r;
    r = heap_caps_malloc(n*size, caps);
    if (r != NULL) {
        bzero(r, n*size);
    void *result;
    size_t size_bytes;

    if (__builtin_mul_overflow(n, size, &size_bytes)) {
        return NULL;
    }
    return r;

    result = heap_caps_malloc(size_bytes, caps);
    if (result != NULL) {
        bzero(result, size_bytes);
    }
    return result;
}

size_t heap_caps_get_free_size( uint32_t caps )
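__builtin_mul_overflow is the GCC/Clang builtin the new calloc paths rely on: it stores n*size into the output argument and returns true if the multiplication overflowed the destination type. A minimal usage sketch (not IDF code; the wrapper name is made up for illustration):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical wrapper: returns true and writes the product only when
 * nmemb * size fits in size_t, mirroring how the diff guards calloc(). */
static bool checked_mul(size_t nmemb, size_t size, size_t *out)
{
    return !__builtin_mul_overflow(nmemb, size, out);
}

int main(void)
{
    size_t bytes;
    printf("%d\n", checked_mul(16, 1024, &bytes));            /* 1: fits, bytes == 16384 */
    printf("%d\n", checked_mul(SIZE_MAX / 2 + 4, 2, &bytes)); /* 0: product would wrap */
    return 0;
}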

@@ -31,7 +31,9 @@ struct registered_heap_ll registered_heaps;

static void register_heap(heap_t *region)
{
    region->heap = multi_heap_register((void *)region->start, region->end - region->start);
    size_t heap_size = region->end - region->start;
    assert(heap_size <= HEAP_SIZE_MAX);
    region->heap = multi_heap_register((void *)region->start, heap_size);
    if (region->heap != NULL) {
        ESP_EARLY_LOGD(TAG, "New heap initialised at %p", region->heap);
    }

@@ -28,6 +28,8 @@ extern "C" {
   for heap_caps_init.c to share heap information with heap_caps.c
*/

#define HEAP_SIZE_MAX (SOC_MAX_CONTIGUOUS_RAM_SIZE)

/* Type for describing each registered heap */
typedef struct heap_t_ {
    uint32_t caps[SOC_MEMORY_TYPE_NO_PRIOS]; ///< Capabilities for the type of memory in this heap (as a prioritised set). Copied from soc_memory_types so it's in RAM not flash.
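HEAP_SIZE_MAX is what makes the early size > HEAP_SIZE_MAX checks sufficient: any accepted size is at most SOC_MAX_CONTIGUOUS_RAM_SIZE, so adding a handful of bytes for alignment, the stored DRAM-pointer word, or poisoning overhead cannot wrap a 32-bit size_t. A compile-time sketch of that argument, with the constant copied in as an illustrative value rather than pulled from soc.h:

#include <stdint.h>

#define HEAP_SIZE_MAX 0x400000u  /* mirrors SOC_MAX_CONTIGUOUS_RAM_SIZE from the diff */

/* If size <= HEAP_SIZE_MAX, then size + 4 (and similar small additions)
 * stays far below UINT32_MAX, so no 32-bit overflow is possible. */
_Static_assert(HEAP_SIZE_MAX + 16u < UINT32_MAX, "small additions to a capped size cannot wrap");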

@@ -175,6 +175,9 @@ static bool verify_fill_pattern(void *data, size_t size, bool print_errors, bool

void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
{
    if(size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }
    multi_heap_internal_lock(heap);
    poison_head_t *head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
    uint8_t *data = NULL;
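size > SIZE_MAX - POISON_OVERHEAD is the standard way to pre-check an addition for overflow without performing it: it is equivalent to size + POISON_OVERHEAD > SIZE_MAX, but the subtraction of a constant from SIZE_MAX cannot itself wrap. A minimal sketch of the idiom (not IDF code; the overhead value is a placeholder):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define OVERHEAD 12u  /* placeholder for a poisoning header + footer size */

/* Returns true only when size + OVERHEAD can be computed without wrapping. */
static bool can_add_overhead(size_t size)
{
    return size <= SIZE_MAX - OVERHEAD;
}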

@@ -217,6 +220,9 @@ void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
    poison_head_t *new_head;
    void *result = NULL;

    if(size > SIZE_MAX - POISON_OVERHEAD) {
        return NULL;
    }
    if (p == NULL) {
        return multi_heap_malloc(heap, size);
    }

@@ -65,7 +65,6 @@ TEST_CASE("Malloc/overwrite, then free all available DRAM", "[heap]")
    TEST_ASSERT(m1==m2);
}


#if CONFIG_SPIRAM_USE_MALLOC

#if (CONFIG_SPIRAM_MALLOC_RESERVE_INTERNAL > 1024)

@@ -88,3 +87,29 @@ TEST_CASE("Check if reserved DMA pool still can allocate even when malloc()'ed m
#endif

#endif

TEST_CASE("alloc overflows should all fail", "[heap]")
{
    /* allocates 8 bytes if size_t overflows */
    TEST_ASSERT_NULL(calloc(SIZE_MAX / 2 + 4, 2));

    /* will overflow if any poisoning is enabled
       (should fail for sensible OOM reasons, otherwise) */
    TEST_ASSERT_NULL(malloc(SIZE_MAX - 1));
    TEST_ASSERT_NULL(calloc(SIZE_MAX - 1, 1));

    /* will overflow when the size is rounded up to word align it */
    TEST_ASSERT_NULL(heap_caps_malloc(SIZE_MAX-1, MALLOC_CAP_32BIT));

    TEST_ASSERT_NULL(heap_caps_malloc(SIZE_MAX-1, MALLOC_CAP_EXEC));
}

TEST_CASE("unreasonable allocs should all fail", "[heap]")
{
    TEST_ASSERT_NULL(calloc(16, 1024*1024));
    TEST_ASSERT_NULL(malloc(16*1024*1024));
    TEST_ASSERT_NULL(malloc(SIZE_MAX / 2));
    TEST_ASSERT_NULL(malloc(SIZE_MAX - 256));
    TEST_ASSERT_NULL(malloc(xPortGetFreeHeapSize() - 1));
}

@@ -47,11 +47,17 @@ void* IRAM_ATTR _realloc_r(struct _reent *r, void* ptr, size_t size)
    return heap_caps_realloc_default( ptr, size );
}

void* IRAM_ATTR _calloc_r(struct _reent *r, size_t count, size_t size)
void* IRAM_ATTR _calloc_r(struct _reent *r, size_t nmemb, size_t size)
{
    void* result = heap_caps_malloc_default(count * size);
    if (result) {
        bzero(result, count * size);
    void *result;
    size_t size_bytes;
    if (__builtin_mul_overflow(nmemb, size, &size_bytes)) {
        return NULL;
    }

    result = malloc(size_bytes);
    if (result != NULL) {
        bzero(result, size_bytes);
    }
    return result;
}
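_calloc_r above uses the same __builtin_mul_overflow guard as heap_caps_calloc. For toolchains that lack that builtin, the same guarantee can be had by dividing instead of multiplying; a minimal sketch of that portable fallback, offered as an assumption rather than anything this change uses:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* Portable fallback for the multiplication guard: nmemb * size overflows
 * size_t exactly when nmemb != 0 and size > SIZE_MAX / nmemb. */
static bool mul_would_overflow(size_t nmemb, size_t size)
{
    return nmemb != 0 && size > SIZE_MAX / nmemb;
}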

@@ -63,6 +63,8 @@
#define SOC_IROM_HIGH 0x40400000
#define SOC_DROM_LOW 0x3F400000
#define SOC_DROM_HIGH 0x3F800000
#define SOC_DRAM_LOW 0x3FAE0000
#define SOC_DRAM_HIGH 0x40000000
#define SOC_RTC_IRAM_LOW 0x400C0000
#define SOC_RTC_IRAM_HIGH 0x400C2000
#define SOC_RTC_DATA_LOW 0x50000000

@@ -70,6 +72,12 @@
#define SOC_EXTRAM_DATA_LOW 0x3F800000
#define SOC_EXTRAM_DATA_HIGH 0x3FC00000

#define SOC_MAX_CONTIGUOUS_RAM_SIZE 0x400000 ///< Largest span of contiguous memory (DRAM or IRAM) in the address space

#define SOC_CACHE_PRO_LOW 0x40070000
#define SOC_CACHE_PRO_HIGH 0x40078000
#define SOC_CACHE_APP_LOW 0x40078000
#define SOC_CACHE_APP_HIGH 0x40080000

#define DR_REG_DPORT_BASE 0x3ff00000
#define DR_REG_AES_BASE 0x3ff01000

@@ -94,3 +94,28 @@ inline static bool IRAM_ATTR esp_ptr_internal(const void *p) {
inline static bool IRAM_ATTR esp_ptr_external_ram(const void *p) {
    return ((intptr_t)p >= SOC_EXTRAM_DATA_LOW && (intptr_t)p < SOC_EXTRAM_DATA_HIGH);
}

inline static bool IRAM_ATTR esp_ptr_in_iram(const void *p) {
#ifndef CONFIG_FREERTOS_UNICORE
    return ((intptr_t)p >= SOC_IRAM_LOW && (intptr_t)p < SOC_IRAM_HIGH);
#else
    return ((intptr_t)p >= SOC_CACHE_APP_LOW && (intptr_t)p < SOC_IRAM_HIGH);
#endif
}

inline static bool IRAM_ATTR esp_ptr_in_drom(const void *p) {
    return ((intptr_t)p >= SOC_DROM_LOW && (intptr_t)p < SOC_DROM_HIGH);
}

inline static bool IRAM_ATTR esp_ptr_in_dram(const void *p) {
    return ((intptr_t)p >= SOC_DRAM_LOW && (intptr_t)p < SOC_DRAM_HIGH);
}

inline static bool IRAM_ATTR esp_ptr_in_diram_dram(const void *p) {
    return ((intptr_t)p >= SOC_DIRAM_DRAM_LOW && (intptr_t)p < SOC_DIRAM_DRAM_HIGH);
}

inline static bool IRAM_ATTR esp_ptr_in_diram_iram(const void *p) {
    return ((intptr_t)p >= SOC_DIRAM_IRAM_LOW && (intptr_t)p < SOC_DIRAM_IRAM_HIGH);
}
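The new esp_ptr_in_* predicates are all half-open range checks (low bound inclusive, high bound exclusive), which makes them easy to exercise at the boundaries. A tiny host-side sketch of that property, using placeholder bounds rather than the real SOC_* values:

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* Placeholder bounds for illustration only; the real values come from soc.h. */
#define REGION_LOW  0x3FFE0000u
#define REGION_HIGH 0x40000000u

static bool ptr_in_region(const void *p)
{
    return ((uintptr_t)p >= REGION_LOW && (uintptr_t)p < REGION_HIGH);
}

int main(void)
{
    assert(ptr_in_region((void *)(uintptr_t)REGION_LOW));   /* low bound is inside */
    assert(!ptr_in_region((void *)(uintptr_t)REGION_HIGH)); /* high bound is outside */
    return 0;
}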