Merge branch 'bugfix/heap_check_integrity' into 'master'

heap: Fix spurious heap_caps_check_integrity() errors in Comprehensive mode

See merge request !1421
Ivan Grokhotkov
2017-10-19 21:30:16 +08:00
5 changed files with 108 additions and 42 deletions
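For context (not part of the diff below): with Comprehensive heap poisoning enabled, an integrity check that runs while another task is inside malloc/free can observe a block whose fill pattern is only partially written, and report it as corruption even though the heap is fine. A minimal sketch of that scenario, assuming the public heap_caps_* API and two FreeRTOS tasks (task creation omitted; sizes and delays are arbitrary):

#include <assert.h>
#include <stdbool.h>
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "esp_heap_caps.h"

/* Task A: allocates and frees continuously, exercising the poisoning code. */
static void alloc_task(void *arg)
{
    for (;;) {
        void *p = heap_caps_malloc(128, MALLOC_CAP_8BIT);
        heap_caps_free(p);
    }
}

/* Task B: checks heap integrity. Before this fix, the assert could fire
   spuriously in Comprehensive mode because the check was not serialized
   against the poisoning writes done by alloc_task. */
static void checker_task(void *arg)
{
    for (;;) {
        assert(heap_caps_check_integrity(MALLOC_CAP_8BIT, true));
        vTaskDelay(pdMS_TO_TICKS(10));
    }
}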

View File

@@ -91,6 +91,8 @@ multi_heap_handle_t multi_heap_register(void *start, size_t size);
  *
  * The lock argument is supplied to the MULTI_HEAP_LOCK() and MULTI_HEAP_UNLOCK() macros, defined in multi_heap_platform.h.
  *
+ * The lock in question must be recursive.
+ *
  * When the heap is first registered, the associated lock is NULL.
  *
  * @param heap Handle to a registered heap.
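To make the documented contract concrete, here is a small sketch (not code from this merge request) of registering a heap and attaching a lock. The lock object below is a placeholder: multi_heap stores the pointer opaquely and passes it to MULTI_HEAP_LOCK()/MULTI_HEAP_UNLOCK(), so the real type is whatever multi_heap_platform.h expects, and it must behave as a recursive lock.

#include "multi_heap.h"

static char heap_mem[8 * 1024];        /* arbitrary backing region for the heap */
static int platform_recursive_lock;    /* placeholder for the platform's recursive lock object */

void example_heap_setup(void)
{
    /* Freshly registered heaps have a NULL lock, i.e. no locking at all. */
    multi_heap_handle_t h = multi_heap_register(heap_mem, sizeof(heap_mem));

    /* Attach the (recursive) lock; subsequent malloc/free/check calls on
       this heap take it via MULTI_HEAP_LOCK()/MULTI_HEAP_UNLOCK(). */
    multi_heap_set_lock(h, &platform_recursive_lock);
}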

View File

@@ -329,6 +329,16 @@ void multi_heap_set_lock(multi_heap_handle_t heap, void *lock)
     heap->lock = lock;
 }
 
+void inline multi_heap_internal_lock(multi_heap_handle_t heap)
+{
+    MULTI_HEAP_LOCK(heap->lock);
+}
+
+void inline multi_heap_internal_unlock(multi_heap_handle_t heap)
+{
+    MULTI_HEAP_UNLOCK(heap->lock);
+}
+
 void *multi_heap_malloc_impl(multi_heap_handle_t heap, size_t size)
 {
     heap_block_t *best_block = NULL;
@@ -341,7 +351,7 @@ void *multi_heap_malloc_impl(multi_heap_handle_t heap, size_t size)
         return NULL;
     }
 
-    MULTI_HEAP_LOCK(heap->lock);
+    multi_heap_internal_lock(heap);
 
     /* Find best free block to perform the allocation in */
     prev = &heap->first_block;
@@ -361,7 +371,7 @@ void *multi_heap_malloc_impl(multi_heap_handle_t heap, size_t size)
     }
 
     if (best_block == NULL) {
-        MULTI_HEAP_UNLOCK(heap->lock);
+        multi_heap_internal_unlock(heap);
         return NULL; /* No room in heap */
     }
@@ -376,7 +386,7 @@ void *multi_heap_malloc_impl(multi_heap_handle_t heap, size_t size)
         heap->minimum_free_bytes = heap->free_bytes;
     }
 
-    MULTI_HEAP_UNLOCK(heap->lock);
+    multi_heap_internal_unlock(heap);
 
     return best_block->data;
 }
@@ -389,7 +399,7 @@ void multi_heap_free_impl(multi_heap_handle_t heap, void *p)
         return;
     }
 
-    MULTI_HEAP_LOCK(heap->lock);
+    multi_heap_internal_lock(heap);
 
     assert_valid_block(heap, pb);
     MULTI_HEAP_ASSERT(!is_free(pb), pb); // block should not be free
@@ -420,7 +430,7 @@ void multi_heap_free_impl(multi_heap_handle_t heap, void *p)
         pb = merge_adjacent(heap, pb, next);
     }
 
-    MULTI_HEAP_UNLOCK(heap->lock);
+    multi_heap_internal_unlock(heap);
 }
@@ -451,7 +461,7 @@ void *multi_heap_realloc_impl(multi_heap_handle_t heap, void *p, size_t size)
         return NULL;
     }
 
-    MULTI_HEAP_LOCK(heap->lock);
+    multi_heap_internal_lock(heap);
     result = NULL;
 
     if (size <= block_data_size(pb)) {
@@ -461,7 +471,7 @@ void *multi_heap_realloc_impl(multi_heap_handle_t heap, void *p, size_t size)
     }
     else if (heap->free_bytes < size - block_data_size(pb)) {
         // Growing, but there's not enough total free space in the heap
-        MULTI_HEAP_UNLOCK(heap->lock);
+        multi_heap_internal_unlock(heap);
         return NULL;
     }
@@ -512,7 +522,7 @@ void *multi_heap_realloc_impl(multi_heap_handle_t heap, void *p, size_t size)
         heap->minimum_free_bytes = heap->free_bytes;
     }
 
-    MULTI_HEAP_UNLOCK(heap->lock);
+    multi_heap_internal_unlock(heap);
     return result;
 }
@@ -530,7 +540,7 @@ bool multi_heap_check(multi_heap_handle_t heap, bool print_errors)
     size_t total_free_bytes = 0;
     assert(heap != NULL);
 
-    MULTI_HEAP_LOCK(heap->lock);
+    multi_heap_internal_lock(heap);
 
     heap_block_t *prev = NULL;
     heap_block_t *prev_free = NULL;
@@ -593,7 +603,7 @@ bool multi_heap_check(multi_heap_handle_t heap, bool print_errors)
     }
 
 done:
-    MULTI_HEAP_UNLOCK(heap->lock);
+    multi_heap_internal_unlock(heap);
 
     return valid;
 }
@@ -602,7 +612,7 @@ void multi_heap_dump(multi_heap_handle_t heap)
 {
     assert(heap != NULL);
 
-    MULTI_HEAP_LOCK(heap->lock);
+    multi_heap_internal_lock(heap);
     printf("Heap start %p end %p\nFirst free block %p\n", &heap->first_block, heap->last_block, heap->first_block.next_free);
     for(heap_block_t *b = &heap->first_block; b != NULL; b = get_next_block(b)) {
         printf("Block %p data size 0x%08zx bytes next block %p", b, block_data_size(b), get_next_block(b));
@@ -612,7 +622,7 @@ void multi_heap_dump(multi_heap_handle_t heap)
             printf("\n");
         }
     }
-    MULTI_HEAP_UNLOCK(heap->lock);
+    multi_heap_internal_unlock(heap);
 }
 
 size_t multi_heap_free_size_impl(multi_heap_handle_t heap)
@@ -639,7 +649,7 @@ void multi_heap_get_info_impl(multi_heap_handle_t heap, multi_heap_info_t *info)
         return;
     }
 
-    MULTI_HEAP_LOCK(heap->lock);
+    multi_heap_internal_lock(heap);
     for(heap_block_t *b = get_next_block(&heap->first_block); !is_last_block(b); b = get_next_block(b)) {
         info->total_blocks++;
         if (is_free(b)) {
@@ -659,6 +669,6 @@ void multi_heap_get_info_impl(multi_heap_handle_t heap, multi_heap_info_t *info)
     // heap has wrong total size (address printed here is not indicative of the real error)
     MULTI_HEAP_ASSERT(info->total_free_bytes == heap->free_bytes, heap);
 
-    MULTI_HEAP_UNLOCK(heap->lock);
+    multi_heap_internal_unlock(heap);
 }
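Since multi_heap_check() now takes the same per-heap lock as the allocator paths above, a checker running in another task serializes with any in-progress malloc/free/realloc instead of racing it. A minimal sketch of the checker side (illustrative wiring only, not code from this merge request):

#include <assert.h>
#include <stdbool.h>
#include "multi_heap.h"

void example_check(multi_heap_handle_t heap)
{
    /* multi_heap_check() locks the heap internally (multi_heap_internal_lock),
       so it can no longer interleave with a writer that is part-way through
       updating heap structures on another task. */
    bool ok = multi_heap_check(heap, true);
    assert(ok);
}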

View File

@@ -38,3 +38,10 @@ bool multi_heap_internal_check_block_poisoning(void *start, size_t size, bool is
    Called when merging blocks, to overwrite the old block header.
 */
 void multi_heap_internal_poison_fill_region(void *start, size_t size, bool is_free);
+
+/* Allow heap poisoning to lock/unlock the heap to avoid race conditions
+   if multi_heap_check() is running concurrently.
+*/
+void multi_heap_internal_lock(multi_heap_handle_t heap);
+
+void multi_heap_internal_unlock(multi_heap_handle_t heap);
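The reason the heap lock must be recursive (see the multi_heap.h change above) follows from this pair of hooks: the poisoning layer takes the lock and then calls back into entry points that lock the same heap again. A hedged sketch of that nesting, assuming these declarations live in an internal header (name assumed here) and using an illustrative helper:

#include <stddef.h>
#include "multi_heap.h"
#include "multi_heap_internal.h"   /* assumed header name for the declarations above */

/* Illustrative helper, not part of the heap component. */
static void *example_replace_block(multi_heap_handle_t heap, void *old_block, size_t new_size)
{
    multi_heap_internal_lock(heap);                   /* 1st acquisition */

    /* The public wrappers lock the same heap again internally, which is
       exactly why MULTI_HEAP_LOCK() must allow recursive acquisition
       (the SLOW realloc path below does the same when it calls
       multi_heap_free() while already holding the lock). */
    multi_heap_free(heap, old_block);                 /* nested acquisition */
    void *fresh = multi_heap_malloc(heap, new_size);  /* nested again */

    multi_heap_internal_unlock(heap);
    return fresh;
}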

View File

@@ -173,16 +173,18 @@ static bool verify_fill_pattern(void *data, size_t size, bool print_errors, bool
 void *multi_heap_malloc(multi_heap_handle_t heap, size_t size)
 {
+    multi_heap_internal_lock(heap);
     poison_head_t *head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
-    if (head == NULL) {
-        return NULL;
-    }
-    uint8_t *data = poison_allocated_region(head, size);
+    uint8_t *data = NULL;
+    if (head != NULL) {
+        data = poison_allocated_region(head, size);
 #ifdef SLOW
-    /* check everything we got back is FREE_FILL_PATTERN & swap for MALLOC_FILL_PATTERN */
-    assert( verify_fill_pattern(data, size, true, true, true) );
+        /* check everything we got back is FREE_FILL_PATTERN & swap for MALLOC_FILL_PATTERN */
+        assert( verify_fill_pattern(data, size, true, true, true) );
 #endif
+    }
+    multi_heap_internal_unlock(heap);
 
     return data;
 }
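The SLOW-mode assertion above relies on verify_fill_pattern() checking that the freshly returned region still holds the free-fill pattern and swapping it to the allocated-fill pattern. Before this change, a concurrent multi_heap_check() could observe the block while that swap was in progress and report a spurious error; holding the heap lock across the swap, as the new code does, closes that window. A rough sketch of the verify-and-swap idea (illustrative only: placeholder pattern values and a simplified signature, not the real verify_fill_pattern()):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define EXAMPLE_FREE_FILL   0xFE   /* placeholder, not the real FREE_FILL_PATTERN */
#define EXAMPLE_MALLOC_FILL 0xCE   /* placeholder, not the real MALLOC_FILL_PATTERN */

static bool example_verify_and_swap(uint8_t *data, size_t size)
{
    for (size_t i = 0; i < size; i++) {
        if (data[i] != EXAMPLE_FREE_FILL) {
            /* A mismatch here indicates real corruption: something wrote
               to this memory while it was marked free. */
            return false;
        }
        data[i] = EXAMPLE_MALLOC_FILL;
    }
    return true;
}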
@@ -191,6 +193,8 @@ void multi_heap_free(multi_heap_handle_t heap, void *p)
     if (p == NULL) {
         return;
     }
 
+    multi_heap_internal_lock(heap);
+
     poison_head_t *head = verify_allocated_region(p, true);
     assert(head != NULL);
@@ -200,11 +204,15 @@ void multi_heap_free(multi_heap_handle_t heap, void *p)
            head->alloc_size + POISON_OVERHEAD);
 #endif
 
     multi_heap_free_impl(heap, head);
+
+    multi_heap_internal_unlock(heap);
 }
 
 void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
 {
     poison_head_t *head = NULL;
+    poison_head_t *new_head;
+    void *result = NULL;
 
     if (p == NULL) {
         return multi_heap_malloc(heap, size);
@@ -218,14 +226,18 @@ void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
     head = verify_allocated_region(p, true);
     assert(head != NULL);
 
+    multi_heap_internal_lock(heap);
+
 #ifndef SLOW
-    poison_head_t *new_head = multi_heap_realloc_impl(heap, head, size + POISON_OVERHEAD);
-    if (new_head == NULL) { // new allocation failed, everything stays as-is
-        return NULL;
+    new_head = multi_heap_realloc_impl(heap, head, size + POISON_OVERHEAD);
+    if (new_head != NULL) {
+        /* For "fast" poisoning, we only overwrite the head/tail of the new block so it's safe
+           to poison, so no problem doing this even if realloc resized in place.
+        */
+        result = poison_allocated_region(new_head, size);
     }
-    return poison_allocated_region(new_head, size);
 #else // SLOW
-    /* When slow poisoning is enabled, it becomes very fiddly to try and correctly fill memory when reallocing in place
+    /* When slow poisoning is enabled, it becomes very fiddly to try and correctly fill memory when resizing in place
        (where the buffer may be moved (including to an overlapping address with the old buffer), grown, or shrunk in
        place.)
@@ -233,15 +245,17 @@ void *multi_heap_realloc(multi_heap_handle_t heap, void *p, size_t size)
     */
     size_t orig_alloc_size = head->alloc_size;
 
-    poison_head_t *new_head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
-    if (new_head == NULL) {
-        return NULL;
+    new_head = multi_heap_malloc_impl(heap, size + POISON_OVERHEAD);
+    if (new_head != NULL) {
+        result = poison_allocated_region(new_head, size);
+        memcpy(result, p, MIN(size, orig_alloc_size));
+        multi_heap_free(heap, p);
     }
-    void *new_data = poison_allocated_region(new_head, size);
-    memcpy(new_data, p, MIN(size, orig_alloc_size));
-    multi_heap_free(heap, p);
-    return new_data;
 #endif
+
+    multi_heap_internal_unlock(heap);
+
+    return result;
 }
 
 size_t multi_heap_get_allocated_size(multi_heap_handle_t heap, void *p)
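As the comment in the SLOW branch explains, Comprehensive mode sidesteps in-place resizing entirely: it allocates a new poisoned block, copies the smaller of the old and new sizes, and frees the old block, leaving the old buffer untouched if the new allocation fails. A standalone sketch of that allocate-copy-free fallback (illustrative helper names; MIN defined locally for the example):

#include <stddef.h>
#include <string.h>
#include "multi_heap.h"

#ifndef MIN
#define MIN(a, b) ((a) < (b) ? (a) : (b))
#endif

/* Illustrative helper, not the real multi_heap_realloc(). */
static void *example_realloc_by_copy(multi_heap_handle_t heap, void *old_buf,
                                      size_t old_size, size_t new_size)
{
    void *new_buf = multi_heap_malloc(heap, new_size);
    if (new_buf != NULL) {
        /* Copy only what fits in both buffers, like the
           MIN(size, orig_alloc_size) bound in the diff above. */
        memcpy(new_buf, old_buf, MIN(old_size, new_size));
        multi_heap_free(heap, old_buf);
    }
    /* On failure the old buffer stays valid, mirroring realloc() semantics. */
    return new_buf;
}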