author     Andrea Fioraldi <andreafioraldi@gmail.com>   2021-02-12 09:42:22 +0100
committer  Andrea Fioraldi <andreafioraldi@gmail.com>   2021-02-12 09:42:22 +0100
commit     22a3c7f7d043d9dbf39c847061d88a4577537031 (patch)
tree       37f2e2e3b5561168e92c36a0b96f1caec42f99c6 /qemu_mode/libqasan/malloc.c
parent     16ffbb37f5897ca318e747518fdae6b4e56b52ac (diff)
download   afl++-22a3c7f7d043d9dbf39c847061d88a4577537031.tar.gz
fix #736 (ty b1gr3db)
Diffstat (limited to 'qemu_mode/libqasan/malloc.c')
-rw-r--r-- | qemu_mode/libqasan/malloc.c | 109
1 file changed, 55 insertions, 54 deletions
diff --git a/qemu_mode/libqasan/malloc.c b/qemu_mode/libqasan/malloc.c
index 54c1096a..5a2d2a0c 100644
--- a/qemu_mode/libqasan/malloc.c
+++ b/qemu_mode/libqasan/malloc.c
@@ -51,9 +51,9 @@ typedef struct {
 struct chunk_begin {

   size_t requested_size;
-  void* aligned_orig;  // NULL if not aligned
-  struct chunk_begin* next;
-  struct chunk_begin* prev;
+  void * aligned_orig;  // NULL if not aligned
+  struct chunk_begin *next;
+  struct chunk_begin *prev;
   char redzone[REDZONE_SIZE];

 };
@@ -68,45 +68,45 @@ struct chunk_struct {

 #ifdef __GLIBC__

-void* (*__lq_libc_malloc)(size_t);
-void (*__lq_libc_free)(void*);
-#define backend_malloc __lq_libc_malloc
-#define backend_free __lq_libc_free
+void *(*__lq_libc_malloc)(size_t);
+void (*__lq_libc_free)(void *);
+  #define backend_malloc __lq_libc_malloc
+  #define backend_free __lq_libc_free

-#define TMP_ZONE_SIZE 4096
+  #define TMP_ZONE_SIZE 4096

 static int __tmp_alloc_zone_idx;
 static unsigned char __tmp_alloc_zone[TMP_ZONE_SIZE];

 #else

 // From dlmalloc.c
-void* dlmalloc(size_t);
-void dlfree(void*);
-#define backend_malloc dlmalloc
-#define backend_free dlfree
+void * dlmalloc(size_t);
+void dlfree(void *);
+  #define backend_malloc dlmalloc
+  #define backend_free dlfree

 #endif

 int __libqasan_malloc_initialized;
-static struct chunk_begin* quarantine_top;
-static struct chunk_begin* quarantine_end;
+static struct chunk_begin *quarantine_top;
+static struct chunk_begin *quarantine_end;
 static size_t quarantine_bytes;

 #ifdef __BIONIC__
-static pthread_mutex_t quarantine_lock;
-#define LOCK_TRY pthread_mutex_trylock
-#define LOCK_INIT pthread_mutex_init
-#define LOCK_UNLOCK pthread_mutex_unlock
+static pthread_mutex_t quarantine_lock;
+  #define LOCK_TRY pthread_mutex_trylock
+  #define LOCK_INIT pthread_mutex_init
+  #define LOCK_UNLOCK pthread_mutex_unlock
 #else
-static pthread_spinlock_t quarantine_lock;
-#define LOCK_TRY pthread_spin_trylock
-#define LOCK_INIT pthread_spin_init
-#define LOCK_UNLOCK pthread_spin_unlock
+static pthread_spinlock_t quarantine_lock;
+  #define LOCK_TRY pthread_spin_trylock
+  #define LOCK_INIT pthread_spin_init
+  #define LOCK_UNLOCK pthread_spin_unlock
 #endif

 // need qasan disabled
-static int quanratine_push(struct chunk_begin* ck) {
+static int quanratine_push(struct chunk_begin *ck) {
@@ -114,7 +114,7 @@ static int quanratine_push(struct chunk_begin* ck) {

   while (ck->requested_size + quarantine_bytes >= QUARANTINE_MAX_BYTES) {

-    struct chunk_begin* tmp = quarantine_end;
+    struct chunk_begin *tmp = quarantine_end;
     quarantine_end = tmp->prev;

     quarantine_bytes -= tmp->requested_size;
@@ -154,23 +154,23 @@ void __libqasan_init_malloc(void) {

 }

-size_t __libqasan_malloc_usable_size(void* ptr) {
+size_t __libqasan_malloc_usable_size(void *ptr) {

-  char* p = ptr;
+  char *p = ptr;
   p -= sizeof(struct chunk_begin);

-  return ((struct chunk_begin*)p)->requested_size;
+  return ((struct chunk_begin *)p)->requested_size;

 }

-void* __libqasan_malloc(size_t size) {
+void *__libqasan_malloc(size_t size) {

   if (!__libqasan_malloc_initialized) {
-
+
     __libqasan_init_malloc();

 #ifdef __GLIBC__
-    void* r = &__tmp_alloc_zone[__tmp_alloc_zone_idx];
+    void *r = &__tmp_alloc_zone[__tmp_alloc_zone_idx];

     if (size & (ALLOC_ALIGN_SIZE - 1))
       __tmp_alloc_zone_idx +=
@@ -185,7 +185,7 @@ void* __libqasan_malloc(size_t size) {

   int state = QASAN_SWAP(QASAN_DISABLED);  // disable qasan for this thread

-  struct chunk_begin* p = backend_malloc(sizeof(struct chunk_struct) + size);
+  struct chunk_begin *p = backend_malloc(sizeof(struct chunk_struct) + size);

   QASAN_SWAP(state);
@@ -197,14 +197,14 @@ void* __libqasan_malloc(size_t size) {
   p->aligned_orig = NULL;
   p->next = p->prev = NULL;

-  QASAN_ALLOC(&p[1], (char*)&p[1] + size);
+  QASAN_ALLOC(&p[1], (char *)&p[1] + size);
   QASAN_POISON(p->redzone, REDZONE_SIZE, ASAN_HEAP_LEFT_RZ);
   if (size & (ALLOC_ALIGN_SIZE - 1))
-    QASAN_POISON((char*)&p[1] + size,
+    QASAN_POISON((char *)&p[1] + size,
                  (size & ~(ALLOC_ALIGN_SIZE - 1)) + 8 - size + REDZONE_SIZE,
                  ASAN_HEAP_RIGHT_RZ);
   else
-    QASAN_POISON((char*)&p[1] + size, REDZONE_SIZE, ASAN_HEAP_RIGHT_RZ);
+    QASAN_POISON((char *)&p[1] + size, REDZONE_SIZE, ASAN_HEAP_RIGHT_RZ);

   __builtin_memset(&p[1], 0xff, size);
@@ -212,17 +212,17 @@ void* __libqasan_malloc(size_t size) {

 }

-void __libqasan_free(void* ptr) {
+void __libqasan_free(void *ptr) {

   if (!ptr) return;
-
+
 #ifdef __GLIBC__
-  if (ptr >= (void*)__tmp_alloc_zone &&
-      ptr < ((void*)__tmp_alloc_zone + TMP_ZONE_SIZE))
+  if (ptr >= (void *)__tmp_alloc_zone &&
+      ptr < ((void *)__tmp_alloc_zone + TMP_ZONE_SIZE))
     return;
 #endif

-  struct chunk_begin* p = ptr;
+  struct chunk_begin *p = ptr;
   p -= 1;

   size_t n = p->requested_size;
@@ -249,21 +249,22 @@ void __libqasan_free(void* ptr) {

 }

-void* __libqasan_calloc(size_t nmemb, size_t size) {
+void *__libqasan_calloc(size_t nmemb, size_t size) {

   size *= nmemb;

 #ifdef __GLIBC__
   if (!__libqasan_malloc_initialized) {

-    void* r = &__tmp_alloc_zone[__tmp_alloc_zone_idx];
+    void *r = &__tmp_alloc_zone[__tmp_alloc_zone_idx];
     __tmp_alloc_zone_idx += size;
     return r;

   }

+
 #endif

-  char* p = __libqasan_malloc(size);
+  char *p = __libqasan_malloc(size);
   if (!p) return NULL;

   __builtin_memset(p, 0, size);
@@ -272,14 +273,14 @@ void* __libqasan_calloc(size_t nmemb, size_t size) {

 }

-void* __libqasan_realloc(void* ptr, size_t size) {
+void *__libqasan_realloc(void *ptr, size_t size) {

-  char* p = __libqasan_malloc(size);
+  char *p = __libqasan_malloc(size);
   if (!p) return NULL;

   if (!ptr) return p;

-  size_t n = ((struct chunk_begin*)ptr)[-1].requested_size;
+  size_t n = ((struct chunk_begin *)ptr)[-1].requested_size;
   if (size < n) n = size;

   __builtin_memcpy(p, ptr, n);
@@ -289,9 +290,9 @@ void* __libqasan_realloc(void* ptr, size_t size) {

 }

-int __libqasan_posix_memalign(void** ptr, size_t align, size_t len) {
+int __libqasan_posix_memalign(void **ptr, size_t align, size_t len) {

-  if ((align % 2) || (align % sizeof(void*))) return EINVAL;
+  if ((align % 2) || (align % sizeof(void *))) return EINVAL;

   if (len == 0) {

     *ptr = NULL;
@@ -305,7 +306,7 @@ int __libqasan_posix_memalign(void** ptr, size_t align, size_t len) {

   int state = QASAN_SWAP(QASAN_DISABLED);  // disable qasan for this thread

-  char* orig = backend_malloc(sizeof(struct chunk_struct) + size);
+  char *orig = backend_malloc(sizeof(struct chunk_struct) + size);

   QASAN_SWAP(state);
@@ -313,10 +314,10 @@ int __libqasan_posix_memalign(void** ptr, size_t align, size_t len) {

   QASAN_UNPOISON(orig, sizeof(struct chunk_struct) + size);

-  char* data = orig + sizeof(struct chunk_begin);
+  char *data = orig + sizeof(struct chunk_begin);
   data += align - ((uintptr_t)data % align);

-  struct chunk_begin* p = (struct chunk_begin*)data - 1;
+  struct chunk_begin *p = (struct chunk_begin *)data - 1;

   p->requested_size = len;
   p->aligned_orig = orig;
@@ -339,9 +340,9 @@ int __libqasan_posix_memalign(void** ptr, size_t align, size_t len) {

 }

-void* __libqasan_memalign(size_t align, size_t len) {
+void *__libqasan_memalign(size_t align, size_t len) {

-  void* ret = NULL;
+  void *ret = NULL;

   __libqasan_posix_memalign(&ret, align, len);
@@ -349,9 +350,9 @@ void* __libqasan_memalign(size_t align, size_t len) {

 }

-void* __libqasan_aligned_alloc(size_t align, size_t len) {
+void *__libqasan_aligned_alloc(size_t align, size_t len) {

-  void* ret = NULL;
+  void *ret = NULL;

   if ((len % align)) return NULL;
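For context on what these hunks touch: `quanratine_push` (sic, spelled as in the source) parks freed chunks in a doubly linked quarantine list so that stale pointers keep hitting poisoned memory instead of recycled allocations, and the loop in the `@@ -114,7 +114,7 @@` hunk evicts the oldest entries from the tail until the incoming chunk fits under a global byte budget. Below is a minimal, self-contained sketch of that mechanism, not the libqasan build itself: the `QUARANTINE_MAX_BYTES` value, the trimmed-down `chunk_begin`, the corrected spelling, and plain `free()` in place of `backend_free` are all illustrative assumptions.

```c
/* Standalone sketch of the quarantine eviction seen in the diff.
 * Assumptions: QUARANTINE_MAX_BYTES is illustrative, free() stands in
 * for backend_free, and the quarantine lock is omitted for brevity. */
#include <stdio.h>
#include <stdlib.h>

#define QUARANTINE_MAX_BYTES 4096  // illustrative; the real value differs

struct chunk_begin {
  size_t              requested_size;
  struct chunk_begin *next;
  struct chunk_begin *prev;
};

static struct chunk_begin *quarantine_top;  // newest entry
static struct chunk_begin *quarantine_end;  // oldest entry
static size_t              quarantine_bytes;

static int quarantine_push(struct chunk_begin *ck) {
  // Oversized chunks are never parked; the caller frees them directly.
  if (ck->requested_size >= QUARANTINE_MAX_BYTES) return 0;

  // Evict from the tail (oldest first) until the new chunk fits the budget.
  while (ck->requested_size + quarantine_bytes >= QUARANTINE_MAX_BYTES) {
    struct chunk_begin *tmp = quarantine_end;
    quarantine_end = tmp->prev;
    if (quarantine_end) quarantine_end->next = NULL;
    else quarantine_top = NULL;

    quarantine_bytes -= tmp->requested_size;
    free(tmp);  // stand-in for backend_free(tmp)
  }

  // Park the freed chunk at the head; while parked it cannot be reused,
  // so use-after-free accesses keep landing on poisoned memory.
  ck->next = quarantine_top;
  ck->prev = NULL;
  if (quarantine_top) quarantine_top->prev = ck;
  quarantine_top = ck;
  if (!quarantine_end) quarantine_end = ck;
  quarantine_bytes += ck->requested_size;
  return 1;
}

int main(void) {
  // With a 4096-byte budget and 1 KiB chunks, the list settles at three
  // parked entries and evicts one oldest chunk per push after that.
  for (int i = 0; i < 8; i++) {
    struct chunk_begin *ck = malloc(sizeof *ck);
    if (!ck) return 1;
    ck->requested_size = 1024;
    quarantine_push(ck);
    printf("parked %d, quarantine_bytes = %zu\n", i, quarantine_bytes);
  }
  return 0;
}
```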
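Similarly, the `__libqasan_posix_memalign` hunks show how aligned allocations are laid out: the backend allocation carries extra slack, the user pointer is bumped up to the requested alignment, and a `chunk_begin` header is written immediately below it, with `aligned_orig` recording the pointer the backend actually returned so `free()` can release the right address. A compilable sketch of that pointer arithmetic follows; plain `malloc()` stands in for `backend_malloc`, the QASAN poisoning calls are omitted, and the `len + align` sizing is an assumption since the diff does not show how `size` is computed.

```c
/* Sketch of the aligned-chunk layout from __libqasan_posix_memalign.
 * Assumptions: malloc() replaces backend_malloc, redzone poisoning is
 * dropped, and size = len + align provides the alignment slack. */
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define REDZONE_SIZE 128

struct chunk_begin {
  size_t              requested_size;
  void               *aligned_orig;  // NULL if not aligned
  struct chunk_begin *next;
  struct chunk_begin *prev;
  char                redzone[REDZONE_SIZE];
};

static int sketch_posix_memalign(void **ptr, size_t align, size_t len) {
  // Same validity check as the diff: alignment must be even and a
  // multiple of sizeof(void *).
  if ((align % 2) || (align % sizeof(void *))) return EINVAL;

  if (len == 0) {
    *ptr = NULL;
    return 0;
  }

  size_t size = len + align;  // slack so the user pointer can be bumped
  char  *orig = malloc(sizeof(struct chunk_begin) + size + REDZONE_SIZE);
  if (!orig) return ENOMEM;

  // Step past the header, then round the user pointer up to `align`
  // with the exact expression used in the diff.
  char *data = orig + sizeof(struct chunk_begin);
  data += align - ((uintptr_t)data % align);

  // The header sits immediately below the user pointer, where free()
  // expects to find it.
  struct chunk_begin *p = (struct chunk_begin *)data - 1;
  p->requested_size = len;
  p->aligned_orig = orig;  // free() must release this, not `data`
  p->next = p->prev = NULL;

  *ptr = data;
  return 0;
}

int main(void) {
  void *buf = NULL;
  if (sketch_posix_memalign(&buf, 64, 100) == 0 && buf) {
    printf("aligned to 64: %s\n", ((uintptr_t)buf % 64) ? "no" : "yes");
    struct chunk_begin *p = (struct chunk_begin *)buf - 1;
    free(p->aligned_orig);
  }
  return 0;
}
```

Recovering the header with `(struct chunk_begin *)data - 1` is the same trick `__libqasan_free` and `__libqasan_malloc_usable_size` use, which is why the header must sit at a fixed offset below every user pointer, aligned or not.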