Diffstat (limited to 'qemu_mode/libqasan/malloc.c')
-rw-r--r--  qemu_mode/libqasan/malloc.c |  147
1 file changed, 100 insertions(+), 47 deletions(-)
diff --git a/qemu_mode/libqasan/malloc.c b/qemu_mode/libqasan/malloc.c
index f8237826..54c1096a 100644
--- a/qemu_mode/libqasan/malloc.c
+++ b/qemu_mode/libqasan/malloc.c
@@ -24,6 +24,7 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *******************************************************************************/
 
 #include "libqasan.h"
+#include <features.h>
 #include <errno.h>
 #include <stddef.h>
 #include <assert.h>
@@ -50,9 +51,9 @@ typedef struct {
 struct chunk_begin {
 
   size_t              requested_size;
-  void *              aligned_orig;  // NULL if not aligned
-  struct chunk_begin *next;
-  struct chunk_begin *prev;
+  void*               aligned_orig;  // NULL if not aligned
+  struct chunk_begin* next;
+  struct chunk_begin* prev;
   char                redzone[REDZONE_SIZE];
 
 };
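
The user pointer handed out by this allocator sits immediately after a struct chunk_begin header, so metadata such as requested_size can be recovered with plain pointer arithmetic (as __libqasan_malloc_usable_size() does further down in this diff). A minimal standalone sketch of that layout, using an illustrative REDZONE_SIZE and omitting the trailing redzone of struct chunk_struct:

#include <stdio.h>
#include <stdlib.h>

#define REDZONE_SIZE 128  /* illustrative value, not necessarily libqasan's */

struct chunk_begin {
  size_t              requested_size;
  void*               aligned_orig;  /* NULL if not an aligned allocation */
  struct chunk_begin* next;
  struct chunk_begin* prev;
  char                redzone[REDZONE_SIZE];
};

int main(void) {
  size_t size = 42;

  /* header and user data live in one backend allocation */
  struct chunk_begin* p = malloc(sizeof(struct chunk_begin) + size);
  if (!p) return 1;
  p->requested_size = size;

  void* user = &p[1];  /* the pointer the application sees */

  /* walk back from the user pointer to the header */
  struct chunk_begin* hdr = (struct chunk_begin*)user - 1;
  printf("requested_size = %zu\n", hdr->requested_size);

  free(p);
  return 0;
}
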
@@ -65,30 +66,47 @@ struct chunk_struct {
 
 };
 
+#ifdef __GLIBC__
+
+void* (*__lq_libc_malloc)(size_t);
+void (*__lq_libc_free)(void*);
+#define backend_malloc __lq_libc_malloc
+#define backend_free __lq_libc_free
+
+#define TMP_ZONE_SIZE 4096
+static int           __tmp_alloc_zone_idx;
+static unsigned char __tmp_alloc_zone[TMP_ZONE_SIZE];
+
+#else
+
 // From dlmalloc.c
-void *dlmalloc(size_t);
-void  dlfree(void *);
+void* dlmalloc(size_t);
+void  dlfree(void*);
+#define backend_malloc dlmalloc
+#define backend_free dlfree
+
+#endif
 
 int __libqasan_malloc_initialized;
 
-static struct chunk_begin *quarantine_top;
-static struct chunk_begin *quarantine_end;
+static struct chunk_begin* quarantine_top;
+static struct chunk_begin* quarantine_end;
 static size_t              quarantine_bytes;
 
 #ifdef __BIONIC__
-static pthread_mutex_t quarantine_lock;
-  #define LOCK_TRY pthread_mutex_trylock
-  #define LOCK_INIT pthread_mutex_init
-  #define LOCK_UNLOCK pthread_mutex_unlock
+static pthread_mutex_t  quarantine_lock;
+#define LOCK_TRY pthread_mutex_trylock
+#define LOCK_INIT pthread_mutex_init
+#define LOCK_UNLOCK pthread_mutex_unlock
 #else
-static pthread_spinlock_t quarantine_lock;
-  #define LOCK_TRY pthread_spin_trylock
-  #define LOCK_INIT pthread_spin_init
-  #define LOCK_UNLOCK pthread_spin_unlock
+static pthread_spinlock_t  quarantine_lock;
+#define LOCK_TRY pthread_spin_trylock
+#define LOCK_INIT pthread_spin_init
+#define LOCK_UNLOCK pthread_spin_unlock
 #endif
 
 // need qasan disabled
-static int quanratine_push(struct chunk_begin *ck) {
+static int quanratine_push(struct chunk_begin* ck) {
 
   if (ck->requested_size >= QUARANTINE_MAX_BYTES) return 0;
 
@@ -96,15 +114,15 @@ static int quanratine_push(struct chunk_begin *ck) {
 
   while (ck->requested_size + quarantine_bytes >= QUARANTINE_MAX_BYTES) {
 
-    struct chunk_begin *tmp = quarantine_end;
+    struct chunk_begin* tmp = quarantine_end;
     quarantine_end = tmp->prev;
 
     quarantine_bytes -= tmp->requested_size;
 
     if (tmp->aligned_orig)
-      dlfree(tmp->aligned_orig);
+      backend_free(tmp->aligned_orig);
     else
-      dlfree(tmp);
+      backend_free(tmp);
 
   }
 
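
The loop above is the eviction half of a byte-budgeted quarantine: freed chunks are parked in a doubly-linked list, and the oldest entries are handed back to the backend only once the budget would be exceeded. That delays reuse, so use-after-free accesses keep hitting poisoned memory. A simplified standalone model of push-plus-eviction, with a hypothetical small budget and no locking or poisoning:

#include <stdio.h>
#include <stdlib.h>

#define QUARANTINE_MAX_BYTES (1 << 10)  /* small budget for the demo */

struct qnode {
  size_t        size;
  struct qnode* next;
  struct qnode* prev;
};

static struct qnode* q_top;   /* newest entry */
static struct qnode* q_end;   /* oldest entry */
static size_t        q_bytes;

static void quarantine_push(struct qnode* ck) {
  /* evict from the tail until the new chunk fits in the budget */
  while (q_end && ck->size + q_bytes >= QUARANTINE_MAX_BYTES) {
    struct qnode* tmp = q_end;
    q_end = tmp->prev;
    if (q_end) q_end->next = NULL;
    else q_top = NULL;
    q_bytes -= tmp->size;
    free(tmp);  /* the chunk is really released only now */
  }

  /* link the freed chunk at the head */
  ck->next = q_top;
  ck->prev = NULL;
  if (q_top) q_top->prev = ck;
  q_top = ck;
  if (!q_end) q_end = ck;
  q_bytes += ck->size;
}

int main(void) {
  for (int i = 0; i < 8; i++) {
    struct qnode* n = malloc(sizeof(*n) + 200);
    if (!n) return 1;
    n->size = 200;
    quarantine_push(n);
    printf("quarantined bytes: %zu\n", q_bytes);
  }
  return 0;
}
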
@@ -122,6 +140,11 @@ void __libqasan_init_malloc(void) {
 
   if (__libqasan_malloc_initialized) return;
 
+#ifdef __GLIBC__
+  __lq_libc_malloc = dlsym(RTLD_NEXT, "malloc");
+  __lq_libc_free = dlsym(RTLD_NEXT, "free");
+#endif
+
   LOCK_INIT(&quarantine_lock, PTHREAD_PROCESS_PRIVATE);
 
   __libqasan_malloc_initialized = 1;
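
On glibc, the functions behind backend_malloc/backend_free are resolved here at runtime with dlsym(RTLD_NEXT, ...), i.e. the next malloc/free definitions after the interposing library in link order. A minimal sketch of that lookup pattern, assuming a glibc target (link with -ldl on older toolchains):

#define _GNU_SOURCE  /* for RTLD_NEXT */
#include <dlfcn.h>
#include <stddef.h>
#include <stdio.h>

static void* (*real_malloc)(size_t);
static void (*real_free)(void*);

int main(void) {
  /* resolve the next occurrence of malloc/free in link order,
     i.e. the libc implementation behind any interposer */
  real_malloc = (void* (*)(size_t))dlsym(RTLD_NEXT, "malloc");
  real_free = (void (*)(void*))dlsym(RTLD_NEXT, "free");
  if (!real_malloc || !real_free) return 1;

  void* p = real_malloc(64);
  printf("backend malloc returned %p\n", p);
  real_free(p);
  return 0;
}
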
@@ -131,24 +154,38 @@ void __libqasan_init_malloc(void) {
 
 }
 
-size_t __libqasan_malloc_usable_size(void *ptr) {
+size_t __libqasan_malloc_usable_size(void* ptr) {
 
-  char *p = ptr;
+  char* p = ptr;
   p -= sizeof(struct chunk_begin);
 
-  return ((struct chunk_begin *)p)->requested_size;
+  return ((struct chunk_begin*)p)->requested_size;
 
 }
 
-void *__libqasan_malloc(size_t size) {
+void* __libqasan_malloc(size_t size) {
 
-  if (!__libqasan_malloc_initialized) { __libqasan_init_malloc(); }
+  if (!__libqasan_malloc_initialized) {
+  
+    __libqasan_init_malloc();
 
-  if (!__libqasan_malloc_initialized) __libqasan_init_malloc();
+#ifdef __GLIBC__
+    void* r = &__tmp_alloc_zone[__tmp_alloc_zone_idx];
+
+    if (size & (ALLOC_ALIGN_SIZE - 1))
+      __tmp_alloc_zone_idx +=
+          (size & ~(ALLOC_ALIGN_SIZE - 1)) + ALLOC_ALIGN_SIZE;
+    else
+      __tmp_alloc_zone_idx += size;
+
+    return r;
+#endif
+
+  }
 
   int state = QASAN_SWAP(QASAN_DISABLED);  // disable qasan for this thread
 
-  struct chunk_begin *p = dlmalloc(sizeof(struct chunk_struct) + size);
+  struct chunk_begin* p = backend_malloc(sizeof(struct chunk_struct) + size);
 
   QASAN_SWAP(state);
 
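
The early-return path above covers the bootstrap window: on glibc, dlsym() can itself allocate before the real malloc has been resolved, so those first requests are served from the static __tmp_alloc_zone instead of the backend. A standalone sketch of that bump allocator, assuming an 8-byte ALLOC_ALIGN_SIZE (libqasan defines its own value):

#include <stddef.h>
#include <stdio.h>

#define ALLOC_ALIGN_SIZE 8      /* assumed; libqasan defines its own value */
#define TMP_ZONE_SIZE    4096

static unsigned char tmp_zone[TMP_ZONE_SIZE];
static size_t        tmp_zone_idx;

/* bump allocation from the static zone; memory is never reclaimed,
   which is acceptable for the handful of bootstrap allocations */
static void* tmp_alloc(size_t size) {
  void* r = &tmp_zone[tmp_zone_idx];

  /* round the consumed size up to the allocation alignment */
  if (size & (ALLOC_ALIGN_SIZE - 1))
    tmp_zone_idx += (size & ~(ALLOC_ALIGN_SIZE - 1)) + ALLOC_ALIGN_SIZE;
  else
    tmp_zone_idx += size;

  return r;
}

int main(void) {
  void* a = tmp_alloc(13);  /* consumes 16 bytes */
  void* b = tmp_alloc(32);  /* consumes 32 bytes */
  printf("%p %p used=%zu\n", a, b, tmp_zone_idx);
  return 0;
}
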
@@ -160,14 +197,14 @@ void *__libqasan_malloc(size_t size) {
   p->aligned_orig = NULL;
   p->next = p->prev = NULL;
 
-  QASAN_ALLOC(&p[1], (char *)&p[1] + size);
+  QASAN_ALLOC(&p[1], (char*)&p[1] + size);
   QASAN_POISON(p->redzone, REDZONE_SIZE, ASAN_HEAP_LEFT_RZ);
   if (size & (ALLOC_ALIGN_SIZE - 1))
-    QASAN_POISON((char *)&p[1] + size,
+    QASAN_POISON((char*)&p[1] + size,
                  (size & ~(ALLOC_ALIGN_SIZE - 1)) + 8 - size + REDZONE_SIZE,
                  ASAN_HEAP_RIGHT_RZ);
   else
-    QASAN_POISON((char *)&p[1] + size, REDZONE_SIZE, ASAN_HEAP_RIGHT_RZ);
+    QASAN_POISON((char*)&p[1] + size, REDZONE_SIZE, ASAN_HEAP_RIGHT_RZ);
 
   __builtin_memset(&p[1], 0xff, size);
 
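
The right-redzone length above is the padding from the end of the user data up to the next aligned boundary, plus the redzone itself; the literal 8 appears to stand in for ALLOC_ALIGN_SIZE. A small worked example of that computation, assuming ALLOC_ALIGN_SIZE is 8 and an illustrative REDZONE_SIZE:

#include <stddef.h>
#include <stdio.h>

#define ALLOC_ALIGN_SIZE 8    /* assumed to match the hard-coded 8 above */
#define REDZONE_SIZE     128  /* illustrative */

/* bytes poisoned to the right of the user data: the padding up to the
   next aligned boundary plus the redzone itself */
static size_t right_rz_bytes(size_t size) {
  if (size & (ALLOC_ALIGN_SIZE - 1))
    return (size & ~(size_t)(ALLOC_ALIGN_SIZE - 1)) + ALLOC_ALIGN_SIZE - size +
           REDZONE_SIZE;
  return REDZONE_SIZE;
}

int main(void) {
  /* 13 rounds up to 16, so 3 padding bytes + 128 redzone bytes = 131 */
  printf("size 13 -> %zu poisoned bytes\n", right_rz_bytes(13));
  /* aligned sizes get only the redzone */
  printf("size 16 -> %zu poisoned bytes\n", right_rz_bytes(16));
  return 0;
}
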
@@ -175,11 +212,17 @@ void *__libqasan_malloc(size_t size) {
 
 }
 
-void __libqasan_free(void *ptr) {
+void __libqasan_free(void* ptr) {
 
   if (!ptr) return;
+  
+#ifdef __GLIBC__
+  if (ptr >= (void*)__tmp_alloc_zone &&
+      ptr < ((void*)__tmp_alloc_zone + TMP_ZONE_SIZE))
+    return;
+#endif
 
-  struct chunk_begin *p = ptr;
+  struct chunk_begin* p = ptr;
   p -= 1;
 
   size_t n = p->requested_size;
@@ -190,9 +233,9 @@ void __libqasan_free(void *ptr) {
   if (!quanratine_push(p)) {
 
     if (p->aligned_orig)
-      dlfree(p->aligned_orig);
+      backend_free(p->aligned_orig);
     else
-      dlfree(p);
+      backend_free(p);
 
   }
 
@@ -206,11 +249,21 @@ void __libqasan_free(void *ptr) {
 
 }
 
-void *__libqasan_calloc(size_t nmemb, size_t size) {
+void* __libqasan_calloc(size_t nmemb, size_t size) {
 
   size *= nmemb;
 
-  char *p = __libqasan_malloc(size);
+#ifdef __GLIBC__
+  if (!__libqasan_malloc_initialized) {
+
+    void* r = &__tmp_alloc_zone[__tmp_alloc_zone_idx];
+    __tmp_alloc_zone_idx += size;
+    return r;
+
+  }
+#endif
+
+  char* p = __libqasan_malloc(size);
   if (!p) return NULL;
 
   __builtin_memset(p, 0, size);
@@ -219,14 +272,14 @@ void *__libqasan_calloc(size_t nmemb, size_t size) {
 
 }
 
-void *__libqasan_realloc(void *ptr, size_t size) {
+void* __libqasan_realloc(void* ptr, size_t size) {
 
-  char *p = __libqasan_malloc(size);
+  char* p = __libqasan_malloc(size);
   if (!p) return NULL;
 
   if (!ptr) return p;
 
-  size_t n = ((struct chunk_begin *)ptr)[-1].requested_size;
+  size_t n = ((struct chunk_begin*)ptr)[-1].requested_size;
   if (size < n) n = size;
 
   __builtin_memcpy(p, ptr, n);
@@ -236,9 +289,9 @@ void *__libqasan_realloc(void *ptr, size_t size) {
 
 }
 
-int __libqasan_posix_memalign(void **ptr, size_t align, size_t len) {
+int __libqasan_posix_memalign(void** ptr, size_t align, size_t len) {
 
-  if ((align % 2) || (align % sizeof(void *))) return EINVAL;
+  if ((align % 2) || (align % sizeof(void*))) return EINVAL;
   if (len == 0) {
 
     *ptr = NULL;
@@ -252,7 +305,7 @@ int __libqasan_posix_memalign(void **ptr, size_t align, size_t len) {
 
   int state = QASAN_SWAP(QASAN_DISABLED);  // disable qasan for this thread
 
-  char *orig = dlmalloc(sizeof(struct chunk_struct) + size);
+  char* orig = backend_malloc(sizeof(struct chunk_struct) + size);
 
   QASAN_SWAP(state);
 
@@ -260,10 +313,10 @@ int __libqasan_posix_memalign(void **ptr, size_t align, size_t len) {
 
   QASAN_UNPOISON(orig, sizeof(struct chunk_struct) + size);
 
-  char *data = orig + sizeof(struct chunk_begin);
+  char* data = orig + sizeof(struct chunk_begin);
   data += align - ((uintptr_t)data % align);
 
-  struct chunk_begin *p = (struct chunk_begin *)data - 1;
+  struct chunk_begin* p = (struct chunk_begin*)data - 1;
 
   p->requested_size = len;
   p->aligned_orig = orig;
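
To satisfy the requested alignment, the code rounds the data pointer up past the header and records the untouched base pointer in aligned_orig, so that free (and the quarantine) can later release the real allocation rather than the shifted pointer; the request itself is sized earlier in the function (outside this hunk) to leave room for the shift. A condensed standalone sketch of that arithmetic, without redzones or poisoning:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct header {
  size_t requested_size;
  void*  aligned_orig;  /* base of the real allocation */
};

/* simplified memalign: over-allocate, shift the data pointer up to the
   requested alignment, and keep the original base for the later free */
static void* demo_memalign(size_t align, size_t len) {
  char* orig = malloc(sizeof(struct header) + len + align);
  if (!orig) return NULL;

  char* data = orig + sizeof(struct header);
  data += align - ((uintptr_t)data % align);  /* always shifts, even if aligned */

  struct header* h = (struct header*)data - 1;
  h->requested_size = len;
  h->aligned_orig = orig;

  return data;
}

static void demo_free(void* ptr) {
  struct header* h = (struct header*)ptr - 1;
  free(h->aligned_orig);  /* release the real base, not the shifted pointer */
}

int main(void) {
  void* p = demo_memalign(64, 100);
  if (!p) return 1;
  printf("%p aligned to 64: %s\n", p, ((uintptr_t)p % 64) ? "no" : "yes");
  demo_free(p);
  return 0;
}
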
@@ -286,9 +339,9 @@ int __libqasan_posix_memalign(void **ptr, size_t align, size_t len) {
 
 }
 
-void *__libqasan_memalign(size_t align, size_t len) {
+void* __libqasan_memalign(size_t align, size_t len) {
 
-  void *ret = NULL;
+  void* ret = NULL;
 
   __libqasan_posix_memalign(&ret, align, len);
 
@@ -296,9 +349,9 @@ void *__libqasan_memalign(size_t align, size_t len) {
 
 }
 
-void *__libqasan_aligned_alloc(size_t align, size_t len) {
+void* __libqasan_aligned_alloc(size_t align, size_t len) {
 
-  void *ret = NULL;
+  void* ret = NULL;
 
   if ((len % align)) return NULL;