author     vanhauser-thc <vh@thc.org>    2024-06-06 17:52:21 +0200
committer  vanhauser-thc <vh@thc.org>    2024-06-06 17:52:21 +0200
commit     477063e9ee88da5feb73b38b27a1241e8b77e002 (patch)
tree       a009d430c3c4d048a2873be0c5e0590f916ad070
parent     e46c106b890404fbeb2d0e6120510ddf83113da6 (diff)
download   afl++-477063e9ee88da5feb73b38b27a1241e8b77e002.tar.gz
memory adjustments
-rw-r--r--  TODO.md                  |   1
-rw-r--r--  docs/Changelog.md        |   1
-rw-r--r--  src/afl-fuzz-queue.c     | 112
-rw-r--r--  src/afl-fuzz-redqueen.c  |  34
-rw-r--r--  src/afl-fuzz-skipdet.c   |   9
5 files changed, 78 insertions, 79 deletions
diff --git a/TODO.md b/TODO.md
index ace07434..000c78dd 100644
--- a/TODO.md
+++ b/TODO.md
@@ -2,6 +2,7 @@
 
 ## Must
 
+ - review: queue_testcase_store_mem and queue_testcase_get
  - hardened_usercopy=0 page_alloc.shuffle=0
  - add value_profile but only enable after 15 minutes without finds
  - cmplog max items env?
diff --git a/docs/Changelog.md b/docs/Changelog.md
index 633e7071..cf5d2500 100644
--- a/docs/Changelog.md
+++ b/docs/Changelog.md
@@ -23,6 +23,7 @@
     - -V timing is now accurately the fuzz time (without syncing); before,
       long calibration times and syncing could result in no fuzzing being
       done when the time had already run out by then, thanks to @eqv!
+    - make afl-fuzz use less memory with cmplog and fix a memleak
   * afl-cc:
     - re-enable i386 support that was accidentally disabled
     - fixes for LTO and outdated afl-gcc mode for i386
diff --git a/src/afl-fuzz-queue.c b/src/afl-fuzz-queue.c
index d19dd51a..cbdfd5b0 100644
--- a/src/afl-fuzz-queue.c
+++ b/src/afl-fuzz-queue.c
@@ -60,63 +60,6 @@ inline u32 select_next_queue_entry(afl_state_t *afl) {
 
 }
 
-inline double compute_weight(afl_state_t *afl, struct queue_entry *q,
-                             double avg_exec_us, double avg_bitmap_size,
-                             double avg_len) {
-
-  double weight = 1.0;
-
-  if (likely(afl->schedule >= FAST && afl->schedule <= RARE)) {
-
-    u32 hits = afl->n_fuzz[q->n_fuzz_entry];
-    if (likely(hits)) { weight /= (log10(hits) + 1); }
-
-  }
-
-  if (likely(afl->schedule < RARE)) {
-
-    double t = q->exec_us / avg_exec_us;
-    if (likely(t < 0.1)) {
-
-      // nothing
-
-    } else if (likely(t <= 0.25))
-
-      weight *= 0.9;
-    else if (likely(t <= 0.5)) {
-
-      // nothing
-
-    } else if (likely(t < 1.0))
-
-      weight *= 1.15;
-    else if (unlikely(t > 2.5 && t < 5.0))
-      weight *= 1.1;
-    // else nothing
-
-  }
-
-  double l = q->len / avg_len;
-  if (likely(l < 0.1))
-    weight *= 0.75;
-  else if (likely(l < 0.25))
-    weight *= 1.1;
-  else if (unlikely(l >= 10))
-    weight *= 1.1;
-
-  double bms = q->bitmap_size / avg_bitmap_size;
-  if (likely(bms < 0.5))
-    weight *= (1.0 + ((bms - 0.5) / 2));
-  else if (unlikely(bms > 1.33))
-    weight *= 1.1;
-
-  if (unlikely(!q->was_fuzzed)) { weight *= 2.5; }
-  if (unlikely(q->fs_redundant)) { weight *= 0.75; }
-
-  return weight;
-
-}
-
 /* create the alias table that allows weighted random selection - expensive */
 
 void create_alias_table(afl_state_t *afl) {
@@ -177,8 +120,59 @@ void create_alias_table(afl_state_t *afl) {
 
       if (likely(!q->disabled)) {
 
-        q->weight =
-            compute_weight(afl, q, avg_exec_us, avg_bitmap_size, avg_len);
+        double weight = 1.0;
+        {  // inline does result in a compile error with LTO, weird
+
+          if (likely(afl->schedule >= FAST && afl->schedule <= RARE)) {
+
+            u32 hits = afl->n_fuzz[q->n_fuzz_entry];
+            if (likely(hits)) { weight /= (log10(hits) + 1); }
+
+          }
+
+          if (likely(afl->schedule < RARE)) {
+
+            double t = q->exec_us / avg_exec_us;
+            if (likely(t < 0.1)) {
+
+              // nothing
+
+            } else if (likely(t <= 0.25))
+
+              weight *= 0.9;
+            else if (likely(t <= 0.5)) {
+
+              // nothing
+
+            } else if (likely(t < 1.0))
+
+              weight *= 1.15;
+            else if (unlikely(t > 2.5 && t < 5.0))
+              weight *= 1.1;
+            // else nothing
+
+          }
+
+          double l = q->len / avg_len;
+          if (likely(l < 0.1))
+            weight *= 0.75;
+          else if (likely(l < 0.25))
+            weight *= 1.1;
+          else if (unlikely(l >= 10))
+            weight *= 1.1;
+
+          double bms = q->bitmap_size / avg_bitmap_size;
+          if (likely(bms < 0.5))
+            weight *= (1.0 + ((bms - 0.5) / 2));
+          else if (unlikely(bms > 1.33))
+            weight *= 1.1;
+
+          if (unlikely(!q->was_fuzzed)) { weight *= 2.5; }
+          if (unlikely(q->fs_redundant)) { weight *= 0.75; }
+
+        }
+
+        q->weight = weight;
         q->perf_score = calculate_score(afl, q);
         sum += q->weight;
 
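For context only (not part of the commit): below is a standalone C sketch of the
weighting heuristic that this hunk moves from the removed compute_weight() helper
into the body of create_alias_table(). The struct and function names are invented
for illustration, the afl->schedule gates around the first two blocks are omitted,
and only the multipliers mirror the code above. Build with something like
`cc weight_sketch.c -lm`.

/* Standalone sketch of the queue-entry weighting heuristic; the types and
   helper are hypothetical, the multipliers mirror the hunk above. */
#include <math.h>
#include <stdio.h>

struct entry_stats {                    /* stand-in for struct queue_entry */
  unsigned hits;                        /* n_fuzz hit count                */
  double   exec_us, len, bitmap_size;
  int      was_fuzzed, fs_redundant;
};

static double entry_weight(const struct entry_stats *q, double avg_exec_us,
                           double avg_bitmap_size, double avg_len) {

  double weight = 1.0;

  if (q->hits) weight /= log10((double)q->hits) + 1;   /* demote hot paths */

  double t = q->exec_us / avg_exec_us;                 /* relative speed   */
  if (t >= 0.1 && t <= 0.25) weight *= 0.9;
  else if (t > 0.5 && t < 1.0) weight *= 1.15;
  else if (t > 2.5 && t < 5.0) weight *= 1.1;

  double l = q->len / avg_len;                         /* relative size    */
  if (l < 0.1) weight *= 0.75;
  else if (l < 0.25 || l >= 10) weight *= 1.1;

  double bms = q->bitmap_size / avg_bitmap_size;       /* coverage size    */
  if (bms < 0.5) weight *= 1.0 + (bms - 0.5) / 2;
  else if (bms > 1.33) weight *= 1.1;

  if (!q->was_fuzzed) weight *= 2.5;             /* boost unfuzzed entries */
  if (q->fs_redundant) weight *= 0.75;           /* demote redundant ones  */

  return weight;
}

int main(void) {
  struct entry_stats q = {4, 800.0, 512.0, 96.0, 0, 0};
  printf("weight = %.3f\n", entry_weight(&q, 1000.0, 128.0, 1024.0));
  return 0;
}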
diff --git a/src/afl-fuzz-redqueen.c b/src/afl-fuzz-redqueen.c
index 9316da71..6c3582f2 100644
--- a/src/afl-fuzz-redqueen.c
+++ b/src/afl-fuzz-redqueen.c
@@ -322,7 +322,7 @@ static u8 colorization(afl_state_t *afl, u8 *buf, u32 len,
 
   memcpy(backup, buf, len);
   memcpy(changed, buf, len);
-  if (afl->cmplog_random_colorization) {
+  if (likely(afl->cmplog_random_colorization)) {
 
     random_replace(afl, changed, len);
 
@@ -402,6 +402,7 @@ static u8 colorization(afl_state_t *afl, u8 *buf, u32 len,
 
   u32 i = 1;
   u32 positions = 0;
+
   while (i) {
 
   restart:
@@ -2996,15 +2997,16 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len) {
 
   struct tainted *t = taint;
 
+#ifdef _DEBUG
   while (t) {
 
-#ifdef _DEBUG
     fprintf(stderr, "T: idx=%u len=%u\n", t->pos, t->len);
-#endif
     t = t->next;
 
   }
 
+#endif
+
 #if defined(_DEBUG) || defined(CMPLOG_INTROSPECTION)
   u64 start_time = get_cur_time();
   u32 cmp_locations = 0;
@@ -3148,27 +3150,27 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len) {
 
 exit_its:
 
-  if (afl->cmplog_lvl == CMPLOG_LVL_MAX) {
+  // if (afl->cmplog_lvl == CMPLOG_LVL_MAX) {
 
-    afl->queue_cur->colorized = CMPLOG_LVL_MAX;
+  afl->queue_cur->colorized = CMPLOG_LVL_MAX;
 
-    if (afl->queue_cur->cmplog_colorinput) {
+  if (afl->queue_cur->cmplog_colorinput) {
 
-      ck_free(afl->queue_cur->cmplog_colorinput);
+    ck_free(afl->queue_cur->cmplog_colorinput);
 
-    }
+  }
 
-    while (taint) {
+  while (taint) {
 
-      t = taint->next;
-      ck_free(taint);
-      taint = t;
+    t = taint->next;
+    ck_free(taint);
+    taint = t;
 
-    }
+  }
 
-    afl->queue_cur->taint = NULL;
+  afl->queue_cur->taint = NULL;
 
-  } else {
+  /*} else {
 
     afl->queue_cur->colorized = LVL2;
 
@@ -3182,7 +3184,7 @@ exit_its:
 
     }
 
-  }
+  }*/
 
 #ifdef CMPLOG_COMBINE
   if (afl->queued_items + afl->saved_crashes > orig_hit_cnt + 1) {
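The memleak fix above makes the taint-list cleanup at exit_its unconditional
instead of gating it on CMPLOG_LVL_MAX. A minimal sketch of that walk-and-free
pattern, with a hypothetical node type standing in for struct tainted:

/* Minimal sketch of the walk-and-free cleanup the commit now always runs;
   the node type is a hypothetical stand-in for struct tainted. */
#include <stdlib.h>

struct taint_node {
  unsigned pos, len;
  struct taint_node *next;
};

static void free_taint_list(struct taint_node **head) {
  while (*head) {
    struct taint_node *next = (*head)->next;  /* save the link first     */
    free(*head);                              /* then release the node   */
    *head = next;
  }                                           /* leaves *head == NULL    */
}

int main(void) {
  struct taint_node *list = calloc(1, sizeof *list);
  if (list) list->next = calloc(1, sizeof *list);
  free_taint_list(&list);                     /* no nodes leak on exit   */
  return 0;
}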
diff --git a/src/afl-fuzz-skipdet.c b/src/afl-fuzz-skipdet.c
index e52d59a3..8a927292 100644
--- a/src/afl-fuzz-skipdet.c
+++ b/src/afl-fuzz-skipdet.c
@@ -33,15 +33,15 @@ u8 is_det_timeout(u64 cur_ms, u8 is_flip) {
 
 u8 should_det_fuzz(afl_state_t *afl, struct queue_entry *q) {
 
-  if (!afl->skipdet_g->virgin_det_bits) {
+  if (unlikely(!afl->skipdet_g->virgin_det_bits)) {
 
     afl->skipdet_g->virgin_det_bits =
         (u8 *)ck_alloc(sizeof(u8) * afl->fsrv.map_size);
 
   }
 
-  if (!q->favored || q->passed_det) return 0;
-  if (!q->trace_mini) return 0;
+  if (likely(!q->favored || q->passed_det)) return 0;
+  if (unlikely(!q->trace_mini)) return 0;
 
   if (!afl->skipdet_g->last_cov_undet)
     afl->skipdet_g->last_cov_undet = get_cur_time();
@@ -122,7 +122,8 @@ u8 skip_deterministic_stage(afl_state_t *afl, u8 *orig_buf, u8 *out_buf,
   afl->stage_cur = 0;
   orig_hit_cnt = afl->queued_items + afl->saved_crashes;
 
-  u8 *inf_eff_map = (u8 *)ck_alloc(sizeof(u8) * len);
+  static u8 *inf_eff_map;
+  inf_eff_map = (u8 *)ck_realloc(inf_eff_map, sizeof(u8) * len);
   memset(inf_eff_map, 1, sizeof(u8) * len);
 
   if (common_fuzz_stuff(afl, orig_buf, len)) { return 0; }
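The last hunk replaces a per-call ck_alloc() of inf_eff_map, which was never
freed, with a single static pointer that is resized on each call. A sketch of
the same reuse pattern using plain libc realloc() instead of AFL++'s
ck_realloc() (which aborts on failure rather than returning NULL):

/* Sketch of the buffer-reuse pattern from the hunk above, using plain
   realloc() instead of AFL++'s ck_realloc(). */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static void prepare_eff_map(size_t len) {
  static unsigned char *inf_eff_map;          /* persists across calls       */
  unsigned char *tmp = realloc(inf_eff_map, len);
  if (!tmp) { perror("realloc"); exit(1); }   /* ck_realloc aborts here too  */
  inf_eff_map = tmp;
  memset(inf_eff_map, 1, len);                /* mark every byte "effective" */
  /* ... the deterministic stage would consult inf_eff_map here ... */
}

int main(void) {
  prepare_eff_map(1024);
  prepare_eff_map(4096);                      /* same buffer, resized        */
  return 0;
}

The tradeoff is that the buffer is intentionally never freed and the function
becomes non-reentrant, which matches afl-fuzz's single-threaded fuzzing loop.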