Diffstat (limited to 'src/afl-fuzz-queue.c')

-rw-r--r--  src/afl-fuzz-queue.c | 119 ++++++++++++++++++++----------
1 file changed, 80 insertions(+), 39 deletions(-)
diff --git a/src/afl-fuzz-queue.c b/src/afl-fuzz-queue.c
index cfeab798..346c2639 100644
--- a/src/afl-fuzz-queue.c
+++ b/src/afl-fuzz-queue.c
@@ -23,6 +23,7 @@
  */
 
 #include "afl-fuzz.h"
+#include <limits.h>
 
 /* Mark deterministic checks as done for a particular queue entry. We use the
    .state file to avoid repeating deterministic fuzzing when resuming aborted
@@ -30,18 +31,16 @@
 
 void mark_as_det_done(afl_state_t *afl, struct queue_entry *q) {
 
-  u8 *fn = strrchr(q->fname, '/');
+  u8  fn[PATH_MAX];
   s32 fd;
 
-  fn = alloc_printf("%s/queue/.state/deterministic_done/%s", afl->out_dir,
-                    fn + 1);
+  snprintf(fn, PATH_MAX, "%s/queue/.state/deterministic_done/%s", afl->out_dir,
+           strrchr(q->fname, '/') + 1);
 
   fd = open(fn, O_WRONLY | O_CREAT | O_EXCL, 0600);
   if (fd < 0) PFATAL("Unable to create '%s'", fn);
   close(fd);
 
-  ck_free(fn);
-
   q->passed_det = 1;
 
 }
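
The rewrite above swaps a heap-allocated path (alloc_printf() + ck_free()) for
a PATH_MAX stack buffer, which is why the ck_free() calls disappear. A minimal
standalone sketch of the same marker-file idiom, with a hypothetical
write_marker() helper and the basename fallback spelled out:

    #include <fcntl.h>
    #include <limits.h>
    #include <stdio.h>
    #include <string.h>
    #include <unistd.h>

    /* Hypothetical helper: create an empty marker file under
       <out_dir>/queue/.state/deterministic_done/, named after the entry. */
    static int write_marker(const char *out_dir, const char *entry_fname) {

      char        fn[PATH_MAX];
      const char *base = strrchr(entry_fname, '/');
      base = base ? base + 1 : entry_fname; /* tolerate paths without '/' */

      /* snprintf() cannot overflow fn, unlike a bare sprintf(). */
      snprintf(fn, PATH_MAX, "%s/queue/.state/deterministic_done/%s", out_dir,
               base);

      /* O_EXCL: fail (rather than truncate) if the marker already exists. */
      int fd = open(fn, O_WRONLY | O_CREAT | O_EXCL, 0600);
      if (fd < 0) return -1;
      close(fd);
      return 0;

    }
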
@@ -51,10 +50,13 @@ void mark_as_det_done(afl_state_t *afl, struct queue_entry *q) {
 
 void mark_as_variable(afl_state_t *afl, struct queue_entry *q) {
 
-  u8 *fn = strrchr(q->fname, '/') + 1, *ldest;
+  u8 fn[PATH_MAX];
+  u8 ldest[PATH_MAX];
+
+  u8 *fn_name = strrchr(q->fname, '/') + 1;
 
-  ldest = alloc_printf("../../%s", fn);
-  fn = alloc_printf("%s/queue/.state/variable_behavior/%s", afl->out_dir, fn);
+  snprintf(ldest, PATH_MAX, "../../%s", fn_name);
+  snprintf(fn, PATH_MAX, "%s/queue/.state/variable_behavior/%s", afl->out_dir, fn_name);
 
   if (symlink(ldest, fn)) {
 
@@ -64,9 +66,6 @@ void mark_as_variable(afl_state_t *afl, struct queue_entry *q) {
 
   }
 
-  ck_free(ldest);
-  ck_free(fn);
-
   q->var_behavior = 1;
 
 }
@@ -76,14 +75,14 @@ void mark_as_variable(afl_state_t *afl, struct queue_entry *q) {
 
 void mark_as_redundant(afl_state_t *afl, struct queue_entry *q, u8 state) {
 
-  u8 *fn;
+  u8 fn[PATH_MAX];
 
   if (state == q->fs_redundant) return;
 
   q->fs_redundant = state;
 
-  fn = strrchr(q->fname, '/');
-  fn = alloc_printf("%s/queue/.state/redundant_edges/%s", afl->out_dir, fn + 1);
+  snprintf(fn, PATH_MAX, "%s/queue/.state/redundant_edges/%s", afl->out_dir,
+           strrchr(q->fname, '/') + 1);
 
   if (state) {
 
@@ -99,8 +98,6 @@ void mark_as_redundant(afl_state_t *afl, struct queue_entry *q, u8 state) {
 
   }
 
-  ck_free(fn);
-
 }
 
 /* Append new test case to the queue. */
@@ -114,6 +111,7 @@ void add_to_queue(afl_state_t *afl, u8 *fname, u32 len, u8 passed_det) {
   q->depth = afl->cur_depth + 1;
   q->passed_det = passed_det;
   q->n_fuzz = 1;
+  q->trace_mini = NULL;
 
   if (q->depth > afl->max_depth) afl->max_depth = q->depth;
 
@@ -147,7 +145,8 @@ void add_to_queue(afl_state_t *afl, u8 *fname, u32 len, u8 passed_det) {
     /* At the initialization stage, queue_cur is NULL */
     if (afl->queue_cur) fname_orig = afl->queue_cur->fname;
 
-    afl->mutator->afl_custom_queue_new_entry(afl, fname, fname_orig);
+    afl->mutator->afl_custom_queue_new_entry(afl->mutator->data, fname,
+                                             fname_orig);
 
   }
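
Note the changed first argument: the hook now receives the custom mutator's
own data pointer (afl->mutator->data) instead of the whole afl state, so an
external mutator never has to know fuzzer internals. A hedged sketch of a
mutator implementing this hook, matching the call site above (my_mutator_t
and its fields are hypothetical):

    #include <stdint.h>
    #include <stdio.h>

    typedef uint8_t u8;

    typedef struct my_mutator { /* hypothetical per-mutator state */

      unsigned int new_entries;

    } my_mutator_t;

    /* Invoked once per test case added to the queue. fname_orig is NULL
       during initialization, when queue_cur is not set yet. */
    void afl_custom_queue_new_entry(void *data, const u8 *fname_new,
                                    const u8 *fname_orig) {

      my_mutator_t *m = (my_mutator_t *)data;
      ++m->new_entries;

      fprintf(stderr, "entry %u: %s (from %s)\n", m->new_entries,
              (const char *)fname_new,
              fname_orig ? (const char *)fname_orig : "<seed>");

    }
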
 
@@ -185,35 +184,50 @@ void destroy_queue(afl_state_t *afl) {
 void update_bitmap_score(afl_state_t *afl, struct queue_entry *q) {
 
   u32 i;
-  u64 fav_factor = q->exec_us * q->len;
-  u64 fuzz_p2 = next_p2(q->n_fuzz);
+  u64 fav_factor;
+  u64 fuzz_p2 = next_pow2(q->n_fuzz);
+
+  if (afl->schedule == MMOPT || afl->schedule == RARE ||
+      unlikely(afl->fixed_seed))
+    fav_factor = q->len << 2;
+  else
+    fav_factor = q->exec_us * q->len;
 
   /* For every byte set in afl->fsrv.trace_bits[], see if there is a previous
      winner, and how it compares to us. */
-
-  for (i = 0; i < MAP_SIZE; ++i)
+  for (i = 0; i < afl->fsrv.map_size; ++i)
 
     if (afl->fsrv.trace_bits[i]) {
 
       if (afl->top_rated[i]) {
 
         /* Faster-executing or smaller test cases are favored. */
-        u64 top_rated_fuzz_p2 = next_p2(afl->top_rated[i]->n_fuzz);
-        u64 top_rated_fav_factor =
-            afl->top_rated[i]->exec_us * afl->top_rated[i]->len;
+        u64 top_rated_fav_factor;
+        u64 top_rated_fuzz_p2 = next_pow2(afl->top_rated[i]->n_fuzz);
 
-        if (fuzz_p2 > top_rated_fuzz_p2) {
+        if (afl->schedule == MMOPT || afl->schedule == RARE ||
+            unlikely(afl->fixed_seed))
+          top_rated_fav_factor = afl->top_rated[i]->len << 2;
+        else
+          top_rated_fav_factor =
+              afl->top_rated[i]->exec_us * afl->top_rated[i]->len;
 
+        if (fuzz_p2 > top_rated_fuzz_p2)
           continue;
+        else if (fuzz_p2 == top_rated_fuzz_p2)
+          if (fav_factor > top_rated_fav_factor) continue;
 
-        } else if (fuzz_p2 == top_rated_fuzz_p2) {
+        if (afl->schedule == MMOPT || afl->schedule == RARE ||
+            unlikely(afl->fixed_seed)) {
 
-          if (fav_factor > top_rated_fav_factor) continue;
+          if (fav_factor > afl->top_rated[i]->len << 2) continue;
 
-        }
+        } else {
 
-        if (fav_factor > afl->top_rated[i]->exec_us * afl->top_rated[i]->len)
-          continue;
+          if (fav_factor > afl->top_rated[i]->exec_us * afl->top_rated[i]->len)
+            continue;
+
+        }
 
         /* Looks like we're going to win. Decrease ref count for the
            previous winner, discard its afl->fsrv.trace_bits[] if necessary. */
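
The decision above boils down to: a challenger loses if it has been fuzzed
more often (larger next_pow2(n_fuzz)), loses a tie if its fav_factor is
larger, and must finally have a fav_factor no larger than the winner's, where
fav_factor is exec_us * len for timing-based schedules and len << 2 for
MMOPT, RARE, or a fixed seed. The same logic as a standalone predicate (a
sketch, with the schedule-specific factors precomputed by the caller):

    #include <stdint.h>

    typedef uint64_t u64;

    /* Nonzero if the challenger should replace the current top_rated entry.
       cand_fav/top_fav are the schedule-specific fav_factor values. */
    static int challenger_wins(u64 cand_fuzz_p2, u64 cand_fav,
                               u64 top_fuzz_p2, u64 top_fav) {

      if (cand_fuzz_p2 > top_fuzz_p2) return 0; /* fuzzed strictly more */
      if (cand_fuzz_p2 == top_fuzz_p2 && cand_fav > top_fav) return 0;
      return cand_fav <= top_fav; /* the final fav_factor check */

    }
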
@@ -234,8 +248,10 @@ void update_bitmap_score(afl_state_t *afl, struct queue_entry *q) {
 
       if (!q->trace_mini) {
 
-        q->trace_mini = ck_alloc(MAP_SIZE >> 3);
-        minimize_bits(q->trace_mini, afl->fsrv.trace_bits);
+        u32 len = (afl->fsrv.map_size >> 3);
+        if (len == 0) len = 1;
+        q->trace_mini = ck_alloc(len);
+        minimize_bits(afl, q->trace_mini, afl->fsrv.trace_bits);
 
       }
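
trace_mini stores one bit per bitmap byte, hence the map_size >> 3 allocation
(clamped to one byte for maps smaller than 8 entries). What minimize_bits()
does is essentially the following; a sketch, since the real helper lives
elsewhere in the tree and now takes afl to learn the map size:

    #include <stdint.h>

    typedef uint8_t  u8;
    typedef uint32_t u32;

    /* Set bit i of dst iff trace_bits[i] is nonzero. dst must be zeroed by
       the caller (ck_alloc() returns zeroed memory). */
    static void minimize_bits_sketch(u8 *dst, const u8 *trace_bits,
                                     u32 map_size) {

      u32 i;

      for (i = 0; i < map_size; ++i)
        if (trace_bits[i]) dst[i >> 3] |= 1 << (i & 7);

    }
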
 
@@ -254,14 +270,17 @@ void update_bitmap_score(afl_state_t *afl, struct queue_entry *q) {
 void cull_queue(afl_state_t *afl) {
 
   struct queue_entry *q;
-  static u8           temp_v[MAP_SIZE >> 3];
+  u32                 len = (afl->fsrv.map_size >> 3);
   u32                 i;
+  u8                  temp_v[MAP_SIZE >> 3];
+
+  if (len == 0) len = 1;
 
   if (afl->dumb_mode || !afl->score_changed) return;
 
   afl->score_changed = 0;
 
-  memset(temp_v, 255, MAP_SIZE >> 3);
+  memset(temp_v, 255, len);
 
   afl->queued_favored = 0;
   afl->pending_favored = 0;
@@ -278,10 +297,10 @@ void cull_queue(afl_state_t *afl) {
   /* Let's see if anything in the bitmap isn't captured in temp_v.
     If yes, and if it has an afl->top_rated[] contender, let's use it. */
 
-  for (i = 0; i < MAP_SIZE; ++i)
+  for (i = 0; i < afl->fsrv.map_size; ++i)
     if (afl->top_rated[i] && (temp_v[i >> 3] & (1 << (i & 7)))) {
 
-      u32 j = MAP_SIZE >> 3;
+      u32 j = len;
 
       /* Remove all bits belonging to the current entry from temp_v. */
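
temp_v is a bitset over the whole map (byte i >> 3, bit i & 7), initialized
to all ones. Once a favored entry wins, its covered bits are knocked out so
later map indices it already covers are skipped; the removal referenced above
amounts to:

    #include <stdint.h>

    typedef uint8_t  u8;
    typedef uint32_t u32;

    /* Clear every temp_v bit that the winning entry's trace_mini covers. */
    static void remove_covered(u8 *temp_v, const u8 *trace_mini, u32 len) {

      u32 j = len;

      while (j--)
        if (trace_mini[j]) temp_v[j] &= ~trace_mini[j];

    }
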
 
@@ -328,7 +347,8 @@ u32 calculate_score(afl_state_t *afl, struct queue_entry *q) {
   // Longer execution time means longer work on the input, the deeper in
   // coverage, the better the fuzzing, right? -mh
 
-  if (afl->schedule != MMOPT) {
+  if (afl->schedule != MMOPT && afl->schedule != RARE &&
+      likely(!afl->fixed_seed)) {
 
     if (q->exec_us * 0.1 > avg_exec_us)
       perf_score = 10;
@@ -438,7 +458,7 @@ u32 calculate_score(afl_state_t *afl, struct queue_entry *q) {
       if (q->fuzz_level < 16)
         factor = ((u32)(1 << q->fuzz_level)) / (fuzz == 0 ? 1 : fuzz);
       else
-        factor = MAX_FACTOR / (fuzz == 0 ? 1 : next_p2(fuzz));
+        factor = MAX_FACTOR / (fuzz == 0 ? 1 : next_pow2(fuzz));
       break;
 
     case LIN: factor = q->fuzz_level / (fuzz == 0 ? 1 : fuzz); break;
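
next_pow2() (the renaming of next_p2 in this change) rounds n_fuzz up to a
power of two so the divisor grows in coarse steps rather than per-execution.
One straightforward way to implement such a helper, assuming it keeps the
original next_p2 semantics:

    #include <stdint.h>

    typedef uint64_t u64;

    /* Smallest power of two >= val, for val >= 1; a sketch matching how
       next_pow2() is used here (val = n_fuzz, always >= 1). */
    static u64 next_pow2(u64 val) {

      u64 ret = 1;
      while (val > ret)
        ret <<= 1;
      return ret;

    }
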
@@ -448,8 +468,29 @@ u32 calculate_score(afl_state_t *afl, struct queue_entry *q) {
       break;
 
     case MMOPT:
+      /* This was a more complex setup, which is good, but competed with
+         RARE; the simpler algorithm below works better when RARE is not used.
+        // the newer the entry, the higher the perf_score
+        perf_score *= (1 + (double)((double)q->depth /
+        (double)afl->queued_paths));
+        // with special focus on the last 8 entries
+        if (afl->max_depth - q->depth < 8) perf_score *= (1 + ((8 -
+        (afl->max_depth - q->depth)) / 5));
+      */
+      // put focus on the last 5 entries
+      if (afl->max_depth - q->depth < 5) perf_score *= 2;
+
+      break;
+
+    case RARE:
 
-      if (afl->max_depth - q->depth < 5) perf_score *= 1.5;
+      // increase the score for every bitmap byte for which this entry
+      // is the top contender
+      perf_score += (q->tc_ref * 10);
+      // the more often fuzz result paths are equal to this queue entry,
+      // reduce its value
+      perf_score *=
+          (1 - (double)((double)q->n_fuzz / (double)afl->total_execs));
 
       break;
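
The two RARE adjustments are easy to verify by hand: an entry that is the top
contender for 3 bitmap bytes (tc_ref == 3) gains 30 points, and one whose
path showed up in 5,000 of 1,000,000 total execs then keeps 99.5% of the
result. A small self-contained check of that arithmetic:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t u32;
    typedef uint64_t u64;

    int main(void) {

      double perf_score = 100.0; /* baseline score, example value        */
      u32    tc_ref = 3;         /* top contender for 3 bitmap bytes     */
      u64    n_fuzz = 5000;      /* how often fuzzing hit this same path */
      u64    total_execs = 1000000;

      perf_score += tc_ref * 10;                              /* 100 -> 130 */
      perf_score *= 1.0 - (double)n_fuzz / (double)total_execs;

      printf("RARE perf_score: %.2f\n", perf_score);          /* 129.35 */
      return 0;

    }
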