path: root/src/afl-fuzz-bitmap.c
Diffstat (limited to 'src/afl-fuzz-bitmap.c')
 src/afl-fuzz-bitmap.c | 530 ++++++++++++++++++++++++------------------------
 1 file changed, 272 insertions(+), 258 deletions(-)
diff --git a/src/afl-fuzz-bitmap.c b/src/afl-fuzz-bitmap.c
index aa8d5a18..4ed59364 100644
--- a/src/afl-fuzz-bitmap.c
+++ b/src/afl-fuzz-bitmap.c
@@ -25,6 +25,9 @@
 
 #include "afl-fuzz.h"
 #include <limits.h>
+#if !defined NAME_MAX
+  #define NAME_MAX _XOPEN_NAME_MAX
+#endif
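
Note: NAME_MAX is not guaranteed to be defined by <limits.h> on every
platform, and describe_op() below now budgets file-name lengths against it.
A minimal standalone sketch of the same fallback chain; the literal 255 is an
illustrative assumption for systems that define neither macro, not part of
this patch:

    #include <limits.h>
    #ifndef NAME_MAX
      #ifdef _XOPEN_NAME_MAX
        #define NAME_MAX _XOPEN_NAME_MAX
      #else
        #define NAME_MAX 255 /* common POSIX value; assumed for illustration */
      #endif
    #endif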
 
 /* Write bitmap to file. The bitmap is useful mostly for the secret
    -B option, to focus a separate fuzzing session on a particular
@@ -49,101 +52,6 @@ void write_bitmap(afl_state_t *afl) {
 
 }
 
-/* Check if the current execution path brings anything new to the table.
-   Update virgin bits to reflect the finds. Returns 1 if the only change is
-   the hit-count for a particular tuple; 2 if there are new tuples seen.
-   Updates the map, so subsequent calls will always return 0.
-
-   This function is called after every exec() on a fairly large buffer, so
-   it needs to be fast. We do this in 32-bit and 64-bit flavors. */
-
-u8 has_new_bits(afl_state_t *afl, u8 *virgin_map) {
-
-#ifdef WORD_SIZE_64
-
-  u64 *current = (u64 *)afl->fsrv.trace_bits;
-  u64 *virgin = (u64 *)virgin_map;
-
-  u32 i = (afl->fsrv.map_size >> 3);
-
-#else
-
-  u32 *current = (u32 *)afl->fsrv.trace_bits;
-  u32 *virgin = (u32 *)virgin_map;
-
-  u32 i = (afl->fsrv.map_size >> 2);
-
-#endif                                                     /* ^WORD_SIZE_64 */
-  // the map size must be a minimum of 8 bytes.
-  // for variable/dynamic map sizes this is ensured in the forkserver
-
-  u8 ret = 0;
-
-  while (i--) {
-
-    /* Optimize for (*current & *virgin) == 0 - i.e., no bits in current bitmap
-       that have not been already cleared from the virgin map - since this will
-       almost always be the case. */
-
-    // the (*current) is unnecessary but speeds up the overall comparison
-    if (unlikely(*current) && unlikely(*current & *virgin)) {
-
-      if (likely(ret < 2)) {
-
-        u8 *cur = (u8 *)current;
-        u8 *vir = (u8 *)virgin;
-
-        /* Looks like we have not found any new bytes yet; see if any non-zero
-           bytes in current[] are pristine in virgin[]. */
-
-#ifdef WORD_SIZE_64
-
-        if (*virgin == 0xffffffffffffffff || (cur[0] && vir[0] == 0xff) ||
-            (cur[1] && vir[1] == 0xff) || (cur[2] && vir[2] == 0xff) ||
-            (cur[3] && vir[3] == 0xff) || (cur[4] && vir[4] == 0xff) ||
-            (cur[5] && vir[5] == 0xff) || (cur[6] && vir[6] == 0xff) ||
-            (cur[7] && vir[7] == 0xff)) {
-
-          ret = 2;
-
-        } else {
-
-          ret = 1;
-
-        }
-
-#else
-
-        if (*virgin == 0xffffffff || (cur[0] && vir[0] == 0xff) ||
-            (cur[1] && vir[1] == 0xff) || (cur[2] && vir[2] == 0xff) ||
-            (cur[3] && vir[3] == 0xff))
-          ret = 2;
-        else
-          ret = 1;
-
-#endif                                                     /* ^WORD_SIZE_64 */
-
-      }
-
-      *virgin &= ~*current;
-
-    }
-
-    ++current;
-    ++virgin;
-
-  }
-
-  if (unlikely(ret) && likely(virgin_map == afl->virgin_bits)) {
-
-    afl->bitmap_changed = 1;
-
-  }
-
-  return ret;
-
-}
-
 /* Count the number of bits set in the provided bitmap. Used for the status
    screen several times every second, does not have to be fast. */
 
@@ -192,10 +100,10 @@ u32 count_bytes(afl_state_t *afl, u8 *mem) {
     u32 v = *(ptr++);
 
     if (!v) { continue; }
-    if (v & 0x000000ff) { ++ret; }
-    if (v & 0x0000ff00) { ++ret; }
-    if (v & 0x00ff0000) { ++ret; }
-    if (v & 0xff000000) { ++ret; }
+    if (v & 0x000000ffU) { ++ret; }
+    if (v & 0x0000ff00U) { ++ret; }
+    if (v & 0x00ff0000U) { ++ret; }
+    if (v & 0xff000000U) { ++ret; }
 
   }
 
@@ -219,11 +127,11 @@ u32 count_non_255_bytes(afl_state_t *afl, u8 *mem) {
     /* This is called on the virgin bitmap, so optimize for the most likely
        case. */
 
-    if (v == 0xffffffff) { continue; }
-    if ((v & 0x000000ff) != 0x000000ff) { ++ret; }
-    if ((v & 0x0000ff00) != 0x0000ff00) { ++ret; }
-    if ((v & 0x00ff0000) != 0x00ff0000) { ++ret; }
-    if ((v & 0xff000000) != 0xff000000) { ++ret; }
+    if (v == 0xffffffffU) { continue; }
+    if ((v & 0x000000ffU) != 0x000000ffU) { ++ret; }
+    if ((v & 0x0000ff00U) != 0x0000ff00U) { ++ret; }
+    if ((v & 0x00ff0000U) != 0x00ff0000U) { ++ret; }
+    if ((v & 0xff000000U) != 0xff000000U) { ++ret; }
 
   }
 
@@ -235,98 +143,46 @@ u32 count_non_255_bytes(afl_state_t *afl, u8 *mem) {
    and replacing it with 0x80 or 0x01 depending on whether the tuple
    is hit or not. Called on every new crash or timeout, should be
    reasonably fast. */
-
+#define TIMES4(x) x, x, x, x
+#define TIMES8(x) TIMES4(x), TIMES4(x)
+#define TIMES16(x) TIMES8(x), TIMES8(x)
+#define TIMES32(x) TIMES16(x), TIMES16(x)
+#define TIMES64(x) TIMES32(x), TIMES32(x)
+#define TIMES96(x) TIMES64(x), TIMES32(x)
+#define TIMES128(x) TIMES64(x), TIMES64(x)
+#define TIMES255(x)                                                      \
+  TIMES64(x), TIMES64(x), TIMES64(x), TIMES32(x), TIMES16(x), TIMES8(x), \
+      TIMES4(x), x, x, x
 const u8 simplify_lookup[256] = {
 
-    [0] = 1, [1 ... 255] = 128
+    [0] = 1, [1] = TIMES255(128)
 
 };
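
Note: the TIMES* helpers above replace the GNU-only range designator
`[1 ... 255] = 128` (visible in the removed line) so the tables also build on
compilers without that extension; `[1] = TIMES255(128)` emits 255
comma-separated initializers that fill indices 1 through 255. A quick
standalone equivalence check, assuming the TIMES* macros are still in scope
(i.e. placed before the #undef block further down); illustrative only:

    #include <assert.h>

    /* Must match the old semantics: index 0 -> 1, indices 1..255 -> 128. */
    static const unsigned char check[256] = {[0] = 1, [1] = TIMES255(128)};

    int main(void) {

      for (int i = 0; i < 256; i++)
        assert(check[i] == (i == 0 ? 1 : 128));
      return 0;

    }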
 
-#ifdef WORD_SIZE_64
-
-void simplify_trace(afl_state_t *afl, u64 *mem) {
-
-  u32 i = (afl->fsrv.map_size >> 3);
-
-  while (i--) {
-
-    /* Optimize for sparse bitmaps. */
-
-    if (unlikely(*mem)) {
-
-      u8 *mem8 = (u8 *)mem;
-
-      mem8[0] = simplify_lookup[mem8[0]];
-      mem8[1] = simplify_lookup[mem8[1]];
-      mem8[2] = simplify_lookup[mem8[2]];
-      mem8[3] = simplify_lookup[mem8[3]];
-      mem8[4] = simplify_lookup[mem8[4]];
-      mem8[5] = simplify_lookup[mem8[5]];
-      mem8[6] = simplify_lookup[mem8[6]];
-      mem8[7] = simplify_lookup[mem8[7]];
-
-    } else {
-
-      *mem = 0x0101010101010101ULL;
-
-    }
-
-    ++mem;
-
-  }
-
-}
-
-#else
-
-void simplify_trace(afl_state_t *afl, u32 *mem) {
-
-  u32 i = (afl->fsrv.map_size >> 2);
-
-  while (i--) {
-
-    /* Optimize for sparse bitmaps. */
-
-    if (unlikely(*mem)) {
-
-      u8 *mem8 = (u8 *)mem;
-
-      mem8[0] = simplify_lookup[mem8[0]];
-      mem8[1] = simplify_lookup[mem8[1]];
-      mem8[2] = simplify_lookup[mem8[2]];
-      mem8[3] = simplify_lookup[mem8[3]];
-
-    } else
-
-      *mem = 0x01010101;
-
-    ++mem;
-
-  }
-
-}
-
-#endif                                                     /* ^WORD_SIZE_64 */
-
 /* Destructively classify execution counts in a trace. This is used as a
    preprocessing step for any newly acquired traces. Called on every exec,
    must be fast. */
 
-static const u8 count_class_lookup8[256] = {
+const u8 count_class_lookup8[256] = {
 
     [0] = 0,
     [1] = 1,
     [2] = 2,
     [3] = 4,
-    [4 ... 7] = 8,
-    [8 ... 15] = 16,
-    [16 ... 31] = 32,
-    [32 ... 127] = 64,
-    [128 ... 255] = 128
+    [4] = TIMES4(8),
+    [8] = TIMES8(16),
+    [16] = TIMES16(32),
+    [32] = TIMES96(64),
+    [128] = TIMES128(128)
 
 };
 
-static u16 count_class_lookup16[65536];
+#undef TIMES255
+#undef TIMES128
+#undef TIMES96
+#undef TIMES64
+#undef TIMES32
+#undef TIMES16
+#undef TIMES8
+#undef TIMES4
+
+u16 count_class_lookup16[65536];
 
 void init_count_class16(void) {
 
@@ -345,63 +201,87 @@ void init_count_class16(void) {
 
 }
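
Note: the body of init_count_class16() is elided by the diff context. It
derives the 16-bit table from the 8-bit one so that classify_counts() can
bucket two hit counters per table lookup; a hedged reconstruction (the file
itself is authoritative):

    void init_count_class16(void) {

      u32 b1, b2;

      for (b1 = 0; b1 < 256; b1++) {

        for (b2 = 0; b2 < 256; b2++) {

          /* A 16-bit word is treated as two independent 8-bit counters. */
          count_class_lookup16[(b1 << 8) + b2] =
              (count_class_lookup8[b1] << 8) | count_class_lookup8[b2];

        }

      }

    }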
 
-#ifdef WORD_SIZE_64
+/* Import coverage processing routines. */
 
-void classify_counts(afl_forkserver_t *fsrv) {
+#ifdef WORD_SIZE_64
+  #include "coverage-64.h"
+#else
+  #include "coverage-32.h"
+#endif
 
-  u64 *mem = (u64 *)fsrv->trace_bits;
+/* Check if the current execution path brings anything new to the table.
+   Update virgin bits to reflect the finds. Returns 1 if the only change is
+   the hit-count for a particular tuple; 2 if there are new tuples seen.
+   Updates the map, so subsequent calls will always return 0.
 
-  u32 i = (fsrv->map_size >> 3);
+   This function is called after every exec() on a fairly large buffer, so
+   it needs to be fast. We do this in 32-bit and 64-bit flavors. */
 
-  while (i--) {
+inline u8 has_new_bits(afl_state_t *afl, u8 *virgin_map) {
 
-    /* Optimize for sparse bitmaps. */
+#ifdef WORD_SIZE_64
 
-    if (unlikely(*mem)) {
+  u64 *current = (u64 *)afl->fsrv.trace_bits;
+  u64 *virgin = (u64 *)virgin_map;
 
-      u16 *mem16 = (u16 *)mem;
+  u32 i = (afl->fsrv.map_size >> 3);
 
-      mem16[0] = count_class_lookup16[mem16[0]];
-      mem16[1] = count_class_lookup16[mem16[1]];
-      mem16[2] = count_class_lookup16[mem16[2]];
-      mem16[3] = count_class_lookup16[mem16[3]];
+#else
 
-    }
+  u32 *current = (u32 *)afl->fsrv.trace_bits;
+  u32 *virgin = (u32 *)virgin_map;
 
-    ++mem;
+  u32 i = (afl->fsrv.map_size >> 2);
 
-  }
+#endif                                                     /* ^WORD_SIZE_64 */
 
-}
+  u8 ret = 0;
+  while (i--) {
 
-#else
+    if (unlikely(*current)) discover_word(&ret, current, virgin);
 
-void classify_counts(afl_forkserver_t *fsrv) {
+    current++;
+    virgin++;
 
-  u32 *mem = (u32 *)fsrv->trace_bits;
+  }
 
-  u32 i = (fsrv->map_size >> 2);
+  if (unlikely(ret) && likely(virgin_map == afl->virgin_bits))
+    afl->bitmap_changed = 1;
 
-  while (i--) {
+  return ret;
 
-    /* Optimize for sparse bitmaps. */
+}
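
Note: discover_word() now lives in coverage-64.h / coverage-32.h and carries
the per-word logic that the deleted has_new_bits() performed inline. A sketch
of the 64-bit flavor, mirroring the code removed above (hedged; the header is
authoritative):

    inline void discover_word(u8 *ret, u64 *current, u64 *virgin) {

      /* Optimize for (*current & *virgin) == 0, i.e. no bits in current
         that are still set in the virgin map - almost always the case. */
      if (*current & *virgin) {

        if (likely(*ret < 2)) {

          u8 *cur = (u8 *)current;
          u8 *vir = (u8 *)virgin;

          /* A pristine 0xff byte in virgin means a brand-new tuple: score 2.
             Otherwise only a hit count changed: score 1. */
          if (*virgin == 0xffffffffffffffff || (cur[0] && vir[0] == 0xff) ||
              (cur[1] && vir[1] == 0xff) || (cur[2] && vir[2] == 0xff) ||
              (cur[3] && vir[3] == 0xff) || (cur[4] && vir[4] == 0xff) ||
              (cur[5] && vir[5] == 0xff) || (cur[6] && vir[6] == 0xff) ||
              (cur[7] && vir[7] == 0xff))
            *ret = 2;
          else
            *ret = 1;

        }

        *virgin &= ~*current;

      }

    }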
 
-    if (unlikely(*mem)) {
+/* A combination of classify_counts and has_new_bits. If 0 is returned, then the
+ * trace bits are kept as-is. Otherwise, the trace bits are overwritten with
+ * classified values.
+ *
+ * This accelerates the processing: in most cases, no interesting behavior
+ * happens, and the trace bits will be discarded soon. This function optimizes
+ * for such cases: a one-pass scan of the trace bits without modifying
+ * anything. Only in rare cases does it fall back to the slow path:
+ * classify_counts() first, then return has_new_bits(). */
 
-      u16 *mem16 = (u16 *)mem;
+inline u8 has_new_bits_unclassified(afl_state_t *afl, u8 *virgin_map) {
 
-      mem16[0] = count_class_lookup16[mem16[0]];
-      mem16[1] = count_class_lookup16[mem16[1]];
+  /* Handle the hot path first: no new coverage */
+  u8 *end = afl->fsrv.trace_bits + afl->fsrv.map_size;
 
-    }
+#ifdef WORD_SIZE_64
 
-    ++mem;
+  if (!skim((u64 *)virgin_map, (u64 *)afl->fsrv.trace_bits, (u64 *)end))
+    return 0;
 
-  }
+#else
 
-}
+  if (!skim((u32 *)virgin_map, (u32 *)afl->fsrv.trace_bits, (u32 *)end))
+    return 0;
 
 #endif                                                     /* ^WORD_SIZE_64 */
+  classify_counts(&afl->fsrv);
+  return has_new_bits(afl, virgin_map);
+
+}
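
Note: skim() (also provided by the coverage headers) is the read-only fast
path: it classifies each word on the fly and tests it against the virgin map
without writing to trace_bits. A minimal scalar sketch of the contract,
assuming the headers' classify_word() helper; the real versions are unrolled
and, where supported, vectorized:

    /* Return 1 as soon as any classified word would touch a still-virgin
       bit; return 0 if the whole trace is guaranteed uninteresting. */
    inline u8 skim(const u64 *virgin, const u64 *current,
                   const u64 *current_end) {

      for (; current < current_end; ++virgin, ++current) {

        if (unlikely(*current) && (classify_word(*current) & *virgin))
          return 1;

      }

      return 0;

    }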
 
 /* Compact trace bytes into a smaller bitmap. We effectively just drop the
    count information here. This is called only sporadically, for some
@@ -425,8 +305,10 @@ void minimize_bits(afl_state_t *afl, u8 *dst, u8 *src) {
 /* Construct a file name for a new test case, capturing the operation
    that led to its discovery. Returns a ptr to afl->describe_op_buf_256. */
 
-u8 *describe_op(afl_state_t *afl, u8 hnb) {
+u8 *describe_op(afl_state_t *afl, u8 new_bits, size_t max_description_len) {
 
+  size_t real_max_len =
+      MIN(max_description_len, sizeof(afl->describe_op_buf_256));
   u8 *ret = afl->describe_op_buf_256;
 
   if (unlikely(afl->syncing_party)) {
@@ -443,31 +325,69 @@ u8 *describe_op(afl_state_t *afl, u8 hnb) {
 
     }
 
-    sprintf(ret + strlen(ret), ",time:%llu", get_cur_time() - afl->start_time);
+    sprintf(ret + strlen(ret), ",time:%llu",
+            get_cur_time() + afl->prev_run_time - afl->start_time);
+
+    if (afl->current_custom_fuzz &&
+        afl->current_custom_fuzz->afl_custom_describe) {
 
-    sprintf(ret + strlen(ret), ",op:%s", afl->stage_short);
+      /* We are currently in a custom mutator that supports afl_custom_describe,
+       * use it! */
 
-    if (afl->stage_cur_byte >= 0) {
+      size_t len_current = strlen(ret);
+      ret[len_current++] = ',';
+      ret[len_current] = '\0';
 
-      sprintf(ret + strlen(ret), ",pos:%d", afl->stage_cur_byte);
+      ssize_t size_left = real_max_len - len_current - strlen(",+cov") - 2;
+      if (unlikely(size_left <= 0)) FATAL("filename got too long");
 
-      if (afl->stage_val_type != STAGE_VAL_NONE) {
+      const char *custom_description =
+          afl->current_custom_fuzz->afl_custom_describe(
+              afl->current_custom_fuzz->data, size_left);
+      if (!custom_description || !custom_description[0]) {
 
-        sprintf(ret + strlen(ret), ",val:%s%+d",
-                (afl->stage_val_type == STAGE_VAL_BE) ? "be:" : "",
-                afl->stage_cur_val);
+        DEBUGF("Error getting a description from afl_custom_describe");
+        /* Fall back to the stage name as the description */
+        sprintf(ret + len_current, "op:%s", afl->stage_short);
+
+      } else {
+
+        /* We got a proper custom description, use it */
+        strncat(ret + len_current, custom_description, size_left);
 
       }
 
     } else {
 
-      sprintf(ret + strlen(ret), ",rep:%d", afl->stage_cur_val);
+      /* Normal testcase descriptions start here */
+      sprintf(ret + strlen(ret), ",op:%s", afl->stage_short);
+
+      if (afl->stage_cur_byte >= 0) {
+
+        sprintf(ret + strlen(ret), ",pos:%d", afl->stage_cur_byte);
+
+        if (afl->stage_val_type != STAGE_VAL_NONE) {
+
+          sprintf(ret + strlen(ret), ",val:%s%+d",
+                  (afl->stage_val_type == STAGE_VAL_BE) ? "be:" : "",
+                  afl->stage_cur_val);
+
+        }
+
+      } else {
+
+        sprintf(ret + strlen(ret), ",rep:%d", afl->stage_cur_val);
+
+      }
 
     }
 
   }
 
-  if (hnb == 2) { strcat(ret, ",+cov"); }
+  if (new_bits == 2) { strcat(ret, ",+cov"); }
+
+  if (unlikely(strlen(ret) >= max_description_len))
+    FATAL("describe string is too long");
 
   return ret;
 
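Note: with the new max_description_len parameter, callers budget the fixed
part of the file name against NAME_MAX (e.g. NAME_MAX - strlen("id:000000,")
for queue entries below), and over-long descriptions now abort loudly instead
of overflowing or truncating silently. An illustrative queue file name this
function might produce (all values invented for the example):

    id:000042,src:000007,time:123456,op:havoc,rep:8,+cov
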
@@ -534,14 +454,15 @@ static void write_crash_readme(afl_state_t *afl) {
    save or queue the input test case for further analysis if so. Returns 1 if
    entry is saved, 0 otherwise. */
 
-u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
+u8 __attribute__((hot))
+save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 
   if (unlikely(len == 0)) { return 0; }
 
   u8 *queue_fn = "";
-  u8  hnb = '\0';
+  u8  new_bits = '\0';
   s32 fd;
-  u8  keeping = 0, res;
+  u8  keeping = 0, res, classified = 0;
   u64 cksum = 0;
 
   u8 fn[PATH_MAX];
@@ -554,19 +475,9 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 
     cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
 
-    struct queue_entry *q = afl->queue;
-    while (q) {
-
-      if (q->exec_cksum == cksum) {
-
-        ++q->n_fuzz;
-        break;
-
-      }
-
-      q = q->next;
-
-    }
+    /* Saturated increment */
+    if (afl->n_fuzz[cksum % N_FUZZ_SIZE] < 0xFFFFFFFF)
+      afl->n_fuzz[cksum % N_FUZZ_SIZE]++;
 
   }
 
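Note: this replaces the old O(queue-length) walk on every exec with an O(1)
saturating bump in a fixed-size array indexed by the trace checksum; colliding
paths simply share a bucket, which the FAST..RARE power schedules tolerate
(they read the counter back through n_fuzz_entry, set further down in this
patch). The access pattern, restated as a sketch:

    u32 bucket = cksum % N_FUZZ_SIZE;      /* hash the path into a bucket */
    if (afl->n_fuzz[bucket] < 0xFFFFFFFF)  /* saturate rather than wrap   */
      afl->n_fuzz[bucket]++;
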
@@ -575,17 +486,22 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
     /* Keep only if there are new bits in the map, add to queue for
        future fuzzing, etc. */
 
-    if (!(hnb = has_new_bits(afl, afl->virgin_bits))) {
+    new_bits = has_new_bits_unclassified(afl, afl->virgin_bits);
+
+    if (likely(!new_bits)) {
 
       if (unlikely(afl->crash_mode)) { ++afl->total_crashes; }
       return 0;
 
     }
 
+    classified = new_bits;
+
 #ifndef SIMPLE_FILES
 
-    queue_fn = alloc_printf("%s/queue/id:%06u,%s", afl->out_dir,
-                            afl->queued_paths, describe_op(afl, hnb));
+    queue_fn = alloc_printf(
+        "%s/queue/id:%06u,%s", afl->out_dir, afl->queued_paths,
+        describe_op(afl, new_bits, NAME_MAX - strlen("id:000000,")));
 
 #else
 
@@ -593,10 +509,42 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
         alloc_printf("%s/queue/id_%06u", afl->out_dir, afl->queued_paths);
 
 #endif                                                    /* ^!SIMPLE_FILES */
-
+    fd = open(queue_fn, O_WRONLY | O_CREAT | O_EXCL, 0600);
+    if (unlikely(fd < 0)) { PFATAL("Unable to create '%s'", queue_fn); }
+    ck_write(fd, mem, len, queue_fn);
+    close(fd);
     add_to_queue(afl, queue_fn, len, 0);
 
-    if (hnb == 2) {
+#ifdef INTROSPECTION
+    if (afl->custom_mutators_count && afl->current_custom_fuzz) {
+
+      LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
+
+        if (afl->current_custom_fuzz == el && el->afl_custom_introspection) {
+
+          const char *ptr = el->afl_custom_introspection(el->data);
+
+          if (ptr != NULL && *ptr != 0) {
+
+            fprintf(afl->introspection_file, "QUEUE CUSTOM %s = %s\n", ptr,
+                    afl->queue_top->fname);
+
+          }
+
+        }
+
+      });
+
+    } else if (afl->mutation[0] != 0) {
+
+      fprintf(afl->introspection_file, "QUEUE %s = %s\n", afl->mutation,
+              afl->queue_top->fname);
+
+    }
+
+#endif
+
+    if (new_bits == 2) {
 
       afl->queue_top->has_new_cov = 1;
       ++afl->queued_with_cov;
@@ -606,9 +554,16 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
     if (cksum)
       afl->queue_top->exec_cksum = cksum;
     else
-      afl->queue_top->exec_cksum =
+      cksum = afl->queue_top->exec_cksum =
           hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
 
+    if (afl->schedule >= FAST && afl->schedule <= RARE) {
+
+      afl->queue_top->n_fuzz_entry = cksum % N_FUZZ_SIZE;
+      afl->n_fuzz[afl->queue_top->n_fuzz_entry] = 1;
+
+    }
+
     /* Try to calibrate inline; this also calls update_bitmap_score() when
        successful. */
 
@@ -620,10 +575,11 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 
     }
 
-    fd = open(queue_fn, O_WRONLY | O_CREAT | O_EXCL, 0600);
-    if (unlikely(fd < 0)) { PFATAL("Unable to create '%s'", queue_fn); }
-    ck_write(fd, mem, len, queue_fn);
-    close(fd);
+    if (likely(afl->q_testcase_max_cache_size)) {
+
+      queue_testcase_store_mem(afl, afl->queue_top, mem);
+
+    }
 
     keeping = 1;
 
@@ -644,17 +600,48 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 
       if (likely(!afl->non_instrumented_mode)) {
 
-#ifdef WORD_SIZE_64
-        simplify_trace(afl, (u64 *)afl->fsrv.trace_bits);
-#else
-        simplify_trace(afl, (u32 *)afl->fsrv.trace_bits);
-#endif                                                     /* ^WORD_SIZE_64 */
+        if (!classified) {
+
+          classify_counts(&afl->fsrv);
+          classified = 1;
+
+        }
+
+        simplify_trace(afl, afl->fsrv.trace_bits);
 
         if (!has_new_bits(afl, afl->virgin_tmout)) { return keeping; }
 
       }
 
       ++afl->unique_tmouts;
+#ifdef INTROSPECTION
+      if (afl->custom_mutators_count && afl->current_custom_fuzz) {
+
+        LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
+
+          if (afl->current_custom_fuzz == el && el->afl_custom_introspection) {
+
+            const char *ptr = el->afl_custom_introspection(el->data);
+
+            if (ptr != NULL && *ptr != 0) {
+
+              fprintf(afl->introspection_file,
+                      "UNIQUE_TIMEOUT CUSTOM %s = %s\n", ptr,
+                      afl->queue_top->fname);
+
+            }
+
+          }
+
+        });
+
+      } else if (afl->mutation[0] != 0) {
+
+        fprintf(afl->introspection_file, "UNIQUE_TIMEOUT %s\n", afl->mutation);
+
+      }
+
+#endif
 
       /* Before saving, we make sure that it's a genuine hang by re-running
          the target with a more generous timeout (unless the default timeout
@@ -665,6 +652,7 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
         u8 new_fault;
         write_to_testcase(afl, mem, len);
         new_fault = fuzz_run_target(afl, &afl->fsrv, afl->hang_tmout);
+        classify_counts(&afl->fsrv);
 
         /* A corner case that one user reported bumping into: increasing the
            timeout actually uncovers a crash. Make sure we don't discard it if
@@ -683,7 +671,8 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 #ifndef SIMPLE_FILES
 
       snprintf(fn, PATH_MAX, "%s/hangs/id:%06llu,%s", afl->out_dir,
-               afl->unique_hangs, describe_op(afl, 0));
+               afl->unique_hangs,
+               describe_op(afl, 0, NAME_MAX - strlen("id:000000,")));
 
 #else
 
@@ -712,11 +701,9 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 
       if (likely(!afl->non_instrumented_mode)) {
 
-#ifdef WORD_SIZE_64
-        simplify_trace(afl, (u64 *)afl->fsrv.trace_bits);
-#else
-        simplify_trace(afl, (u32 *)afl->fsrv.trace_bits);
-#endif                                                     /* ^WORD_SIZE_64 */
+        if (!classified) { classify_counts(&afl->fsrv); }
+
+        simplify_trace(afl, afl->fsrv.trace_bits);
 
         if (!has_new_bits(afl, afl->virgin_crash)) { return keeping; }
 
@@ -728,7 +715,7 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 
       snprintf(fn, PATH_MAX, "%s/crashes/id:%06llu,sig:%02u,%s", afl->out_dir,
                afl->unique_crashes, afl->fsrv.last_kill_signal,
-               describe_op(afl, 0));
+               describe_op(afl, 0, NAME_MAX - strlen("id:000000,sig:00,")));
 
 #else
 
@@ -738,6 +725,33 @@ u8 save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
 #endif                                                    /* ^!SIMPLE_FILES */
 
       ++afl->unique_crashes;
+#ifdef INTROSPECTION
+      if (afl->custom_mutators_count && afl->current_custom_fuzz) {
+
+        LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
+
+          if (afl->current_custom_fuzz == el && el->afl_custom_introspection) {
+
+            const char *ptr = el->afl_custom_introspection(el->data);
+
+            if (ptr != NULL && *ptr != 0) {
+
+              fprintf(afl->introspection_file, "UNIQUE_CRASH CUSTOM %s = %s\n",
+                      ptr, afl->queue_top->fname);
+
+            }
+
+          }
+
+        });
+
+      } else if (afl->mutation[0] != 0) {
+
+        fprintf(afl->introspection_file, "UNIQUE_CRASH %s\n", afl->mutation);
+
+      }
+
+#endif
       if (unlikely(afl->infoexec)) {
 
         // if the user wants to be informed on new crashes - do that