-rwxr-xr-x  .custom-format.py                                       2
-rw-r--r--  GNUmakefile                                             4
-rw-r--r--  README.md                                               2
-rw-r--r--  TODO.md                                                 1
-rwxr-xr-x  afl-cmin                                               39
-rwxr-xr-x  afl-cmin.bash                                           4
-rwxr-xr-x  afl-plot                                               15
-rw-r--r--  custom_mutators/README.md                              11
-rw-r--r--  custom_mutators/aflpp/Makefile                         10
-rw-r--r--  custom_mutators/aflpp/README.md                         8
-rw-r--r--  custom_mutators/aflpp/aflpp.c                          89
-rw-r--r--  custom_mutators/aflpp/standalone/Makefile              10
-rw-r--r--  custom_mutators/aflpp/standalone/README.md             10
-rw-r--r--  custom_mutators/aflpp/standalone/aflpp-standalone.c   165
-rw-r--r--  docs/Changelog.md                                      24
-rw-r--r--  docs/tutorials.md                                       5
-rw-r--r--  frida_mode/src/instrument/instrument_x64.c            289
-rw-r--r--  include/afl-fuzz.h                                     72
-rw-r--r--  include/afl-mutations.h                              2202
-rw-r--r--  include/alloc-inl.h                                     8
-rw-r--r--  include/config.h                                        8
-rw-r--r--  instrumentation/SanitizerCoverageLTO.so.cc              2
-rw-r--r--  instrumentation/SanitizerCoveragePCGUARD.so.cc         52
-rw-r--r--  instrumentation/split-compares-pass.so.cc              19
-rw-r--r--  qemu_mode/libqasan/dlmalloc.c                           2
-rw-r--r--  src/afl-fuzz-bitmap.c                                  12
-rw-r--r--  src/afl-fuzz-init.c                                     8
-rw-r--r--  src/afl-fuzz-one.c                                    921
-rw-r--r--  src/afl-fuzz-queue.c                                    2
-rw-r--r--  src/afl-fuzz-redqueen.c                                12
-rw-r--r--  src/afl-fuzz-state.c                                    1
-rw-r--r--  src/afl-fuzz-stats.c                                   57
-rw-r--r--  src/afl-fuzz.c                                         79
-rw-r--r--  src/afl-showmap.c                                       5
-rw-r--r--  utils/afl_network_proxy/afl-network-server.c            1
35 files changed, 3668 insertions, 483 deletions
diff --git a/.custom-format.py b/.custom-format.py
index 1d5c8839..3521c05d 100755
--- a/.custom-format.py
+++ b/.custom-format.py
@@ -24,7 +24,7 @@ import importlib.metadata
 
 # string_re = re.compile('(\\"(\\\\.|[^"\\\\])*\\")') # TODO: for future use
 
-CURRENT_LLVM = os.getenv('LLVM_VERSION', 15)
+CURRENT_LLVM = os.getenv('LLVM_VERSION', 16)
 CLANG_FORMAT_BIN = os.getenv("CLANG_FORMAT_BIN", "")
 
 
diff --git a/GNUmakefile b/GNUmakefile
index 55676d97..71011858 100644
--- a/GNUmakefile
+++ b/GNUmakefile
@@ -318,7 +318,7 @@ all:	test_x86 test_shm test_python ready $(PROGS) afl-as llvm gcc_plugin test_bu
 	@echo Build Summary:
 	@test -e afl-fuzz && echo "[+] afl-fuzz and supporting tools successfully built" || echo "[-] afl-fuzz could not be built, please set CC to a working compiler"
 	@test -e afl-llvm-pass.so && echo "[+] LLVM basic mode successfully built" || echo "[-] LLVM mode could not be built, please install at least llvm-11 and clang-11 or newer, see docs/INSTALL.md"
-	@test -e SanitizerCoveragePCGUARD.so && echo "[+] LLVM mode successfully built" || echo "[-] LLVM mode could not be built, please install at least llvm-11 and clang-11 or newer, see docs/INSTALL.md"
+	@test -e SanitizerCoveragePCGUARD.so && echo "[+] LLVM mode successfully built" || echo "[-] LLVM mode could not be built, please install at least llvm-13 and clang-13 or newer, see docs/INSTALL.md"
 	@test -e SanitizerCoverageLTO.so && echo "[+] LLVM LTO mode successfully built" || echo "[-] LLVM LTO mode could not be built, it is optional, if you want it, please install LLVM and LLD 11+. More information at instrumentation/README.lto.md on how to build it"
 ifneq "$(SYS)" "Darwin"
 	@test -e afl-gcc-pass.so && echo "[+] gcc_mode successfully built" || echo "[-] gcc_mode could not be built, it is optional, install gcc-VERSION-plugin-dev to enable this"
@@ -740,7 +740,7 @@ endif
 	@echo Build Summary:
 	@test -e afl-fuzz && echo "[+] afl-fuzz and supporting tools successfully built" || echo "[-] afl-fuzz could not be built, please set CC to a working compiler"
 	@test -e afl-llvm-pass.so && echo "[+] LLVM basic mode successfully built" || echo "[-] LLVM mode could not be built, please install at least llvm-11 and clang-11 or newer, see docs/INSTALL.md"
-	@test -e SanitizerCoveragePCGUARD.so && echo "[+] LLVM mode successfully built" || echo "[-] LLVM mode could not be built, please install at least llvm-11 and clang-11 or newer, see docs/INSTALL.md"
+	@test -e SanitizerCoveragePCGUARD.so && echo "[+] LLVM mode successfully built" || echo "[-] LLVM mode could not be built, please install at least llvm-13 and clang-13 or newer, see docs/INSTALL.md"
 	@test -e SanitizerCoverageLTO.so && echo "[+] LLVM LTO mode successfully built" || echo "[-] LLVM LTO mode could not be built, it is optional, if you want it, please install LLVM 11-14. More information at instrumentation/README.lto.md on how to build it"
 ifneq "$(SYS)" "Darwin"
 	test -e afl-gcc-pass.so && echo "[+] gcc_mode successfully built" || echo "[-] gcc_mode could not be built, it is optional, install gcc-VERSION-plugin-dev to enable this"
diff --git a/README.md b/README.md
index 97fd3997..05c662c1 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
 
 Release version: [4.07c](https://github.com/AFLplusplus/AFLplusplus/releases)
 
-GitHub version: 4.07c
+GitHub version: 4.08a
 
 Repository:
 [https://github.com/AFLplusplus/AFLplusplus](https://github.com/AFLplusplus/AFLplusplus)
diff --git a/TODO.md b/TODO.md
index 26e12cee..7968452e 100644
--- a/TODO.md
+++ b/TODO.md
@@ -3,7 +3,6 @@
 ## Should
 
  - afl-crash-analysis
- - show in the UI when fuzzing is "done"
  - test cmplog for less than 16bit
  - support persistent and deferred fork server in afl-showmap?
  - better autodetection of shifting runtime timeout values
diff --git a/afl-cmin b/afl-cmin
index ae723c1b..d0bbed2b 100755
--- a/afl-cmin
+++ b/afl-cmin
@@ -318,7 +318,9 @@ BEGIN {
 
   if (!nyx_mode && target_bin && !exists_and_is_executable(target_bin)) {
 
-    "command -v "target_bin" 2>/dev/null" | getline tnew
+    cmd = "command -v "target_bin" 2>/dev/null"
+    cmd | getline tnew
+    close(cmd)
     if (!tnew || !exists_and_is_executable(tnew)) {
       print "[-] Error: binary '"target_bin"' not found or not executable." > "/dev/stderr"
       exit 1
@@ -330,6 +332,7 @@ BEGIN {
     echo "[!] Trying to obtain the map size of the target ..."
     get_map_size = "AFL_DUMP_MAP_SIZE=1 " target_bin
     get_map_size | getline mapsize
+    close(get_map_size)
     if (mapsize && mapsize > 65535 && mapsize < 100000000) {
       AFL_MAP_SIZE = "AFL_MAP_SIZE="mapsize" "
       print "[+] Setting "AFL_MAP_SIZE
@@ -359,14 +362,18 @@ BEGIN {
   system("rm -rf "trace_dir" 2>/dev/null");
   system("rm "out_dir"/id[:_]* 2>/dev/null")
 
-  "ls "out_dir"/* 2>/dev/null | wc -l" | getline noofentries
+  cmd = "ls "out_dir"/* 2>/dev/null | wc -l"
+  cmd | getline noofentries
+  close(cmd)
   if (0 == system( "test -d "out_dir" -a "noofentries" -gt 0" )) {
     print "[-] Error: directory '"out_dir"' exists and is not empty - delete it first." > "/dev/stderr"
     exit 1
   }
 
   if (threads) {
-    "nproc" | getline nproc
+    cmd = "nproc"
+    cmd | getline nproc
+    close(cmd)
     if (threads == "all") {
       threads = nproc
     } else {
@@ -386,12 +393,14 @@ BEGIN {
   if (stdin_file) {
     # truncate input file
     printf "" > stdin_file
-    close( stdin_file )
+    close(stdin_file)
   }
 
   # First we look in PATH
   if (0 == system("command -v afl-showmap >/dev/null 2>&1")) {
-    "command -v afl-showmap 2>/dev/null" | getline showmap
+    cmd = "command -v afl-showmap 2>/dev/null"
+    cmd | getline showmap
+    close(cmd)
   } else {
     # then we look in the current directory
     if (0 == system("test -x ./afl-showmap")) {
@@ -413,7 +422,9 @@ BEGIN {
   # yuck, gnu stat is option incompatible to bsd stat
   # we use a heuristic to differentiate between
   # GNU stat and other stats
-  "stat --version 2>/dev/null" | getline statversion
+  cmd = "stat --version 2>/dev/null"
+  cmd | getline statversion
+  close(cmd)
   if (statversion ~ /GNU coreutils/) {
     stat_format = "-c '%s %n'" # GNU
   } else {
@@ -432,6 +443,7 @@ BEGIN {
     infilesSmallToBigFullMap[infilesSmallToBigFull[i]] = infilesSmallToBig[i]
     i++
   }
+  close(cmdline)
   in_count = i
 
   first_file = infilesSmallToBigFull[0]
@@ -468,6 +480,7 @@ BEGIN {
     while ((getline < runtest) > 0) {
       ++first_count
     }
+    close(runtest)
 
     if (first_count) {
       print "[+] OK, "first_count" tuples recorded."
@@ -488,7 +501,7 @@ BEGIN {
 
   if (threads) {
 
-    inputsperfile = in_count / threads
+    inputsperfile = int(in_count / threads)
     if (in_count % threads) {
       inputsperfile++;
     }
@@ -513,7 +526,7 @@ BEGIN {
 
   if (threads > 1) {
 
-    print "[*] Creating " threads " parallel tasks with about " inputsperfile " each."
+    print "[*] Creating " threads " parallel tasks with about " inputsperfile " items each."
     for (i = 1; i <= threads; i++) {
 
       if (!stdin_file) {
@@ -582,6 +595,15 @@ BEGIN {
     else { print "    Processing file "cur"/"in_count }
     # create path for the trace file from afl-showmap
     tracefile_path = trace_dir"/"fn
+    # ensure the file size is not zero
+    cmd = "du -b "tracefile_path
+    "ls -l "tracefile_path
+    cmd | getline output
+    close(cmd)
+    split(output, result, "\t")
+    if (result[1] == 0) {
+      print "[!] WARNING: file "fn" is crashing the target, ignoring..."
+    }
     # gather all keys, and count them
     while ((getline line < tracefile_path) > 0) {
         key = line
@@ -643,6 +665,7 @@ BEGIN {
     }
   }
   close(sortedKeys)
+  print ""
   print "[+] Found "tuple_count" unique tuples across "in_count" files."
 
   if (out_count == 1) {
diff --git a/afl-cmin.bash b/afl-cmin.bash
index dc6d5342..1d080491 100755
--- a/afl-cmin.bash
+++ b/afl-cmin.bash
@@ -479,7 +479,7 @@ else
   echo "[+] all $THREADS running tasks completed."
   rm -f ${TMPFILE}*
 
-  echo trace dir files: $(ls $TRACE_DIR/*|wc -l)
+  #echo trace dir files: $(ls $TRACE_DIR/*|wc -l)
 
 fi
 
@@ -523,6 +523,8 @@ ls -rS "$IN_DIR" | while read -r fn; do
 
   sed "s#\$# $fn#" "$TRACE_DIR/$fn" >>"$TRACE_DIR/.candidate_list"
 
+  test -s "$TRACE_DIR/$fn" || echo "Warning: $fn is ignored because it crashes the target"
+
 done
 
 echo
diff --git a/afl-plot b/afl-plot
index 230d3bfe..f1f288a3 100755
--- a/afl-plot
+++ b/afl-plot
@@ -75,8 +75,17 @@ outputdir=`get_abs_path "$2"`
 
 if [ ! -f "$inputdir/plot_data" ]; then
 
-  echo "[-] Error: input directory is not valid (missing 'plot_data')." 1>&2
-  exit 1
+  if [ -f "$inputdir/default/plot_data" ]; then
+
+    echo "[-] Error: input directory is not valid (missing 'plot_data'), likely you mean $inputdir/default?" 1>&2
+    exit 1
+
+  else
+
+    echo "[-] Error: input directory is not valid (missing 'plot_data')." 1>&2
+    exit 1
+
+  fi
 
 fi
 
@@ -141,7 +150,7 @@ set output '$outputdir/high_freq.png'
 $GNUPLOT_SETUP
 
 plot '$inputdir/plot_data' using 1:4 with filledcurve x1 title 'corpus count' linecolor rgb '#000000' fillstyle transparent solid 0.2 noborder, \\
-     '' using 1:3 with filledcurve x1 title 'current fuzz item' linecolor rgb '#f0f0f0' fillstyle transparent solid 0.5 noborder, \\
+     '' using 1:3 with filledcurve x1 title 'current item' linecolor rgb '#f0f0f0' fillstyle transparent solid 0.5 noborder, \\
      '' using 1:5 with lines title 'pending items' linecolor rgb '#0090ff' linewidth 3, \\
      '' using 1:6 with lines title 'pending favs' linecolor rgb '#c00080' linewidth 3, \\
      '' using 1:2 with lines title 'cycles done' linecolor rgb '#c000f0' linewidth 3
diff --git a/custom_mutators/README.md b/custom_mutators/README.md
index a5a572c0..2d1220b3 100644
--- a/custom_mutators/README.md
+++ b/custom_mutators/README.md
@@ -70,14 +70,17 @@ requires cmake (among other things):
 
 ### libprotobuf Mutators
 
-There are two WIP protobuf projects, that require work to be working though:
+There are three WIP protobuf projects that still require some work to be usable, though:
+
+ASN.1 example:
+[https://github.com/airbus-seclab/AFLplusplus-blogpost/tree/main/src/mutator](https://github.com/airbus-seclab/AFLplusplus-blogpost/tree/main/src/mutator)
 
 transforms protobuf raw:
-https://github.com/bruce30262/libprotobuf-mutator_fuzzing_learning/tree/master/4_libprotobuf_aflpp_custom_mutator
+[https://github.com/bruce30262/libprotobuf-mutator_fuzzing_learning/tree/master/4_libprotobuf_aflpp_custom_mutator](https://github.com/bruce30262/libprotobuf-mutator_fuzzing_learning/tree/master/4_libprotobuf_aflpp_custom_mutator)
 
 has a transform function you need to fill for your protobuf format, however
 needs to be ported to the updated AFL++ custom mutator API (not much work):
-https://github.com/thebabush/afl-libprotobuf-mutator
+[https://github.com/thebabush/afl-libprotobuf-mutator](https://github.com/thebabush/afl-libprotobuf-mutator)
 
 same as above but is for current AFL++:
-https://github.com/P1umer/AFLplusplus-protobuf-mutator
+[https://github.com/P1umer/AFLplusplus-protobuf-mutator](https://github.com/P1umer/AFLplusplus-protobuf-mutator)
\ No newline at end of file
diff --git a/custom_mutators/aflpp/Makefile b/custom_mutators/aflpp/Makefile
new file mode 100644
index 00000000..8efdf3e4
--- /dev/null
+++ b/custom_mutators/aflpp/Makefile
@@ -0,0 +1,10 @@
+
+CFLAGS = -O3 -funroll-loops -fPIC -Wl,-Bsymbolic
+
+all: aflpp-mutator.so
+
+aflpp-mutator.so:	aflpp.c
+	$(CC) $(CFLAGS) -I../../include -I. -shared -o aflpp-mutator.so aflpp.c ../../src/afl-performance.c
+
+clean:
+	rm -f *.o *~ *.so core
diff --git a/custom_mutators/aflpp/README.md b/custom_mutators/aflpp/README.md
new file mode 100644
index 00000000..04d605c1
--- /dev/null
+++ b/custom_mutators/aflpp/README.md
@@ -0,0 +1,8 @@
+# custom mutator: AFL++
+
+This is the AFL++ havoc mutator as a custom mutator module for AFL++.
+
+Just type `make` to build.
+
+```AFL_CUSTOM_MUTATOR_LIBRARY=custom_mutators/aflpp/aflpp-mutator.so afl-fuzz ...```
+
diff --git a/custom_mutators/aflpp/aflpp.c b/custom_mutators/aflpp/aflpp.c
new file mode 100644
index 00000000..e15d0391
--- /dev/null
+++ b/custom_mutators/aflpp/aflpp.c
@@ -0,0 +1,89 @@
+#include "afl-mutations.h"
+
+typedef struct my_mutator {
+
+  afl_state_t *afl;
+  u8          *buf;
+  u32          buf_size;
+
+} my_mutator_t;
+
+my_mutator_t *afl_custom_init(afl_state_t *afl, unsigned int seed) {
+
+  (void)seed;
+
+  my_mutator_t *data = calloc(1, sizeof(my_mutator_t));
+  if (!data) {
+
+    perror("afl_custom_init alloc");
+    return NULL;
+
+  }
+
+  if ((data->buf = malloc(MAX_FILE)) == NULL) {
+
+    perror("afl_custom_init alloc");
+    return NULL;
+
+  } else {
+
+    data->buf_size = MAX_FILE;
+
+  }
+
+  data->afl = afl;
+
+  return data;
+
+}
+
+/* here we run the AFL++ mutator, which is the best! */
+
+size_t afl_custom_fuzz(my_mutator_t *data, uint8_t *buf, size_t buf_size,
+                       u8 **out_buf, uint8_t *add_buf, size_t add_buf_size,
+                       size_t max_size) {
+
+  if (max_size > data->buf_size) {
+
+    u8 *ptr = realloc(data->buf, max_size);
+
+    if (!ptr) {
+
+      return 0;
+
+    } else {
+
+      data->buf = ptr;
+      data->buf_size = max_size;
+
+    }
+
+  }
+
+  u32 havoc_steps = 1 + rand_below(data->afl, 16);
+
+  /* set everything up, costly ... :( */
+  memcpy(data->buf, buf, buf_size);
+
+  /* the mutation */
+  u32 out_buf_len = afl_mutate(data->afl, data->buf, buf_size, havoc_steps,
+                               false, true, add_buf, add_buf_size, max_size);
+
+  /* return size of mutated data */
+  *out_buf = data->buf;
+  return out_buf_len;
+
+}
+
+/**
+ * Deinitialize everything
+ *
+ * @param data The data ptr from afl_custom_init
+ */
+void afl_custom_deinit(my_mutator_t *data) {
+
+  free(data->buf);
+  free(data);
+
+}
+
diff --git a/custom_mutators/aflpp/standalone/Makefile b/custom_mutators/aflpp/standalone/Makefile
new file mode 100644
index 00000000..f1e99445
--- /dev/null
+++ b/custom_mutators/aflpp/standalone/Makefile
@@ -0,0 +1,10 @@
+
+CFLAGS = -O3 -funroll-loops -fPIC
+
+all: aflpp-standalone
+
+aflpp-standalone:	aflpp-standalone.c
+	$(CC) $(CFLAGS) -I../../../include -I. -o aflpp-standalone aflpp-standalone.c ../../../src/afl-performance.c
+
+clean:
+	rm -f *.o *~ aflpp-standalone core
diff --git a/custom_mutators/aflpp/standalone/README.md b/custom_mutators/aflpp/standalone/README.md
new file mode 100644
index 00000000..a1ffb5f9
--- /dev/null
+++ b/custom_mutators/aflpp/standalone/README.md
@@ -0,0 +1,10 @@
+# AFL++ standalone mutator
+
+This is the AFL++ havoc mutator as a standalone command line tool.
+
+Just type `make` to build.
+
+```
+aflpp-standalone inputfile outputfile [splicefile]
+```
+
diff --git a/custom_mutators/aflpp/standalone/aflpp-standalone.c b/custom_mutators/aflpp/standalone/aflpp-standalone.c
new file mode 100644
index 00000000..91bac4a8
--- /dev/null
+++ b/custom_mutators/aflpp/standalone/aflpp-standalone.c
@@ -0,0 +1,165 @@
+#include "afl-mutations.h"
+
+s8  interesting_8[] = {INTERESTING_8};
+s16 interesting_16[] = {INTERESTING_8, INTERESTING_16};
+s32 interesting_32[] = {INTERESTING_8, INTERESTING_16, INTERESTING_32};
+
+typedef struct my_mutator {
+
+  afl_state_t *afl;
+  u8          *buf;
+  u32          buf_size;
+
+} my_mutator_t;
+
+my_mutator_t *afl_custom_init(afl_state_t *afl, unsigned int seed) {
+
+  (void)seed;
+
+  my_mutator_t *data = calloc(1, sizeof(my_mutator_t));
+  if (!data) {
+
+    perror("afl_custom_init alloc");
+    return NULL;
+
+  }
+
+  if ((data->buf = malloc(1024*1024)) == NULL) {
+
+    perror("afl_custom_init alloc");
+    return NULL;
+
+  } else {
+
+    data->buf_size = 1024*1024;
+
+  }
+
+  /* fake AFL++ state */
+  data->afl = calloc(1, sizeof(afl_state_t));
+  data->afl->queue_cycle = 1;
+  data->afl->fsrv.dev_urandom_fd = open("/dev/urandom", O_RDONLY);
+  rand_set_seed(data->afl, getpid());
+
+  return data;
+
+}
+
+/* here we run the AFL++ mutator, which is the best! */
+
+size_t afl_custom_fuzz(my_mutator_t *data, uint8_t *buf, size_t buf_size,
+                       u8 **out_buf, uint8_t *add_buf, size_t add_buf_size,
+                       size_t max_size) {
+
+  if (max_size > data->buf_size) {
+
+    u8 *ptr = realloc(data->buf, max_size);
+
+    if (!ptr) {
+
+      return 0;
+
+    } else {
+
+      data->buf = ptr;
+      data->buf_size = max_size;
+
+    }
+
+  }
+
+  u32 havoc_steps = 1 + rand_below(data->afl, 16);
+
+  /* set everything up, costly ... :( */
+  memcpy(data->buf, buf, buf_size);
+
+  /* the mutation */
+  u32 out_buf_len = afl_mutate(data->afl, data->buf, buf_size, havoc_steps,
+                               false, true, add_buf, add_buf_size, max_size);
+
+  /* return size of mutated data */
+  *out_buf = data->buf;
+  return out_buf_len;
+
+}
+
+int main(int argc, char *argv[]) {
+
+  if (argc > 1 && strncmp(argv[1], "-h", 2) == 0) {
+    printf("Syntax: %s [-v] [inputfile [outputfile [splicefile]]]\n\n", argv[0]);
+    printf("Reads a testcase from stdin when no input file (or '-') is specified,\n");
+    printf("mutates according to AFL++'s mutation engine, and writes to stdout when '-' or\n");
+    printf("no output filename is given. As an optional third parameter you can give a file\n");
+    printf("for splicing. Maximum input and output length is 1MB.\n");
+    printf("The -v verbose option prints debug output to stderr.\n");
+    return 0;
+  }
+
+  FILE *in = stdin, *out = stdout, *splice = NULL;
+  unsigned char *inbuf = malloc(1024 * 1024), *outbuf, *splicebuf = NULL;
+  int verbose = 0, splicelen = 0;
+
+  if (argc > 1 && strcmp(argv[1], "-v") == 0) {
+    verbose = 1;
+    argc--;
+    argv++;
+    fprintf(stderr, "Verbose active\n");
+  }
+
+  my_mutator_t *data = afl_custom_init(NULL, 0);
+
+  if (argc > 1 && strcmp(argv[1], "-") != 0) {
+    if ((in = fopen(argv[1], "r")) == NULL) {
+      perror(argv[1]);
+      return -1;
+    }
+    if (verbose) fprintf(stderr, "Input: %s\n", argv[1]);
+  }
+
+  size_t inlen = fread(inbuf, 1, 1024*1024, in);
+  
+  if (!inlen) {
+    fprintf(stderr, "Error: empty file %s\n", argv[1] ? argv[1] : "stdin");
+    return -1;
+  }
+
+  if (argc > 2 && strcmp(argv[2], "-") != 0) {
+    if ((out = fopen(argv[2], "w")) == NULL) {
+      perror(argv[2]);
+      return -1;
+    }
+    if (verbose) fprintf(stderr, "Output: %s\n", argv[2]);
+  }
+
+  if (argc > 3) {
+    if ((splice = fopen(argv[3], "r")) == NULL) {
+      perror(argv[3]);
+      return -1;
+    }
+    if (verbose) fprintf(stderr, "Splice: %s\n", argv[3]);
+    splicebuf = malloc(1024*1024);
+    splicelen = fread(splicebuf, 1, 1024*1024, splice);
+    if (!splicelen) {
+      fprintf(stderr, "Error: empty file %s\n", argv[3]);
+      return -1;
+    }
+    if (verbose) fprintf(stderr, "Mutation splice length: %d\n", splicelen);
+  }
+
+  if (verbose) fprintf(stderr, "Mutation input length: %zu\n", inlen);
+  unsigned int outlen = afl_custom_fuzz(data, inbuf, inlen, &outbuf, splicebuf, splicelen, 1024*1024);
+
+  if (outlen == 0 || !outbuf) {
+    fprintf(stderr, "Error: no mutation data returned.\n");
+    return -1;
+  }
+
+  if (verbose) fprintf(stderr, "Mutation output length: %u\n", outlen);
+
+  if (fwrite(outbuf, 1, outlen, out) != outlen) {
+    fprintf(stderr, "Warning: incomplete write.\n");
+    return -1;
+  }
+  
+  return 0;
+}
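
Both aflpp.c and the standalone tool above grow their scratch buffer on demand before copying the input in; the subtle point is that the grow path must only bail out when realloc() actually fails. A minimal sketch of that pattern, with an illustrative helper name (grow_buf) that is not part of this diff:

```c
#include <stdlib.h>

/* Illustrative only: make sure buf can hold at least `want` bytes.
   Returns the (possibly moved) buffer, or NULL if realloc() failed,
   in which case the caller should return 0 (no mutation produced). */
static unsigned char *grow_buf(unsigned char *buf, size_t *cap, size_t want) {

  if (want <= *cap) { return buf; }              /* already big enough */

  unsigned char *ptr = realloc(buf, want);       /* may move the block */
  if (!ptr) { return NULL; }                     /* fail only on OOM   */

  *cap = want;
  return ptr;

}
```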
diff --git a/docs/Changelog.md b/docs/Changelog.md
index c52ddd56..e6b90d3d 100644
--- a/docs/Changelog.md
+++ b/docs/Changelog.md
@@ -3,6 +3,30 @@
   This is the list of all noteworthy changes made in every public
   release of the tool. See README.md for the general instruction manual.
 
+### Version ++4.08a (dev)
+  - afl-fuzz:
+    - new mutation engine: mutations that favor discovering more paths are
+      preferred until there are no new finds for 10 minutes, then it switches
+      to mutations that favor triggering crashes. Modes and switch time can be
+      configured with `-P`.
+    - new custom mutator that contains the new AFL++ engine (so it can easily
+      be incorporated into new custom mutators), and it also comes with a
+      standalone command line tool! See custom_mutators/aflpp/standalone/
+    - display the state of the fuzzing run in the UI :-)
+    - fix timeout setting if '+' is used or a session is restarted
+  - afl-cmin/afl-cmin.bash:
+    - fixed a bug inherited from vanilla AFL where a coverage of
+      map[123] = 11 would be the same as map[1123] = 1
+    - warn on crashing inputs
+  - afl-cc:
+    - fixed an off-by-one instrumentation of iselect, hurting coverage a bit.
+      Thanks to @amykweon for spotting and fixing!
+    - @toka fixed a bug in laf-intel signed integer comparison splitting,
+      thanks a lot!!
+  - frida_mode:
+    - support for long form instrumentation on x86_64 and arm64
+
+
 ### Version ++4.07c (release)
   - afl-fuzz:
     - reverse reading the seeds only on restarts (increases performance)
diff --git a/docs/tutorials.md b/docs/tutorials.md
index 342080fd..a5ee3322 100644
--- a/docs/tutorials.md
+++ b/docs/tutorials.md
@@ -8,6 +8,7 @@ Here are some good write-ups to show how to effectively use AFL++:
 
 * [https://aflplus.plus/docs/tutorials/libxml2_tutorial/](https://aflplus.plus/docs/tutorials/libxml2_tutorial/)
 * [https://bananamafia.dev/post/gb-fuzz/](https://bananamafia.dev/post/gb-fuzz/)
+* [https://bushido-sec.com/index.php/2023/06/19/the-art-of-fuzzing/](https://bushido-sec.com/index.php/2023/06/19/the-art-of-fuzzing/)
 * [https://securitylab.github.com/research/fuzzing-challenges-solutions-1](https://securitylab.github.com/research/fuzzing-challenges-solutions-1)
 * [https://securitylab.github.com/research/fuzzing-software-2](https://securitylab.github.com/research/fuzzing-software-2)
 * [https://securitylab.github.com/research/fuzzing-sockets-FTP](https://securitylab.github.com/research/fuzzing-sockets-FTP)
@@ -20,6 +21,10 @@ training, then we can highly recommend the following:
 
 * [https://github.com/antonio-morales/Fuzzing101](https://github.com/antonio-morales/Fuzzing101)
 
+Here is a good workflow description (and tutorial) for qemu_mode:
+
+* [https://airbus-seclab.github.io/AFLplusplus-blogpost/](https://airbus-seclab.github.io/AFLplusplus-blogpost/)
+
 Here is good workflow description for frida_mode:
 
 * [https://blog.quarkslab.com/android-greybox-fuzzing-with-afl-frida-mode.html](https://blog.quarkslab.com/android-greybox-fuzzing-with-afl-frida-mode.html)
diff --git a/frida_mode/src/instrument/instrument_x64.c b/frida_mode/src/instrument/instrument_x64.c
index 8338f8e7..3983c3ba 100644
--- a/frida_mode/src/instrument/instrument_x64.c
+++ b/frida_mode/src/instrument/instrument_x64.c
@@ -58,6 +58,7 @@ typedef union {
 } jcc_insn;
 
 static GHashTable *coverage_blocks = NULL;
+static GHashTable *coverage_blocks_long = NULL;
 
 gboolean instrument_is_coverage_optimize_supported(void) {
 
@@ -127,6 +128,64 @@ typedef struct {
 
 } afl_log_code_asm_t;
 
+typedef struct {
+
+  // cur_location = (block_address >> 4) ^ (block_address << 8);
+  // shared_mem[cur_location ^ prev_location]++;
+  // prev_location = cur_location >> 1;
+
+  // mov    QWORD PTR [rsp-0x88],rax
+  // lahf
+  // mov    QWORD PTR [rsp-0x90],rax
+  // mov    QWORD PTR [rsp-0x98],rbx
+
+  // mov    rax, 0xXXXXXXXXXXXXXXXXX                          /* p_prev_loc */
+  // mov    eax, dword ptr [rax]                                /* prev_loc */
+  // xor    eax,0x3f77                                           /* cur_loc */
+
+  // mov    rbx, 0xXXXXXXXXXXXXXXXXX                                 /* map */
+  // add    rax,rbx
+
+  // mov    bl,BYTE PTR [rax]
+  // add    bl,0x1
+  // adc    bl,0x0
+  // mov    BYTE PTR [rax],bl
+
+  // mov    rax, 0xXXXXXXXXXXXXXXXXX                          /* p_prev_loc */
+  // mov    dword ptr [rax], 0xXXXXXXXXX                        /* prev_loc */
+
+  // mov    rbx,QWORD PTR [rsp-0x98]
+  // mov    rax,QWORD PTR [rsp-0x90]
+  // sahf
+  // mov    rax,QWORD PTR [rsp-0x88]
+
+  uint8_t mov_rax_rsp_88[8];
+  uint8_t lahf;
+  uint8_t mov_rax_rsp_90[8];
+  uint8_t mov_rbx_rsp_98[8];
+
+  uint8_t mov_rax_prev_loc_ptr1[10];
+  uint8_t mov_eax_prev_loc[2];
+  uint8_t xor_eax_curr_loc[5];
+
+  uint8_t mov_rbx_map_ptr[10];
+  uint8_t add_rax_rbx[3];
+
+  uint8_t mov_rbx_ptr_rax[2];
+  uint8_t add_bl_1[3];
+  uint8_t adc_bl_0[3];
+  uint8_t mov_ptr_rax_rbx[2];
+
+  uint8_t mov_rax_prev_loc_ptr2[10];
+  uint8_t mov_prev_loc_curr_loc_shr1[6];
+
+  uint8_t mov_rsp_98_rbx[8];
+  uint8_t mov_rsp_90_rax[8];
+  uint8_t sahf;
+  uint8_t mov_rsp_88_rax[8];
+
+} afl_log_code_asm_long_t;
+
   #pragma pack(pop)
 
 static const afl_log_code_asm_t template =
@@ -158,6 +217,41 @@ static const afl_log_code_asm_t template =
 
 ;
 
+static const afl_log_code_asm_long_t template_long =
+    {
+
+        .mov_rax_rsp_88 = {0x48, 0x89, 0x84, 0x24, 0x78, 0xFF, 0xFF, 0xFF},
+        .lahf = 0x9f,
+        .mov_rax_rsp_90 = {0x48, 0x89, 0x84, 0x24, 0x70, 0xFF, 0xFF, 0xFF},
+        .mov_rbx_rsp_98 = {0x48, 0x89, 0x9C, 0x24, 0x68, 0xFF, 0xFF, 0xFF},
+
+        .mov_rax_prev_loc_ptr1 = {0x48, 0xB8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+                                  0xFF, 0xFF, 0xFF},
+        .mov_eax_prev_loc = {0x8b, 0x00},
+        .xor_eax_curr_loc = {0x35},
+
+        .mov_rbx_map_ptr = {0x48, 0xBB, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+                            0xFF, 0xFF},
+        .add_rax_rbx = {0x48, 0x01, 0xd8},
+
+        .mov_rbx_ptr_rax = {0x8a, 0x18},
+        .add_bl_1 = {0x80, 0xc3, 0x01},
+        .adc_bl_0 = {0x80, 0xd3, 0x00},
+        .mov_ptr_rax_rbx = {0x88, 0x18},
+
+        .mov_rax_prev_loc_ptr2 = {0x48, 0xB8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+                                  0xFF, 0xFF, 0xFF},
+        .mov_prev_loc_curr_loc_shr1 = {0xc7, 0x00, 0xFF, 0xFF, 0xFF, 0xFF},
+
+        .mov_rsp_98_rbx = {0x48, 0x8B, 0x9C, 0x24, 0x68, 0xFF, 0xFF, 0xFF},
+        .mov_rsp_90_rax = {0x48, 0x8B, 0x84, 0x24, 0x70, 0xFF, 0xFF, 0xFF},
+        .sahf = 0x9e,
+        .mov_rsp_88_rax = {0x48, 0x8B, 0x84, 0x24, 0x78, 0xFF, 0xFF, 0xFF},
+
+}
+
+;
+
 typedef union {
 
   afl_log_code_asm_t code;
@@ -165,6 +259,13 @@ typedef union {
 
 } afl_log_code;
 
+typedef union {
+
+  afl_log_code_asm_long_t code;
+  uint8_t                 bytes[0];
+
+} afl_log_code_long;
+
 void instrument_coverage_optimize_init(void) {
 
   FVERBOSE("__afl_area_ptr: %p", __afl_area_ptr);
@@ -182,16 +283,19 @@ static void instrument_coverage_switch_insn(GumStalkerObserver *self,
 
   cs_x86    *x86;
   cs_x86_op *op;
+  bool       is_short = false;
+  bool       is_long = false;
+
   if (from_insn == NULL) { return; }
 
   x86 = &from_insn->detail->x86;
   op = x86->operands;
 
-  if (!g_hash_table_contains(coverage_blocks, GSIZE_TO_POINTER(*target))) {
-
-    return;
+  is_short = g_hash_table_contains(coverage_blocks, GSIZE_TO_POINTER(*target));
+  is_long =
+      g_hash_table_contains(coverage_blocks_long, GSIZE_TO_POINTER(*target));
 
-  }
+  if (!is_short && !is_long) { return; }
 
   switch (from_insn->id) {
 
@@ -212,15 +316,41 @@ static void instrument_coverage_switch_insn(GumStalkerObserver *self,
 
       break;
     case X86_INS_RET:
-      instrument_cache_insert(start_address,
-                              (guint8 *)*target + sizeof(afl_log_code));
+      if (is_short) {
+
+        instrument_cache_insert(start_address,
+                                (guint8 *)*target + sizeof(afl_log_code));
+
+      } else if (is_long) {
+
+        instrument_cache_insert(start_address,
+                                (guint8 *)*target + sizeof(afl_log_code_long));
+
+      } else {
+
+        FATAL("Something has gone wrong here!");
+
+      }
+
       break;
     default:
       return;
 
   }
 
-  *target = (guint8 *)*target + sizeof(afl_log_code);
+  if (is_short) {
+
+    *target = (guint8 *)*target + sizeof(afl_log_code);
+
+  } else if (is_long) {
+
+    *target = (guint8 *)*target + sizeof(afl_log_code_long);
+
+  } else {
+
+    FATAL("Something has gone wrong here!");
+
+  }
 
 }
 
@@ -270,22 +400,22 @@ static void instrument_coverage_suppress_init(void) {
 
   }
 
+  coverage_blocks_long = g_hash_table_new(g_direct_hash, g_direct_equal);
+  if (coverage_blocks_long == NULL) {
+
+    FATAL("Failed to g_hash_table_new, errno: %d", errno);
+
+  }
+
 }
 
-static void instrument_coverage_write(GumAddress        address,
-                                      GumStalkerOutput *output) {
+bool instrument_write_inline(GumX86Writer *cw, GumAddress code_addr,
+                             guint32 area_offset, guint32 area_offset_ror) {
 
-  afl_log_code  code = {0};
-  GumX86Writer *cw = output->writer.x86;
-  guint64       area_offset = instrument_get_offset_hash(address);
-  gsize         map_size_pow2;
-  gsize         area_offset_ror;
-  GumAddress    code_addr = cw->pc;
+  afl_log_code code = {0};
 
   code.code = template;
 
-  /* mov_prev_loc_curr_loc_shr1 */
-
   gssize prev_loc_value =
       GPOINTER_TO_SIZE(instrument_previous_pc_addr) -
       (code_addr + offsetof(afl_log_code, code.mov_prev_loc_curr_loc_shr1) +
@@ -294,11 +424,7 @@ static void instrument_coverage_write(GumAddress        address,
       offsetof(afl_log_code, code.mov_prev_loc_curr_loc_shr1) +
       sizeof(code.code.mov_prev_loc_curr_loc_shr1) - sizeof(gint) -
       sizeof(guint32);
-  if (!instrument_coverage_in_range(prev_loc_value)) {
-
-    FATAL("Patch out of range (current_pc_value1): 0x%016lX", prev_loc_value);
-
-  }
+  if (!instrument_coverage_in_range(prev_loc_value)) { return false; }
 
   *((gint *)&code.bytes[prev_loc_value_offset]) = (gint)prev_loc_value;
 
@@ -311,11 +437,7 @@ static void instrument_coverage_write(GumAddress        address,
   gssize prev_loc_value_offset2 =
       offsetof(afl_log_code, code.mov_eax_prev_loc) +
       sizeof(code.code.mov_eax_prev_loc) - sizeof(gint);
-  if (!instrument_coverage_in_range(prev_loc_value)) {
-
-    FATAL("Patch out of range (current_pc_value1): 0x%016lX", prev_loc_value2);
-
-  }
+  if (!instrument_coverage_in_range(prev_loc_value)) { return false; }
 
   *((gint *)&code.bytes[prev_loc_value_offset2]) = (gint)prev_loc_value2;
 
@@ -338,12 +460,7 @@ static void instrument_coverage_write(GumAddress        address,
        (code_addr + offsetof(afl_log_code, code.lea_rbx_area_ptr) +
         sizeof(code.code.lea_rbx_area_ptr)));
 
-  if (!instrument_coverage_in_range(lea_rbx_area_ptr_value)) {
-
-    FATAL("Patch out of range (lea_rbx_area_ptr_value): 0x%016lX",
-          lea_rbx_area_ptr_value);
-
-  }
+  if (!instrument_coverage_in_range(lea_rbx_area_ptr_value)) { return false; }
 
   *((guint32 *)&code.bytes[lea_rbx_area_ptr_offset]) = lea_rbx_area_ptr_value;
 
@@ -353,12 +470,100 @@ static void instrument_coverage_write(GumAddress        address,
       offsetof(afl_log_code, code.mov_prev_loc_curr_loc_shr1) +
       sizeof(code.code.mov_prev_loc_curr_loc_shr1) - sizeof(guint32);
 
-  map_size_pow2 = util_log2(__afl_map_size);
-  area_offset_ror = util_rotate(area_offset, 1, map_size_pow2);
-
   *((guint32 *)&code.bytes[curr_loc_shr_1_offset]) = (guint32)(area_offset_ror);
 
+  if (instrument_suppress) {
+
+    if (!g_hash_table_add(coverage_blocks, GSIZE_TO_POINTER(cw->code))) {
+
+      FATAL("Failed - g_hash_table_add");
+
+    }
+
+  }
+
   gum_x86_writer_put_bytes(cw, code.bytes, sizeof(afl_log_code));
+  return true;
+
+}
+
+bool instrument_write_inline_long(GumX86Writer *cw, guint32 area_offset,
+                                  guint32 area_offset_ror) {
+
+  afl_log_code_long code = {0};
+  code.code = template_long;
+
+  /* mov_rax_prev_loc_ptr1 */
+  gssize mov_rax_prev_loc_ptr1_offset =
+      offsetof(afl_log_code_long, code.mov_rax_prev_loc_ptr1) +
+      sizeof(code.code.mov_rax_prev_loc_ptr1) - sizeof(gsize);
+  *((gsize *)&code.bytes[mov_rax_prev_loc_ptr1_offset]) =
+      GPOINTER_TO_SIZE(instrument_previous_pc_addr);
+
+  /* xor_eax_curr_loc */
+  gssize xor_eax_curr_loc_offset =
+      offsetof(afl_log_code_long, code.xor_eax_curr_loc) +
+      sizeof(code.code.xor_eax_curr_loc) - sizeof(guint32);
+  *((guint32 *)&code.bytes[xor_eax_curr_loc_offset]) = area_offset;
+
+  /* mov_rbx_map_ptr */
+  gsize mov_rbx_map_ptr_offset =
+      offsetof(afl_log_code_long, code.mov_rbx_map_ptr) +
+      sizeof(code.code.mov_rbx_map_ptr) - sizeof(gsize);
+  *((gsize *)&code.bytes[mov_rbx_map_ptr_offset]) =
+      GPOINTER_TO_SIZE(__afl_area_ptr);
+
+  /* mov_rax_prev_loc_ptr2 */
+  gssize mov_rax_prev_loc_ptr2_offset =
+      offsetof(afl_log_code_long, code.mov_rax_prev_loc_ptr2) +
+      sizeof(code.code.mov_rax_prev_loc_ptr2) - sizeof(gsize);
+  *((gsize *)&code.bytes[mov_rax_prev_loc_ptr2_offset]) =
+      GPOINTER_TO_SIZE(instrument_previous_pc_addr);
+
+  /* mov_prev_loc_curr_loc_shr1 */
+  gssize mov_prev_loc_curr_loc_shr1_offset =
+      offsetof(afl_log_code_long, code.mov_prev_loc_curr_loc_shr1) +
+      sizeof(code.code.mov_prev_loc_curr_loc_shr1) - sizeof(guint32);
+  *((guint32 *)&code.bytes[mov_prev_loc_curr_loc_shr1_offset]) =
+      (guint32)(area_offset_ror);
+
+  if (instrument_suppress) {
+
+    if (!g_hash_table_add(coverage_blocks_long, GSIZE_TO_POINTER(cw->code))) {
+
+      FATAL("Failed - g_hash_table_add");
+
+    }
+
+  }
+
+  gum_x86_writer_put_bytes(cw, code.bytes, sizeof(afl_log_code_long));
+  return true;
+
+}
+
+static void instrument_coverage_write(GumAddress        address,
+                                      GumStalkerOutput *output) {
+
+  GumX86Writer *cw = output->writer.x86;
+  guint64       area_offset = (guint32)instrument_get_offset_hash(address);
+  gsize         map_size_pow2;
+  guint32       area_offset_ror;
+  GumAddress    code_addr = cw->pc;
+
+  map_size_pow2 = util_log2(__afl_map_size);
+  area_offset_ror = (guint32)util_rotate(instrument_get_offset_hash(address), 1,
+                                         map_size_pow2);
+
+  if (!instrument_write_inline(cw, code_addr, area_offset, area_offset_ror)) {
+
+    if (!instrument_write_inline_long(cw, area_offset, area_offset_ror)) {
+
+      FATAL("Failed to write inline instrumentation");
+
+    }
+
+  }
 
 }
 
@@ -380,17 +585,7 @@ void instrument_coverage_optimize(const cs_insn    *instr,
 
   }
 
-  if (instrument_suppress) {
-
-    instrument_coverage_suppress_init();
-
-    if (!g_hash_table_add(coverage_blocks, GSIZE_TO_POINTER(cw->code))) {
-
-      FATAL("Failed - g_hash_table_add");
-
-    }
-
-  }
+  if (instrument_suppress) { instrument_coverage_suppress_init(); }
 
   instrument_coverage_write(GUM_ADDRESS(instr->address), output);
 
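
The byte sequences in template_long encode the same classic AFL edge update as the short template, only with absolute 64-bit addresses instead of RIP-relative ones (which is why it is used as the fallback when instrument_coverage_in_range() fails). A rough C model of what the emitted instructions do; the real code patches the raw bytes above and hashes the block address via instrument_get_offset_hash(), so the names here are purely illustrative:

```c
/* Illustrative model of the inlined coverage update, not the emitted code. */
static unsigned char *area_ptr;   /* __afl_area_ptr: shared coverage map     */
static unsigned int   prev_loc;   /* *instrument_previous_pc_addr            */

static void edge_update(unsigned int cur_loc) {

  unsigned char *p = &area_ptr[cur_loc ^ prev_loc];

  /* "add bl,0x1; adc bl,0x0": increment, but wrap 255 -> 1 instead of 0,
     so a hot edge can never read as untaken (the NeverZero trick).       */
  unsigned int v = *p + 1;
  *p = (unsigned char)(v + (v >> 8));

  prev_loc = cur_loc >> 1;

}
```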
diff --git a/include/afl-fuzz.h b/include/afl-fuzz.h
index beb2de2a..c6c45fbd 100644
--- a/include/afl-fuzz.h
+++ b/include/afl-fuzz.h
@@ -494,7 +494,8 @@ typedef struct afl_state {
       *orig_cmdline,                    /* Original command line            */
       *infoexec;                       /* Command to execute on a new crash */
 
-  u32 hang_tmout;                       /* Timeout used for hang det (ms)   */
+  u32 hang_tmout,                       /* Timeout used for hang det (ms)   */
+      stats_update_freq;                /* Stats update frequency (execs)   */
 
   u8 havoc_stack_pow2,                  /* HAVOC_STACK_POW2                 */
       no_unlink,                        /* do not unlink cur_input          */
@@ -503,40 +504,37 @@ typedef struct afl_state {
       custom_splice_optout,             /* Custom mutator no splice buffer  */
       is_main_node,                     /* if this is the main node         */
       is_secondary_node,                /* if this is a secondary instance  */
-      pizza_is_served;                  /* pizza mode                       */
-
-  u32 stats_update_freq;                /* Stats update frequency (execs)   */
-
-  u8 schedule;                          /* Power schedule (default: EXPLORE)*/
-  u8 havoc_max_mult;
-
-  u8 skip_deterministic,                /* Skip deterministic stages?       */
-      use_splicing,                     /* Recombine input files?           */
-      non_instrumented_mode,            /* Run in non-instrumented mode?    */
-      score_changed,                    /* Scoring for favorites changed?   */
-      resuming_fuzz,                    /* Resuming an older fuzzing job?   */
-      timeout_given,                    /* Specific timeout given?          */
-      not_on_tty,                       /* stdout is not a tty              */
-      term_too_small,                   /* terminal dimensions too small    */
-      no_forkserver,                    /* Disable forkserver?              */
-      crash_mode,                       /* Crash mode! Yeah!                */
-      in_place_resume,                  /* Attempt in-place resume?         */
-      autoresume,                       /* Resume if afl->out_dir exists?   */
-      auto_changed,                     /* Auto-generated tokens changed?   */
-      no_cpu_meter_red,                 /* Feng shui on the status screen   */
-      no_arith,                         /* Skip most arithmetic ops         */
-      shuffle_queue,                    /* Shuffle input queue?             */
-      bitmap_changed,                   /* Time to update bitmap?           */
-      unicorn_mode,                     /* Running in Unicorn mode?         */
-      use_wine,                         /* Use WINE with QEMU mode          */
-      skip_requested,                   /* Skip request, via SIGUSR1        */
-      run_over10m,                      /* Run time over 10 minutes?        */
-      persistent_mode,                  /* Running in persistent mode?      */
-      deferred_mode,                    /* Deferred forkserver mode?        */
-      fixed_seed,                       /* do not reseed                    */
-      fast_cal,                         /* Try to calibrate faster?         */
-      disable_trim,                     /* Never trim in fuzz_one           */
-      shmem_testcase_mode,              /* If sharedmem testcases are used  */
+      pizza_is_served,                  /* pizza mode                       */
+      text_input,                       /* target wants text inputs         */
+      fuzz_mode, /* current mode: coverage/exploration or crash/exploitation */
+      schedule,                         /* Power schedule (default: EXPLORE)*/
+      havoc_max_mult, skip_deterministic, /* Skip deterministic stages?       */
+      use_splicing,                       /* Recombine input files?           */
+      non_instrumented_mode,              /* Run in non-instrumented mode?    */
+      score_changed,                      /* Scoring for favorites changed?   */
+      resuming_fuzz,                      /* Resuming an older fuzzing job?   */
+      timeout_given,                      /* Specific timeout given?          */
+      not_on_tty,                         /* stdout is not a tty              */
+      term_too_small,                     /* terminal dimensions too small    */
+      no_forkserver,                      /* Disable forkserver?              */
+      crash_mode,                         /* Crash mode! Yeah!                */
+      in_place_resume,                    /* Attempt in-place resume?         */
+      autoresume,                         /* Resume if afl->out_dir exists?   */
+      auto_changed,                       /* Auto-generated tokens changed?   */
+      no_cpu_meter_red,                   /* Feng shui on the status screen   */
+      no_arith,                           /* Skip most arithmetic ops         */
+      shuffle_queue,                      /* Shuffle input queue?             */
+      bitmap_changed,                     /* Time to update bitmap?           */
+      unicorn_mode,                       /* Running in Unicorn mode?         */
+      use_wine,                           /* Use WINE with QEMU mode          */
+      skip_requested,                     /* Skip request, via SIGUSR1        */
+      run_over10m,                        /* Run time over 10 minutes?        */
+      persistent_mode,                    /* Running in persistent mode?      */
+      deferred_mode,                      /* Deferred forkserver mode?        */
+      fixed_seed,                         /* do not reseed                    */
+      fast_cal,                           /* Try to calibrate faster?         */
+      disable_trim,                       /* Never trim in fuzz_one           */
+      shmem_testcase_mode,                /* If sharedmem testcases are used  */
       expand_havoc,                /* perform expensive havoc after no find */
       cycle_schedules,                  /* cycle power schedules?           */
       old_seed_selection,               /* use vanilla afl seed selection   */
@@ -597,7 +595,8 @@ typedef struct afl_state {
       last_hang_time,                   /* Time for most recent hang (ms)   */
       longest_find_time,                /* Longest time taken for a find    */
       exit_on_time,                     /* Delay to exit if no new paths    */
-      sync_time;                        /* Sync time (ms)                   */
+      sync_time,                        /* Sync time (ms)                   */
+      switch_fuzz_mode;                 /* auto or fixed fuzz mode          */
 
   u32 slowest_exec_ms,                  /* Slowest testcase non hang in ms  */
       subseq_tmouts;                    /* Number of timeouts in a row      */
@@ -1203,6 +1202,7 @@ u8     check_if_text_buf(u8 *buf, u32 len);
 #ifndef AFL_SHOWMAP
 void setup_signal_handlers(void);
 #endif
+char *get_fuzzing_state(afl_state_t *afl);
 
 /* CmpLog */
 
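
The new include/afl-mutations.h added below documents the afl_mutate() entry point that aflpp.c calls. A minimal hedged sketch of a caller, following the signature described in its header comment (the helper name and the choice of up to 16 stacked mutations mirror aflpp.c and are illustrative):

```c
#include "afl-mutations.h"  /* needs afl-fuzz.h for afl_state_t / rand_below */

/* Sketch only: mutate `len` bytes in buf (capacity max_len), return new len. */
u32 mutate_once(afl_state_t *afl, u8 *buf, u32 len, u32 max_len) {

  u32 steps = 1 + rand_below(afl, 16);  /* how many stacked mutations */

  /* is_text = false, is_exploration = true, no splice buffer (NULL, 0) */
  return afl_mutate(afl, buf, len, steps, false, true, NULL, 0, max_len);

}
```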
diff --git a/include/afl-mutations.h b/include/afl-mutations.h
new file mode 100644
index 00000000..a3c9fd59
--- /dev/null
+++ b/include/afl-mutations.h
@@ -0,0 +1,2202 @@
+/* Implementation of afl havoc mutation to be used in AFL++ custom mutators and
+   partially in afl-fuzz itself.
+
+   How to use:
+
+   #include "afl-mutations.h"  // needs afl-fuzz.h
+
+   u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, bool is_text,
+                  bool is_exploration, u8 *splice_buf, u32 splice_len,
+                  u32 max_len);
+
+   Returns:
+     u32 - the length of the mutated data returned in *buf. 0 = error
+   Parameters:
+     afl_state_t *afl - the *afl state pointer
+     u8 *buf - the input buffer to mutate which will be mutated into.
+           NOTE: must be able to contain a size of at least max_len (see below)!
+     u32 len - the length of the input
+     u32 steps - how many mutations to perform on the input
+     bool is_text - is the target expecting text inputs
+     bool is_exploration - mutate for exploration mode (instead of exploitation)
+     splice_buf - a buffer from another corpus item to splice with.
+                  If NULL then no splicing
+     splice_len - the length of the splice buffer. If 0 then no splicing
+     u32 max_len - the maximum size the mutated buffer may grow to
+*/
+
+#ifndef AFL_MUTATIONS_H
+  #define AFL_MUTATIONS_H
+
+  #include <stdbool.h>
+  #include "afl-fuzz.h"
+
+  #define MUT_STRATEGY_ARRAY_SIZE 256
+
+enum {
+
+  /* 00 */ MUT_FLIPBIT,
+  /* 01 */ MUT_INTERESTING8,
+  /* 02 */ MUT_INTERESTING16,
+  /* 03 */ MUT_INTERESTING16BE,
+  /* 04 */ MUT_INTERESTING32,
+  /* 05 */ MUT_INTERESTING32BE,
+  /* 06 */ MUT_ARITH8_,
+  /* 07 */ MUT_ARITH8,
+  /* 08 */ MUT_ARITH16_,
+  /* 09 */ MUT_ARITH16BE_,
+  /* 10 */ MUT_ARITH16,
+  /* 11 */ MUT_ARITH16BE,
+  /* 12 */ MUT_ARITH32_,
+  /* 13 */ MUT_ARITH32BE_,
+  /* 14 */ MUT_ARITH32,
+  /* 15 */ MUT_ARITH32BE,
+  /* 16 */ MUT_RAND8,
+  /* 17 */ MUT_CLONE_COPY,
+  /* 18 */ MUT_CLONE_FIXED,
+  /* 19 */ MUT_OVERWRITE_COPY,
+  /* 20 */ MUT_OVERWRITE_FIXED,
+  /* 21 */ MUT_BYTEADD,
+  /* 22 */ MUT_BYTESUB,
+  /* 23 */ MUT_FLIP8,
+  /* 24 */ MUT_SWITCH,
+  /* 25 */ MUT_DEL,
+  /* 26 */ MUT_SHUFFLE,
+  /* 27 */ MUT_DELONE,
+  /* 28 */ MUT_INSERTONE,
+  /* 29 */ MUT_ASCIINUM,
+  /* 30 */ MUT_INSERTASCIINUM,
+  /* 31 */ MUT_EXTRA_OVERWRITE,
+  /* 32 */ MUT_EXTRA_INSERT,
+  /* 33 */ MUT_AUTO_EXTRA_OVERWRITE,
+  /* 34 */ MUT_AUTO_EXTRA_INSERT,
+  /* 35 */ MUT_SPLICE_OVERWRITE,
+  /* 36 */ MUT_SPLICE_INSERT,
+
+  MUT_MAX
+
+};
+
+  #define MUT_NORMAL_ARRAY_SIZE 77
+u32 normal_splice_array[MUT_NORMAL_ARRAY_SIZE] = {MUT_FLIPBIT,
+                                                  MUT_FLIPBIT,
+                                                  MUT_FLIPBIT,
+                                                  MUT_FLIPBIT,
+                                                  MUT_INTERESTING8,
+                                                  MUT_INTERESTING8,
+                                                  MUT_INTERESTING8,
+                                                  MUT_INTERESTING8,
+                                                  MUT_INTERESTING16,
+                                                  MUT_INTERESTING16,
+                                                  MUT_INTERESTING16BE,
+                                                  MUT_INTERESTING16BE,
+                                                  MUT_INTERESTING32,
+                                                  MUT_INTERESTING32,
+                                                  MUT_INTERESTING32BE,
+                                                  MUT_INTERESTING32BE,
+                                                  MUT_ARITH8_,
+                                                  MUT_ARITH8_,
+                                                  MUT_ARITH8_,
+                                                  MUT_ARITH8_,
+                                                  MUT_ARITH8,
+                                                  MUT_ARITH8,
+                                                  MUT_ARITH8,
+                                                  MUT_ARITH8,
+                                                  MUT_ARITH16_,
+                                                  MUT_ARITH16_,
+                                                  MUT_ARITH16BE_,
+                                                  MUT_ARITH16BE_,
+                                                  MUT_ARITH16,
+                                                  MUT_ARITH16,
+                                                  MUT_ARITH16BE,
+                                                  MUT_ARITH16BE,
+                                                  MUT_ARITH32_,
+                                                  MUT_ARITH32_,
+                                                  MUT_ARITH32BE_,
+                                                  MUT_ARITH32BE_,
+                                                  MUT_ARITH32,
+                                                  MUT_ARITH32,
+                                                  MUT_ARITH32BE,
+                                                  MUT_ARITH32BE,
+                                                  MUT_RAND8,
+                                                  MUT_RAND8,
+                                                  MUT_RAND8,
+                                                  MUT_RAND8,
+                                                  MUT_CLONE_COPY,
+                                                  MUT_CLONE_COPY,
+                                                  MUT_CLONE_COPY,
+                                                  MUT_CLONE_FIXED,
+                                                  MUT_OVERWRITE_COPY,
+                                                  MUT_OVERWRITE_COPY,
+                                                  MUT_OVERWRITE_COPY,
+                                                  MUT_OVERWRITE_FIXED,
+                                                  MUT_BYTEADD,
+                                                  MUT_BYTESUB,
+                                                  MUT_FLIP8,
+                                                  MUT_SWITCH,
+                                                  MUT_SWITCH,
+                                                  MUT_DEL,
+                                                  MUT_DEL,
+                                                  MUT_DEL,
+                                                  MUT_DEL,
+                                                  MUT_DEL,
+                                                  MUT_DEL,
+                                                  MUT_DEL,
+                                                  MUT_DEL,
+                                                  MUT_EXTRA_OVERWRITE,
+                                                  MUT_EXTRA_OVERWRITE,
+                                                  MUT_EXTRA_INSERT,
+                                                  MUT_EXTRA_INSERT,
+                                                  MUT_AUTO_EXTRA_OVERWRITE,
+                                                  MUT_AUTO_EXTRA_OVERWRITE,
+                                                  MUT_AUTO_EXTRA_INSERT,
+                                                  MUT_AUTO_EXTRA_INSERT,
+                                                  MUT_SPLICE_OVERWRITE,
+                                                  MUT_SPLICE_OVERWRITE,
+                                                  MUT_SPLICE_INSERT,
+                                                  MUT_SPLICE_INSERT};
+
+  #define MUT_SPLICE_ARRAY_SIZE 81
+u32 full_splice_array[MUT_SPLICE_ARRAY_SIZE] = {MUT_FLIPBIT,
+                                                MUT_FLIPBIT,
+                                                MUT_FLIPBIT,
+                                                MUT_FLIPBIT,
+                                                MUT_INTERESTING8,
+                                                MUT_INTERESTING8,
+                                                MUT_INTERESTING8,
+                                                MUT_INTERESTING8,
+                                                MUT_INTERESTING16,
+                                                MUT_INTERESTING16,
+                                                MUT_INTERESTING16BE,
+                                                MUT_INTERESTING16BE,
+                                                MUT_INTERESTING32,
+                                                MUT_INTERESTING32,
+                                                MUT_INTERESTING32BE,
+                                                MUT_INTERESTING32BE,
+                                                MUT_ARITH8_,
+                                                MUT_ARITH8_,
+                                                MUT_ARITH8_,
+                                                MUT_ARITH8_,
+                                                MUT_ARITH8,
+                                                MUT_ARITH8,
+                                                MUT_ARITH8,
+                                                MUT_ARITH8,
+                                                MUT_ARITH16_,
+                                                MUT_ARITH16_,
+                                                MUT_ARITH16BE_,
+                                                MUT_ARITH16BE_,
+                                                MUT_ARITH16,
+                                                MUT_ARITH16,
+                                                MUT_ARITH16BE,
+                                                MUT_ARITH16BE,
+                                                MUT_ARITH32_,
+                                                MUT_ARITH32_,
+                                                MUT_ARITH32BE_,
+                                                MUT_ARITH32BE_,
+                                                MUT_ARITH32,
+                                                MUT_ARITH32,
+                                                MUT_ARITH32BE,
+                                                MUT_ARITH32BE,
+                                                MUT_RAND8,
+                                                MUT_RAND8,
+                                                MUT_RAND8,
+                                                MUT_RAND8,
+                                                MUT_CLONE_COPY,
+                                                MUT_CLONE_COPY,
+                                                MUT_CLONE_COPY,
+                                                MUT_CLONE_FIXED,
+                                                MUT_OVERWRITE_COPY,
+                                                MUT_OVERWRITE_COPY,
+                                                MUT_OVERWRITE_COPY,
+                                                MUT_OVERWRITE_FIXED,
+                                                MUT_BYTEADD,
+                                                MUT_BYTESUB,
+                                                MUT_FLIP8,
+                                                MUT_SWITCH,
+                                                MUT_SWITCH,
+                                                MUT_DEL,
+                                                MUT_DEL,
+                                                MUT_DEL,
+                                                MUT_DEL,
+                                                MUT_DEL,
+                                                MUT_DEL,
+                                                MUT_DEL,
+                                                MUT_DEL,
+                                                MUT_EXTRA_OVERWRITE,
+                                                MUT_EXTRA_OVERWRITE,
+                                                MUT_EXTRA_INSERT,
+                                                MUT_EXTRA_INSERT,
+                                                MUT_AUTO_EXTRA_OVERWRITE,
+                                                MUT_AUTO_EXTRA_OVERWRITE,
+                                                MUT_AUTO_EXTRA_INSERT,
+                                                MUT_AUTO_EXTRA_INSERT,
+                                                MUT_SPLICE_OVERWRITE,
+                                                MUT_SPLICE_OVERWRITE,
+                                                MUT_SPLICE_OVERWRITE,
+                                                MUT_SPLICE_OVERWRITE,
+                                                MUT_SPLICE_INSERT,
+                                                MUT_SPLICE_INSERT,
+                                                MUT_SPLICE_INSERT,
+                                                MUT_SPLICE_INSERT};
+
+u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = {
+
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT
+
+};
+
+u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = {
+
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT
+
+};
+
+u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = {
+
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT
+
+};
+
+u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = {
+
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_FLIPBIT,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING8,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING16BE,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_INTERESTING32BE,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8_,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH8,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16BE_,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH16BE,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32BE_,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_ARITH32BE,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_RAND8,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_COPY,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_CLONE_FIXED,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_COPY,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_OVERWRITE_FIXED,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTEADD,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_BYTESUB,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_FLIP8,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_SWITCH,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_DEL,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_SHUFFLE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_DELONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_INSERTONE,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_ASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_INSERTASCIINUM,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_OVERWRITE,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_OVERWRITE,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_AUTO_EXTRA_INSERT,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_OVERWRITE,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT,
+    MUT_SPLICE_INSERT
+
+};
+
+u32 afl_mutate(afl_state_t *, u8 *, u32, u32, bool, bool, u8 *, u32, u32);
+u32 choose_block_len(afl_state_t *, u32);
+
+/* Helper to choose a random block len for block operations in fuzz_one().
+   Doesn't return zero, provided that limit is > 0. */
+
+inline u32 choose_block_len(afl_state_t *afl, u32 limit) {
+
+  u32 min_value, max_value;
+  u32 rlim = MIN(afl->queue_cycle, (u32)3);
+
+  if (unlikely(!afl->run_over10m)) { rlim = 1; }
+
+  switch (rand_below(afl, rlim)) {
+
+    case 0:
+      min_value = 1;
+      max_value = HAVOC_BLK_SMALL;
+      break;
+
+    case 1:
+      min_value = HAVOC_BLK_SMALL;
+      max_value = HAVOC_BLK_MEDIUM;
+      break;
+
+    default:
+
+      if (likely(rand_below(afl, 10))) {
+
+        min_value = HAVOC_BLK_MEDIUM;
+        max_value = HAVOC_BLK_LARGE;
+
+      } else {
+
+        min_value = HAVOC_BLK_LARGE;
+        max_value = HAVOC_BLK_XL;
+
+      }
+
+  }
+
+  if (min_value >= limit) { min_value = 1; }
+
+  return min_value + rand_below(afl, MIN(max_value, limit) - min_value + 1);
+
+}
+
+inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps,
+                      bool is_text, bool is_exploration, u8 *splice_buf,
+                      u32 splice_len, u32 max_len) {
+
+  if (!buf || !len) { return 0; }
+
+  u32       *mutation_array;
+  static u8 *tmp_buf = NULL;
+  static u32 tmp_buf_size = 0;
+
+  if (max_len > tmp_buf_size) {
+
+    if (tmp_buf) {
+
+      u8 *ptr = realloc(tmp_buf, max_len);
+
+      if (!ptr) {
+
+        return 0;
+
+      } else {
+
+        tmp_buf = ptr;
+
+      }
+
+    } else {
+
+      if ((tmp_buf = malloc(max_len)) == NULL) { return 0; }
+
+    }
+
+    tmp_buf_size = max_len;
+
+  }
+
+  if (is_text) {
+
+    if (is_exploration) {
+
+      mutation_array = (u32 *)&mutation_strategy_exploration_text;
+
+    } else {
+
+      mutation_array = (u32 *)&mutation_strategy_exploitation_text;
+
+    }
+
+  } else {
+
+    if (is_exploration) {
+
+      mutation_array = (u32 *)&mutation_strategy_exploration_binary;
+
+    } else {
+
+      mutation_array = (u32 *)&mutation_strategy_exploitation_binary;
+
+    }
+
+  }
+
+  for (u32 step = 0; step < steps; ++step) {
+
+  retry_havoc_step : {
+
+    u32 r = rand_below(afl, MUT_STRATEGY_ARRAY_SIZE), item;
+
+    switch (mutation_array[r]) {
+
+      case MUT_FLIPBIT: {
+
+        /* Flip a single bit somewhere. Spooky! */
+        u8  bit = rand_below(afl, 8);
+        u32 off = rand_below(afl, len);
+        buf[off] ^= 1 << bit;
+
+        break;
+
+      }
+
+      case MUT_INTERESTING8: {
+
+        /* Set byte to interesting value. */
+
+        item = rand_below(afl, sizeof(interesting_8));
+        buf[rand_below(afl, len)] = interesting_8[item];
+        break;
+
+      }
+
+      case MUT_INTERESTING16: {
+
+        /* Set word to interesting value, little endian. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        item = rand_below(afl, sizeof(interesting_16) >> 1);
+        *(u16 *)(buf + rand_below(afl, len - 1)) = interesting_16[item];
+
+        break;
+
+      }
+
+      case MUT_INTERESTING16BE: {
+
+        /* Set word to interesting value, big endian. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        item = rand_below(afl, sizeof(interesting_16) >> 1);
+        *(u16 *)(buf + rand_below(afl, len - 1)) = SWAP16(interesting_16[item]);
+
+        break;
+
+      }
+
+      case MUT_INTERESTING32: {
+
+        /* Set dword to interesting value, little endian. */
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        item = rand_below(afl, sizeof(interesting_32) >> 2);
+        *(u32 *)(buf + rand_below(afl, len - 3)) = interesting_32[item];
+
+        break;
+
+      }
+
+      case MUT_INTERESTING32BE: {
+
+        /* Set dword to interesting value, big endian. */
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        item = rand_below(afl, sizeof(interesting_32) >> 2);
+        *(u32 *)(buf + rand_below(afl, len - 3)) = SWAP32(interesting_32[item]);
+
+        break;
+
+      }
+
+      case MUT_ARITH8_: {
+
+        /* Randomly subtract from byte. */
+
+        item = 1 + rand_below(afl, ARITH_MAX);
+        buf[rand_below(afl, len)] -= item;
+        break;
+
+      }
+
+      case MUT_ARITH8: {
+
+        /* Randomly add to byte. */
+
+        item = 1 + rand_below(afl, ARITH_MAX);
+        buf[rand_below(afl, len)] += item;
+        break;
+
+      }
+
+      case MUT_ARITH16_: {
+
+        /* Randomly subtract from word, little endian. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 1);
+        item = 1 + rand_below(afl, ARITH_MAX);
+        *(u16 *)(buf + pos) -= item;
+
+        break;
+
+      }
+
+      case MUT_ARITH16BE_: {
+
+        /* Randomly subtract from word, big endian. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 1);
+        u16 num = 1 + rand_below(afl, ARITH_MAX);
+        *(u16 *)(buf + pos) = SWAP16(SWAP16(*(u16 *)(buf + pos)) - num);
+
+        break;
+
+      }
+
+      case MUT_ARITH16: {
+
+        /* Randomly add to word, little endian. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 1);
+        item = 1 + rand_below(afl, ARITH_MAX);
+        *(u16 *)(buf + pos) += item;
+
+        break;
+
+      }
+
+      case MUT_ARITH16BE: {
+
+        /* Randomly add to word, big endian. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 1);
+        u16 num = 1 + rand_below(afl, ARITH_MAX);
+        *(u16 *)(buf + pos) = SWAP16(SWAP16(*(u16 *)(buf + pos)) + num);
+
+        break;
+
+      }
+
+      case MUT_ARITH32_: {
+
+        /* Randomly subtract from dword, little endian. */
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 3);
+        item = 1 + rand_below(afl, ARITH_MAX);
+        *(u32 *)(buf + pos) -= item;
+
+        break;
+
+      }
+
+      case MUT_ARITH32BE_: {
+
+        /* Randomly subtract from dword, big endian. */
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 3);
+        u32 num = 1 + rand_below(afl, ARITH_MAX);
+        *(u32 *)(buf + pos) = SWAP32(SWAP32(*(u32 *)(buf + pos)) - num);
+
+        break;
+
+      }
+
+      case MUT_ARITH32: {
+
+        /* Randomly add to dword, little endian. */
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 3);
+        item = 1 + rand_below(afl, ARITH_MAX);
+        *(u32 *)(buf + pos) += item;
+
+        break;
+
+      }
+
+      case MUT_ARITH32BE: {
+
+        /* Randomly add to dword, big endian. */
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        u32 pos = rand_below(afl, len - 3);
+        u32 num = 1 + rand_below(afl, ARITH_MAX);
+        *(u32 *)(buf + pos) = SWAP32(SWAP32(*(u32 *)(buf + pos)) + num);
+
+        break;
+
+      }
+
+      case MUT_RAND8: {
+
+        /* Just set a random byte to a random value. Because,
+           why not. We use XOR with 1-255 to eliminate the
+           possibility of a no-op. */
+
+        u32 pos = rand_below(afl, len);
+        item = 1 + rand_below(afl, 255);
+        buf[pos] ^= item;
+        break;
+
+      }
+
+      case MUT_CLONE_COPY: {
+
+        if (likely(len + HAVOC_BLK_XL < max_len)) {
+
+          /* Clone bytes. */
+
+          u32 clone_len = choose_block_len(afl, len);
+          u32 clone_from = rand_below(afl, len - clone_len + 1);
+          u32 clone_to = rand_below(afl, len);
+
+          /* Head */
+
+          memcpy(tmp_buf, buf, clone_to);
+
+          /* Inserted part */
+
+          memcpy(tmp_buf + clone_to, buf + clone_from, clone_len);
+
+          /* Tail */
+          memcpy(tmp_buf + clone_to + clone_len, buf + clone_to,
+                 len - clone_to);
+
+          len += clone_len;
+          memcpy(buf, tmp_buf, len);
+
+        } else if (unlikely(len < 8)) {
+
+          break;
+
+        } else {
+
+          goto retry_havoc_step;
+
+        }
+
+        break;
+
+      }
+
+      case MUT_CLONE_FIXED: {
+
+        if (likely(len + HAVOC_BLK_XL < max_len)) {
+
+          /* Insert a block of constant bytes (25%). */
+
+          u32 clone_len = choose_block_len(afl, HAVOC_BLK_XL);
+          u32 clone_to = rand_below(afl, len);
+          u32 strat = rand_below(afl, 2);
+          u32 clone_from = clone_to ? clone_to - 1 : 0;
+          item = strat ? rand_below(afl, 256) : buf[clone_from];
+
+          /* Head */
+
+          memcpy(tmp_buf, buf, clone_to);
+
+          /* Inserted part */
+
+          memset(tmp_buf + clone_to, item, clone_len);
+
+          /* Tail */
+          memcpy(tmp_buf + clone_to + clone_len, buf + clone_to,
+                 len - clone_to);
+
+          len += clone_len;
+          memcpy(buf, tmp_buf, len);
+
+        } else if (unlikely(len < 8)) {
+
+          break;
+
+        } else {
+
+          goto retry_havoc_step;
+
+        }
+
+        break;
+
+      }
+
+      case MUT_OVERWRITE_COPY: {
+
+        /* Overwrite bytes with a randomly selected chunk of bytes. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        u32 copy_len = choose_block_len(afl, len - 1);
+        u32 copy_from = rand_below(afl, len - copy_len + 1);
+        u32 copy_to = rand_below(afl, len - copy_len + 1);
+
+        if (likely(copy_from != copy_to)) {
+
+          memmove(buf + copy_to, buf + copy_from, copy_len);
+
+        }
+
+        break;
+
+      }
+
+      case MUT_OVERWRITE_FIXED: {
+
+        /* Overwrite bytes with fixed bytes. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        u32 copy_len = choose_block_len(afl, len - 1);
+        u32 copy_to = rand_below(afl, len - copy_len + 1);
+        u32 strat = rand_below(afl, 2);
+        u32 copy_from = copy_to ? copy_to - 1 : 0;
+        item = strat ? rand_below(afl, 256) : buf[copy_from];
+        memset(buf + copy_to, item, copy_len);
+
+        break;
+
+      }
+
+      case MUT_BYTEADD: {
+
+        /* Increase byte by 1. */
+
+        buf[rand_below(afl, len)]++;
+        break;
+
+      }
+
+      case MUT_BYTESUB: {
+
+        /* Decrease byte by 1. */
+
+        buf[rand_below(afl, len)]--;
+        break;
+
+      }
+
+      case MUT_FLIP8: {
+
+        /* Flip byte. */
+
+        buf[rand_below(afl, len)] ^= 0xff;
+        break;
+
+      }
+
+      case MUT_SWITCH: {
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        /* Switch bytes. */
+
+        u32 to_end, switch_to, switch_len, switch_from;
+        switch_from = rand_below(afl, len);
+        do {
+
+          switch_to = rand_below(afl, len);
+
+        } while (unlikely(switch_from == switch_to));
+
+        if (switch_from < switch_to) {
+
+          switch_len = switch_to - switch_from;
+          to_end = len - switch_to;
+
+        } else {
+
+          switch_len = switch_from - switch_to;
+          to_end = len - switch_from;
+
+        }
+
+        switch_len = choose_block_len(afl, MIN(switch_len, to_end));
+
+        /* Backup */
+
+        memcpy(tmp_buf, buf + switch_from, switch_len);
+
+        /* Switch 1 */
+
+        memcpy(buf + switch_from, buf + switch_to, switch_len);
+
+        /* Switch 2 */
+
+        memcpy(buf + switch_to, tmp_buf, switch_len);
+
+        break;
+
+      }
+
+      case MUT_DEL: {
+
+        /* Delete bytes. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        /* Don't delete too much. */
+
+        u32 del_len = choose_block_len(afl, len - 1);
+        u32 del_from = rand_below(afl, len - del_len + 1);
+        memmove(buf + del_from, buf + del_from + del_len,
+                len - del_from - del_len);
+        len -= del_len;
+
+        break;
+
+      }
+
+      case MUT_SHUFFLE: {
+
+        /* Shuffle bytes. */
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        u32 blen = choose_block_len(afl, len - 1);
+        u32 off = rand_below(afl, len - blen + 1);
+
+        for (u32 i = blen - 1; i > 0; i--) {
+
+          u32 j;
+          do {
+
+            j = rand_below(afl, i + 1);
+
+          } while (unlikely(i == j));
+
+          u8 temp = buf[off + i];
+          buf[off + i] = buf[off + j];
+          buf[off + j] = temp;
+
+        }
+
+        break;
+
+      }
+
+      case MUT_DELONE: {
+
+        /* Delete a single byte. */
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        /* Don't delete too much. */
+
+        u32 del_len = 1;
+        u32 del_from = rand_below(afl, len - del_len + 1);
+        memmove(buf + del_from, buf + del_from + del_len,
+                len - del_from - del_len);
+
+        len -= del_len;
+
+        break;
+
+      }
+
+      case MUT_INSERTONE: {
+
+        if (unlikely(len < 2)) { break; }  // no retry
+
+        u32 clone_len = 1;
+        u32 clone_to = rand_below(afl, len);
+        u32 strat = rand_below(afl, 2);
+        u32 clone_from = clone_to ? clone_to - 1 : 0;
+        item = strat ? rand_below(afl, 256) : buf[clone_from];
+
+        /* Head */
+
+        memcpy(tmp_buf, buf, clone_to);
+
+        /* Inserted part */
+
+        memset(tmp_buf + clone_to, item, clone_len);
+
+        /* Tail */
+        memcpy(tmp_buf + clone_to + clone_len, buf + clone_to, len - clone_to);
+
+        len += clone_len;
+        memcpy(buf, tmp_buf, len);
+
+        break;
+
+      }
+
+      case MUT_ASCIINUM: {
+
+        if (unlikely(len < 4)) { break; }  // no retry
+
+        u32 off = rand_below(afl, len), off2 = off, cnt = 0;
+
+        while (off2 + cnt < len && !isdigit(buf[off2 + cnt])) {
+
+          ++cnt;
+
+        }
+
+        // none found, wrap
+        if (off2 + cnt == len) {
+
+          off2 = 0;
+          cnt = 0;
+
+          while (cnt < off && !isdigit(buf[off2 + cnt])) {
+
+            ++cnt;
+
+          }
+
+          if (cnt == off) {
+
+            if (len < 8) {
+
+              break;
+
+            } else {
+
+              goto retry_havoc_step;
+
+            }
+
+          }
+
+        }
+
+        off = off2 + cnt;
+        off2 = off + 1;
+
+        while (off2 < len && isdigit(buf[off2])) {
+
+          ++off2;
+
+        }
+
+        s64 val = buf[off] - '0';
+        for (u32 i = off + 1; i < off2; ++i) {
+
+          val = (val * 10) + buf[i] - '0';
+
+        }
+
+        if (off && buf[off - 1] == '-') { val = -val; }
+
+        u32 strat = rand_below(afl, 8);
+        switch (strat) {
+
+          case 0:
+            val++;
+            break;
+          case 1:
+            val--;
+            break;
+          case 2:
+            val *= 2;
+            break;
+          case 3:
+            val /= 2;
+            break;
+          case 4:
+            if (likely(val && (u64)val < 0x19999999)) {
+
+              val = (u64)rand_next(afl) % (u64)((u64)val * 10);
+
+            } else {
+
+              val = rand_below(afl, 256);
+
+            }
+
+            break;
+          case 5:
+            val += rand_below(afl, 256);
+            break;
+          case 6:
+            val -= rand_below(afl, 256);
+            break;
+          case 7:
+            val = ~(val);
+            break;
+
+        }
+
+        /* Render the value into a scratch buffer; a local named buf
+           would shadow the data buffer parameter. */
+
+        char numbuf[24];
+        snprintf(numbuf, sizeof(numbuf), "%lld", (long long)val);
+        u32 old_len = off2 - off;
+        u32 new_len = strlen(numbuf);
+
+        if (old_len == new_len) {
+
+          memcpy(buf + off, numbuf, new_len);
+
+        } else {
+
+          /* Head */
+
+          memcpy(tmp_buf, buf, off);
+
+          /* Inserted part */
+
+          memcpy(tmp_buf + off, numbuf, new_len);
+
+          /* Tail */
+          memcpy(tmp_buf + off + new_len, buf + off2, len - off2);
+
+          len += (new_len - old_len);
+          memcpy(buf, tmp_buf, len);
+
+        }
+
+        // fprintf(stderr, "AFTER : %s\n", buf);
+        break;
+
+      }
+
+      case MUT_INSERTASCIINUM: {
+
+        /* Write an ascii number over bytes at a random position. Local
+           names ins_len/numbuf avoid shadowing the outer len and buf. */
+
+        u32 ins_len = 1 + rand_below(afl, 8);
+        u32 pos = rand_below(afl, len);
+
+        if (unlikely(len < pos + ins_len)) {
+
+          if (unlikely(len < 8)) {
+
+            break;  // no retry
+
+          } else {
+
+            goto retry_havoc_step;
+
+          }
+
+        }
+
+        u64  val = rand_next(afl);
+        char numbuf[24];
+        snprintf(numbuf, sizeof(numbuf), "%llu", (unsigned long long)val);
+        memcpy(buf + pos, numbuf, ins_len);
+
+        break;
+
+      }
+
+      case MUT_EXTRA_OVERWRITE: {
+
+        if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; }
+
+        /* Use the dictionary. */
+
+        u32 use_extra = rand_below(afl, afl->extras_cnt);
+        u32 extra_len = afl->extras[use_extra].len;
+
+        if (unlikely(extra_len > len)) { goto retry_havoc_step; }
+
+        u32 insert_at = rand_below(afl, len - extra_len + 1);
+        memcpy(buf + insert_at, afl->extras[use_extra].data, extra_len);
+
+        break;
+
+      }
+
+      case MUT_EXTRA_INSERT: {
+
+        if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; }
+
+        u32 use_extra = rand_below(afl, afl->extras_cnt);
+        u32 extra_len = afl->extras[use_extra].len;
+        if (unlikely(len + extra_len >= max_len)) { goto retry_havoc_step; }
+
+        u8 *ptr = afl->extras[use_extra].data;
+        u32 insert_at = rand_below(afl, len + 1);
+
+        /* Tail */
+        memmove(buf + insert_at + extra_len, buf + insert_at, len - insert_at);
+
+        /* Inserted part */
+        memcpy(buf + insert_at, ptr, extra_len);
+        len += extra_len;
+
+        break;
+
+      }
+
+      case MUT_AUTO_EXTRA_OVERWRITE: {
+
+        if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; }
+
+        /* Use the dictionary. */
+
+        u32 use_extra = rand_below(afl, afl->a_extras_cnt);
+        u32 extra_len = afl->a_extras[use_extra].len;
+
+        if (unlikely(extra_len > len)) { goto retry_havoc_step; }
+
+        u32 insert_at = rand_below(afl, len - extra_len + 1);
+        memcpy(buf + insert_at, afl->a_extras[use_extra].data, extra_len);
+
+        break;
+
+      }
+
+      case MUT_AUTO_EXTRA_INSERT: {
+
+        if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; }
+
+        u32 use_extra = rand_below(afl, afl->a_extras_cnt);
+        u32 extra_len = afl->a_extras[use_extra].len;
+        if (unlikely(len + extra_len >= max_len)) { goto retry_havoc_step; }
+
+        u8 *ptr = afl->a_extras[use_extra].data;
+        u32 insert_at = rand_below(afl, len + 1);
+
+        /* Tail */
+        memmove(buf + insert_at + extra_len, buf + insert_at, len - insert_at);
+
+        /* Inserted part */
+        memcpy(buf + insert_at, ptr, extra_len);
+        len += extra_len;
+
+        break;
+
+      }
+
+      case MUT_SPLICE_OVERWRITE: {
+
+        if (unlikely(!splice_buf || !splice_len)) { goto retry_havoc_step; }
+
+        /* overwrite mode */
+
+        u32 copy_from, copy_to, copy_len;
+
+        copy_len = choose_block_len(afl, splice_len - 1);
+
+        if (copy_len > len) copy_len = len;
+
+        copy_from = rand_below(afl, splice_len - copy_len + 1);
+        copy_to = rand_below(afl, len - copy_len + 1);
+        memmove(buf + copy_to, splice_buf + copy_from, copy_len);
+
+        break;
+
+      }
+
+      case MUT_SPLICE_INSERT: {
+
+        if (unlikely(!splice_buf || !splice_len)) { goto retry_havoc_step; }
+
+        if (unlikely(len + HAVOC_BLK_XL >= max_len)) { goto retry_havoc_step; }
+
+        /* insert mode */
+
+        u32 clone_from, clone_to, clone_len;
+
+        clone_len = choose_block_len(afl, splice_len);
+        clone_from = rand_below(afl, splice_len - clone_len + 1);
+        clone_to = rand_below(afl, len + 1);
+
+        /* Head */
+
+        memcpy(tmp_buf, buf, clone_to);
+
+        /* Inserted part */
+
+        memcpy(tmp_buf + clone_to, splice_buf + clone_from, clone_len);
+
+        /* Tail */
+        memcpy(tmp_buf + clone_to + clone_len, buf + clone_to, len - clone_to);
+
+        len += clone_len;
+        memcpy(buf, tmp_buf, len);
+
+        break;
+
+      }
+
+    }
+
+  }
+
+  }
+
+  return len;
+
+}
+
+#endif                                                  /* !AFL_MUTATIONS_H */
+
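
For orientation, a minimal sketch of how the new afl_mutate() helper could be driven from calling code; the wrapper name, the step count, and the choice to pass no splice donor are illustrative assumptions, not part of this patch (it also assumes the AFL++ headers are on the include path):

#include "afl-mutations.h"

/* Illustrative sketch: apply up to 8 stacked mutations to data in place,
   treating it as binary input in exploration mode and passing no splice
   donor (splice-based strategies then simply retry another strategy).
   Returns the new length, which may grow up to max_len. */
static u32 mutate_once(afl_state_t *afl, u8 *data, u32 len, u32 max_len) {

  return afl_mutate(afl, data, len, /* steps */ 8,
                    /* is_text */ false, /* is_exploration */ true,
                    /* splice_buf */ NULL, /* splice_len */ 0, max_len);

}
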
diff --git a/include/alloc-inl.h b/include/alloc-inl.h
index 1e9a192b..cff808b2 100644
--- a/include/alloc-inl.h
+++ b/include/alloc-inl.h
@@ -322,7 +322,7 @@ static inline void DFL_ck_free(void *mem) {
 static inline void *DFL_ck_realloc(void *orig, u32 size) {
 
   void *ret;
-  u32 old_size = 0;
+  u32   old_size = 0;
 
   if (!size) {
 
@@ -392,7 +392,7 @@ static inline void *DFL_ck_realloc(void *orig, u32 size) {
 static inline u8 *DFL_ck_strdup(u8 *str) {
 
   void *ret;
-  u32 size;
+  u32   size;
 
   if (!str) return NULL;
 
@@ -438,14 +438,14 @@ struct TRK_obj {
 
   void *ptr;
   char *file, *func;
-  u32 line;
+  u32   line;
 
 };
 
     #ifdef AFL_MAIN
 
 struct TRK_obj *TRK[ALLOC_BUCKETS];
-u32 TRK_cnt[ALLOC_BUCKETS];
+u32             TRK_cnt[ALLOC_BUCKETS];
 
       #define alloc_report() TRK_report()
 
diff --git a/include/config.h b/include/config.h
index 53be8549..8585041e 100644
--- a/include/config.h
+++ b/include/config.h
@@ -26,7 +26,7 @@
 /* Version string: */
 
 // c = release, a = volatile github dev, e = experimental branch
-#define VERSION "++4.07c"
+#define VERSION "++4.08a"
 
 /******************************************************
  *                                                    *
@@ -43,6 +43,12 @@
    Default: 8MB (defined in bytes) */
 #define DEFAULT_SHMEM_SIZE (8 * 1024 * 1024)
 
+/* Default time without new coverage finds after which afl-fuzz switches
+   to exploitation mode. It automatically switches back when new coverage
+   is found.
+   Default: 1000 (seconds) */
+#define STRATEGY_SWITCH_TIME 1000
+
 /* Default file permission umode when creating files (default: 0600) */
 #define DEFAULT_PERMISSION 0600
 
diff --git a/instrumentation/SanitizerCoverageLTO.so.cc b/instrumentation/SanitizerCoverageLTO.so.cc
index 2d17ffd4..d7b03634 100644
--- a/instrumentation/SanitizerCoverageLTO.so.cc
+++ b/instrumentation/SanitizerCoverageLTO.so.cc
@@ -331,7 +331,7 @@ llvmGetPassPluginInfo() {
 #if LLVM_VERSION_MAJOR <= 13
             using OptimizationLevel = typename PassBuilder::OptimizationLevel;
 #endif
-#if LLVM_VERSION_MAJOR >= 16
+#if LLVM_VERSION_MAJOR >= 15
             PB.registerFullLinkTimeOptimizationLastEPCallback(
 #else
             PB.registerOptimizerLastEPCallback(
diff --git a/instrumentation/SanitizerCoveragePCGUARD.so.cc b/instrumentation/SanitizerCoveragePCGUARD.so.cc
index 7171e7aa..7d614f43 100644
--- a/instrumentation/SanitizerCoveragePCGUARD.so.cc
+++ b/instrumentation/SanitizerCoveragePCGUARD.so.cc
@@ -225,10 +225,8 @@ llvmGetPassPluginInfo() {
 
 }
 
-#if LLVM_VERSION_MAJOR == 1
 PreservedAnalyses ModuleSanitizerCoverageAFL::run(Module                &M,
                                                   ModuleAnalysisManager &MAM) {
-
   ModuleSanitizerCoverageAFL ModuleSancov(Options);
   auto &FAM = MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
   auto  DTCallback = [&FAM](Function &F) -> const DominatorTree  *{
@@ -243,50 +241,12 @@ PreservedAnalyses ModuleSanitizerCoverageAFL::run(Module                &M,
 
   };
 
-  if (!ModuleSancov.instrumentModule(M, DTCallback, PDTCallback))
-    return PreservedAnalyses::all();
-
-  PreservedAnalyses PA = PreservedAnalyses::none();
-  // GlobalsAA is considered stateless and does not get invalidated unless
-  // explicitly invalidated; PreservedAnalyses::none() is not enough. Sanitizers
-  // make changes that require GlobalsAA to be invalidated.
-  PA.abandon<GlobalsAA>();
-  return PA;
-
-}
-
-#else
-  #if LLVM_VERSION_MAJOR >= 16
-PreservedAnalyses ModuleSanitizerCoverageAFL::run(Module &M,
-                                                  ModuleAnalysisManager &MAM) {
-
-  #else
-PreservedAnalyses ModuleSanitizerCoverageAFL::run(Module                &M,
-                                                  ModuleAnalysisManager &MAM) {
-
-  #endif
-  ModuleSanitizerCoverageAFL ModuleSancov(Options);
-  auto &FAM = MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
-  auto DTCallback = [&FAM](Function &F) -> const DominatorTree * {
-
-    return &FAM.getResult<DominatorTreeAnalysis>(F);
-
-  };
-
-  auto PDTCallback = [&FAM](Function &F) -> const PostDominatorTree * {
-
-    return &FAM.getResult<PostDominatorTreeAnalysis>(F);
-
-  };
-
   if (ModuleSancov.instrumentModule(M, DTCallback, PDTCallback))
     return PreservedAnalyses::none();
   return PreservedAnalyses::all();
 
 }
 
-#endif
-
 std::pair<Value *, Value *> ModuleSanitizerCoverageAFL::CreateSecStartEnd(
     Module &M, const char *Section, Type *Ty) {
 
@@ -892,7 +852,7 @@ bool ModuleSanitizerCoverageAFL::InjectCoverage(
                   IRB.CreatePointerCast(FunctionGuardArray, IntptrTy),
                   ConstantInt::get(
                       IntptrTy,
-                      (cnt_cov + ++local_selects + AllBlocks.size()) * 4)),
+                      (cnt_cov + local_selects++ + AllBlocks.size()) * 4)),
               Int32PtrTy);
 
           auto GuardPtr2 = IRB.CreateIntToPtr(
@@ -900,7 +860,7 @@ bool ModuleSanitizerCoverageAFL::InjectCoverage(
                   IRB.CreatePointerCast(FunctionGuardArray, IntptrTy),
                   ConstantInt::get(
                       IntptrTy,
-                      (cnt_cov + ++local_selects + AllBlocks.size()) * 4)),
+                      (cnt_cov + local_selects++ + AllBlocks.size()) * 4)),
               Int32PtrTy);
 
           result = IRB.CreateSelect(condition, GuardPtr1, GuardPtr2);
@@ -937,7 +897,7 @@ bool ModuleSanitizerCoverageAFL::InjectCoverage(
                       IRB.CreatePointerCast(FunctionGuardArray, IntptrTy),
                       ConstantInt::get(
                           IntptrTy,
-                          (cnt_cov + ++local_selects + AllBlocks.size()) * 4)),
+                          (cnt_cov + local_selects++ + AllBlocks.size()) * 4)),
                   Int32PtrTy);
               x = IRB.CreateInsertElement(GuardPtr1, val1, (uint64_t)0);
 
@@ -946,7 +906,7 @@ bool ModuleSanitizerCoverageAFL::InjectCoverage(
                       IRB.CreatePointerCast(FunctionGuardArray, IntptrTy),
                       ConstantInt::get(
                           IntptrTy,
-                          (cnt_cov + ++local_selects + AllBlocks.size()) * 4)),
+                          (cnt_cov + local_selects++ + AllBlocks.size()) * 4)),
                   Int32PtrTy);
               y = IRB.CreateInsertElement(GuardPtr2, val2, (uint64_t)0);
 
@@ -955,7 +915,7 @@ bool ModuleSanitizerCoverageAFL::InjectCoverage(
                 val1 = IRB.CreateIntToPtr(
                     IRB.CreateAdd(
                         IRB.CreatePointerCast(FunctionGuardArray, IntptrTy),
-                        ConstantInt::get(IntptrTy, (cnt_cov + ++local_selects +
+                        ConstantInt::get(IntptrTy, (cnt_cov + local_selects++ +
                                                     AllBlocks.size()) *
                                                        4)),
                     Int32PtrTy);
@@ -964,7 +924,7 @@ bool ModuleSanitizerCoverageAFL::InjectCoverage(
                 val2 = IRB.CreateIntToPtr(
                     IRB.CreateAdd(
                         IRB.CreatePointerCast(FunctionGuardArray, IntptrTy),
-                        ConstantInt::get(IntptrTy, (cnt_cov + ++local_selects +
+                        ConstantInt::get(IntptrTy, (cnt_cov + local_selects++ +
                                                     AllBlocks.size()) *
                                                        4)),
                     Int32PtrTy);
diff --git a/instrumentation/split-compares-pass.so.cc b/instrumentation/split-compares-pass.so.cc
index aec6758e..6eafb332 100644
--- a/instrumentation/split-compares-pass.so.cc
+++ b/instrumentation/split-compares-pass.so.cc
@@ -463,8 +463,12 @@ bool SplitComparesTransform::simplifyOrEqualsCompare(CmpInst     *IcmpInst,
 #else
   ReplaceInstWithInst(IcmpInst->getParent()->getInstList(), ii, PN);
 #endif
+  if (new_pred == CmpInst::ICMP_SGT || new_pred == CmpInst::ICMP_SLT) {
+
+    simplifySignedCompare(icmp_np, M, worklist);
+
+  }
 
-  worklist.push_back(icmp_np);
   worklist.push_back(icmp_eq);
 
   return true;
@@ -740,17 +744,24 @@ bool SplitComparesTransform::splitCompare(CmpInst *cmp_inst, Module &M,
       CmpInst     *icmp_inv_cmp = nullptr;
       BasicBlock  *inv_cmp_bb =
           BasicBlock::Create(C, "inv_cmp", end_bb->getParent(), end_bb);
-      if (pred == CmpInst::ICMP_UGT || pred == CmpInst::ICMP_SGT ||
-          pred == CmpInst::ICMP_UGE || pred == CmpInst::ICMP_SGE) {
+      if (pred == CmpInst::ICMP_UGT) {
 
         icmp_inv_cmp = CmpInst::Create(Instruction::ICmp, CmpInst::ICMP_ULT,
                                        op0_high, op1_high);
 
-      } else {
+      } else if (pred == CmpInst::ICMP_ULT) {
 
         icmp_inv_cmp = CmpInst::Create(Instruction::ICmp, CmpInst::ICMP_UGT,
                                        op0_high, op1_high);
 
+      } else {
+
+        // Never gonna happen
+        if (!be_quiet)
+          fprintf(stderr,
+                  "Error: split-compare: Equals or signed not removed: %d\n",
+                  pred);
+
       }
 
 #if LLVM_MAJOR >= 16
diff --git a/qemu_mode/libqasan/dlmalloc.c b/qemu_mode/libqasan/dlmalloc.c
index 5d0b65ce..b459eb7b 100644
--- a/qemu_mode/libqasan/dlmalloc.c
+++ b/qemu_mode/libqasan/dlmalloc.c
@@ -1762,7 +1762,7 @@ static FORCEINLINE void *win32direct_mmap(size_t size) {
 static FORCEINLINE int win32munmap(void *ptr, size_t size) {
 
   MEMORY_BASIC_INFORMATION minfo;
-  char *cptr = (char *)ptr;
+  char                    *cptr = (char *)ptr;
 
   while (size) {
 
diff --git a/src/afl-fuzz-bitmap.c b/src/afl-fuzz-bitmap.c
index 556bb5d1..fb8a1d4b 100644
--- a/src/afl-fuzz-bitmap.c
+++ b/src/afl-fuzz-bitmap.c
@@ -533,6 +533,18 @@ save_if_interesting(afl_state_t *afl, void *mem, u32 len, u8 fault) {
     close(fd);
     add_to_queue(afl, queue_fn, len, 0);
 
+    if (unlikely(afl->fuzz_mode) && likely(afl->switch_fuzz_mode)) {
+
+      if (afl->afl_env.afl_no_ui) {
+
+        ACTF("New coverage found, switching back to exploration mode.");
+
+      }
+
+      afl->fuzz_mode = 0;
+
+    }
+
 #ifdef INTROSPECTION
     if (afl->custom_mutators_count && afl->current_custom_fuzz) {
 
diff --git a/src/afl-fuzz-init.c b/src/afl-fuzz-init.c
index 13802f40..24fd7077 100644
--- a/src/afl-fuzz-init.c
+++ b/src/afl-fuzz-init.c
@@ -1542,8 +1542,8 @@ double get_runnable_processes(void) {
      processes well. */
 
   FILE *f = fopen("/proc/stat", "r");
-  u8 tmp[1024];
-  u32 val = 0;
+  u8    tmp[1024];
+  u32   val = 0;
 
   if (!f) { return 0; }
 
@@ -2226,7 +2226,7 @@ void check_crash_handling(void) {
    *BSD, so we can just let it slide for now. */
 
   s32 fd = open("/proc/sys/kernel/core_pattern", O_RDONLY);
-  u8 fchar;
+  u8  fchar;
 
   if (fd < 0) { return; }
 
@@ -2365,7 +2365,7 @@ void check_cpu_governor(afl_state_t *afl) {
   FATAL("Suboptimal CPU scaling governor");
 
 #elif defined __APPLE__
-  u64 min = 0, max = 0;
+  u64    min = 0, max = 0;
   size_t mlen = sizeof(min);
   if (afl->afl_env.afl_skip_cpufreq) return;
 
diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c
index 5c71fc59..4efc661e 100644
--- a/src/afl-fuzz-one.c
+++ b/src/afl-fuzz-one.c
@@ -27,6 +27,7 @@
 #include <string.h>
 #include <limits.h>
 #include "cmplog.h"
+#include "afl-mutations.h"
 
 /* MOpt */
 
@@ -70,50 +71,6 @@ static int select_algorithm(afl_state_t *afl, u32 max_algorithm) {
 
 }
 
-/* Helper to choose random block len for block operations in fuzz_one().
-   Doesn't return zero, provided that max_len is > 0. */
-
-static inline u32 choose_block_len(afl_state_t *afl, u32 limit) {
-
-  u32 min_value, max_value;
-  u32 rlim = MIN(afl->queue_cycle, (u32)3);
-
-  if (unlikely(!afl->run_over10m)) { rlim = 1; }
-
-  switch (rand_below(afl, rlim)) {
-
-    case 0:
-      min_value = 1;
-      max_value = HAVOC_BLK_SMALL;
-      break;
-
-    case 1:
-      min_value = HAVOC_BLK_SMALL;
-      max_value = HAVOC_BLK_MEDIUM;
-      break;
-
-    default:
-
-      if (likely(rand_below(afl, 10))) {
-
-        min_value = HAVOC_BLK_MEDIUM;
-        max_value = HAVOC_BLK_LARGE;
-
-      } else {
-
-        min_value = HAVOC_BLK_LARGE;
-        max_value = HAVOC_BLK_XL;
-
-      }
-
-  }
-
-  if (min_value >= limit) { min_value = 1; }
-
-  return min_value + rand_below(afl, MIN(max_value, limit) - min_value + 1);
-
-}
-
 /* Helper function to see if a particular change (xor_val = old ^ new) could
    be a product of deterministic bit flips with the lengths and stepovers
    attempted by afl-fuzz. This is used to avoid dupes in some of the
@@ -445,10 +402,12 @@ u8 fuzz_one_original(afl_state_t *afl) {
   if (unlikely(afl->not_on_tty)) {
 
     ACTF(
-        "Fuzzing test case #%u (%u total, %llu crashes saved, "
+        "Fuzzing test case #%u (%u total, %llu crashes saved, state: %s, "
+        "mode=%s, "
         "perf_score=%0.0f, weight=%0.0f, favorite=%u, was_fuzzed=%u, "
         "exec_us=%llu, hits=%u, map=%u, ascii=%u)...",
         afl->current_entry, afl->queued_items, afl->saved_crashes,
+        get_fuzzing_state(afl), afl->fuzz_mode ? "exploit" : "explore",
         afl->queue_cur->perf_score, afl->queue_cur->weight,
         afl->queue_cur->favored, afl->queue_cur->was_fuzzed,
         afl->queue_cur->exec_us,
@@ -2123,45 +2082,83 @@ havoc_stage:
   /* We essentially just do several thousand runs (depending on perf_score)
      where we take the input file and make random stacked tweaks. */
 
-#define MAX_HAVOC_ENTRY 64
-#define MUTATE_ASCII_DICT 64
-
-  u32 r_max, r;
+  u32 *mutation_array;
+  u32  stack_max, rand_max;  // stack_max_pow = afl->havoc_stack_pow2;
 
-  r_max = (MAX_HAVOC_ENTRY + 1) + (afl->extras_cnt ? 4 : 0) +
-          (afl->a_extras_cnt
-               ? (unlikely(afl->cmplog_binary && afl->queue_cur->is_ascii)
-                      ? MUTATE_ASCII_DICT
-                      : 4)
-               : 0);
+  /*
 
   if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) {
 
-    /* add expensive havoc cases here, they are activated after a full
-       cycle without finds happened */
+    mutation_array = full_splice_array;
+    rand_max = MUT_SPLICE_ARRAY_SIZE;
+
+  } else {
+
+    mutation_array = normal_splice_array;
+    rand_max = MUT_NORMAL_ARRAY_SIZE;
+
+  }
+
+  */
+
+  rand_max = MUT_STRATEGY_ARRAY_SIZE;
+
+  if (unlikely(afl->text_input /*|| afl->queue_cur->is_ascii*/)) {  // is text?
+
+    if (likely(afl->fuzz_mode == 0)) {  // is exploration?
+
+      mutation_array = (unsigned int *)&mutation_strategy_exploration_text;
+
+    } else {  // is exploitation!
+
+      mutation_array = (unsigned int *)&mutation_strategy_exploitation_text;
+
+    }
+
+  } else {  // is binary!
+
+    if (likely(afl->fuzz_mode == 0)) {  // is exploration?
 
-    r_max += 4;
+      mutation_array = (unsigned int *)&mutation_strategy_exploration_binary;
+
+    } else {  // is exploitation!
+
+      mutation_array = (unsigned int *)&mutation_strategy_exploitation_binary;
+
+    }
 
   }
 
-  if (unlikely(get_cur_time() - afl->last_find_time > 5000 /* 5 seconds */ &&
-               afl->ready_for_splicing_count > 1)) {
+  /*
+  if (temp_len < 64) {
+
+    --stack_max_pow;
+
+  } else if (temp_len <= 8096) {
 
-    /* add expensive havoc cases here if there is no findings in the last 5s */
+    ++stack_max_pow;
 
-    r_max += 4;
+  } else {
+
+    ++stack_max_pow;
 
   }
 
+  */
+
+  stack_max = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
+
+  // + (afl->extras_cnt ? 2 : 0) + (afl->a_extras_cnt ? 2 : 0);
+
   for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
 
-    u32 use_stacking = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2));
+    u32 use_stacking = 1 + rand_below(afl, stack_max);
 
     afl->stage_cur_val = use_stacking;
 
 #ifdef INTROSPECTION
-    snprintf(afl->mutation, sizeof(afl->mutation), "%s HAVOC-%u",
-             afl->queue_cur->fname, use_stacking);
+    snprintf(afl->mutation, sizeof(afl->mutation), "%s HAVOC-%u-%u",
+             afl->queue_cur->fname, afl->queue_cur->is_ascii, use_stacking);
 #endif
 
     for (i = 0; i < use_stacking; ++i) {
@@ -2170,8 +2167,8 @@ havoc_stage:
 
         LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, {
 
-          if (el->stacked_custom &&
-              rand_below(afl, 100) < el->stacked_custom_prob) {
+          if (unlikely(el->stacked_custom &&
+                       rand_below(afl, 100) < el->stacked_custom_prob)) {
 
             u8    *custom_havoc_buf = NULL;
             size_t new_len = el->afl_custom_havoc_mutation(
@@ -2201,159 +2198,173 @@ havoc_stage:
 
       }
 
-      switch ((r = rand_below(afl, r_max))) {
+    retry_havoc_step : {
+
+      u32 r = rand_below(afl, rand_max), item;
+
+      switch (mutation_array[r]) {
 
-        case 0 ... 3: {
+        case MUT_FLIPBIT: {
 
           /* Flip a single bit somewhere. Spooky! */
+          u8  bit = rand_below(afl, 8);
+          u32 off = rand_below(afl, temp_len);
+          out_buf[off] ^= 1 << bit;
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP_BIT1");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP-BIT_%u", bit);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          FLIP_BIT(out_buf, rand_below(afl, temp_len << 3));
           break;
 
         }
 
-        case 4 ... 7: {
+        case MUT_INTERESTING8: {
 
           /* Set byte to interesting value. */
 
+          item = rand_below(afl, sizeof(interesting_8));
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING8_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          out_buf[rand_below(afl, temp_len)] =
-              interesting_8[rand_below(afl, sizeof(interesting_8))];
+          out_buf[rand_below(afl, temp_len)] = interesting_8[item];
           break;
 
         }
 
-        case 8 ... 9: {
+        case MUT_INTERESTING16: {
 
           /* Set word to interesting value, little endian. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
+          item = rand_below(afl, sizeof(interesting_16) >> 1);
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
+
           *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
-              interesting_16[rand_below(afl, sizeof(interesting_16) >> 1)];
+              interesting_16[item];
 
           break;
 
         }
 
-        case 10 ... 11: {
+        case MUT_INTERESTING16BE: {
 
           /* Set word to interesting value, big endian. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
+          item = rand_below(afl, sizeof(interesting_16) >> 1);
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING16BE_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) = SWAP16(
-              interesting_16[rand_below(afl, sizeof(interesting_16) >> 1)]);
+          *(u16 *)(out_buf + rand_below(afl, temp_len - 1)) =
+              SWAP16(interesting_16[item]);
 
           break;
 
         }
 
-        case 12 ... 13: {
+        case MUT_INTERESTING32: {
 
           /* Set dword to interesting value, little endian. */
 
-          if (temp_len < 4) { break; }
+          if (unlikely(temp_len < 4)) { break; }  // no retry
 
+          item = rand_below(afl, sizeof(interesting_32) >> 2);
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
+
           *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
-              interesting_32[rand_below(afl, sizeof(interesting_32) >> 2)];
+              interesting_32[item];
 
           break;
 
         }
 
-        case 14 ... 15: {
+        case MUT_INTERESTING32BE: {
 
           /* Set dword to interesting value, big endian. */
 
-          if (temp_len < 4) { break; }
+          if (unlikely(temp_len < 4)) { break; }  // no retry
 
+          item = rand_below(afl, sizeof(interesting_32) >> 2);
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INTERESTING32BE_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) = SWAP32(
-              interesting_32[rand_below(afl, sizeof(interesting_32) >> 2)]);
+          *(u32 *)(out_buf + rand_below(afl, temp_len - 3)) =
+              SWAP32(interesting_32[item]);
 
           break;
 
         }
 
-        case 16 ... 19: {
+        case MUT_ARITH8_: {
 
           /* Randomly subtract from byte. */
 
+          item = 1 + rand_below(afl, ARITH_MAX);
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8_");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8-_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          out_buf[rand_below(afl, temp_len)] -= 1 + rand_below(afl, ARITH_MAX);
+          out_buf[rand_below(afl, temp_len)] -= item;
           break;
 
         }
 
-        case 20 ... 23: {
+        case MUT_ARITH8: {
 
           /* Randomly add to byte. */
 
+          item = 1 + rand_below(afl, ARITH_MAX);
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8+");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH8+_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          out_buf[rand_below(afl, temp_len)] += 1 + rand_below(afl, ARITH_MAX);
+          out_buf[rand_below(afl, temp_len)] += item;
           break;
 
         }
 
-        case 24 ... 25: {
+        case MUT_ARITH16_: {
 
           /* Randomly subtract from word, little endian. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 1);
+          item = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16_-%u", pos);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16-_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          *(u16 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
+          *(u16 *)(out_buf + pos) -= item;
 
           break;
 
         }
 
-        case 26 ... 27: {
+        case MUT_ARITH16BE_: {
 
           /* Randomly subtract from word, big endian. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 1);
           u16 num = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16_BE-%u_%u", pos,
-                   num);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE-_%u", num);
           strcat(afl->mutation, afl->m_tmp);
 #endif
           *(u16 *)(out_buf + pos) =
@@ -2363,36 +2374,36 @@ havoc_stage:
 
         }
 
-        case 28 ... 29: {
+        case MUT_ARITH16: {
 
           /* Randomly add to word, little endian. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 1);
+          item = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+-%u", pos);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          *(u16 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
+          *(u16 *)(out_buf + pos) += item;
 
           break;
 
         }
 
-        case 30 ... 31: {
+        case MUT_ARITH16BE: {
 
           /* Randomly add to word, big endian. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 1);
           u16 num = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16+BE-%u_%u", pos,
-                   num);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH16BE+_%u", num);
           strcat(afl->mutation, afl->m_tmp);
 #endif
           *(u16 *)(out_buf + pos) =
@@ -2402,36 +2413,36 @@ havoc_stage:
 
         }
 
-        case 32 ... 33: {
+        case MUT_ARITH32_: {
 
           /* Randomly subtract from dword, little endian. */
 
-          if (temp_len < 4) { break; }
+          if (unlikely(temp_len < 4)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 3);
+          item = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_-%u", pos);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32-_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          *(u32 *)(out_buf + pos) -= 1 + rand_below(afl, ARITH_MAX);
+          *(u32 *)(out_buf + pos) -= item;
 
           break;
 
         }
 
-        case 34 ... 35: {
+        case MUT_ARITH32BE_: {
 
           /* Randomly subtract from dword, big endian. */
 
-          if (temp_len < 4) { break; }
+          if (unlikely(temp_len < 4)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 3);
           u32 num = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32_BE-%u-%u", pos,
-                   num);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE-_%u", num);
           strcat(afl->mutation, afl->m_tmp);
 #endif
           *(u32 *)(out_buf + pos) =
@@ -2441,36 +2452,36 @@ havoc_stage:
 
         }
 
-        case 36 ... 37: {
+        case MUT_ARITH32: {
 
           /* Randomly add to dword, little endian. */
 
-          if (temp_len < 4) { break; }
+          if (unlikely(temp_len < 4)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 3);
+          item = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+-%u", pos);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+_%u", item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          *(u32 *)(out_buf + pos) += 1 + rand_below(afl, ARITH_MAX);
+          *(u32 *)(out_buf + pos) += item;
 
           break;
 
         }
 
-        case 38 ... 39: {
+        case MUT_ARITH32BE: {
 
           /* Randomly add to dword, big endian. */
 
-          if (temp_len < 4) { break; }
+          if (unlikely(temp_len < 4)) { break; }  // no retry
 
           u32 pos = rand_below(afl, temp_len - 3);
           u32 num = 1 + rand_below(afl, ARITH_MAX);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32+BE-%u-%u", pos,
-                   num);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ARITH32BE+_%u", num);
           strcat(afl->mutation, afl->m_tmp);
 #endif
           *(u32 *)(out_buf + pos) =
@@ -2480,24 +2491,27 @@ havoc_stage:
 
         }
 
-        case 40 ... 43: {
+        case MUT_RAND8: {
 
           /* Just set a random byte to a random value. Because,
              why not. We use XOR with 1-255 to eliminate the
              possibility of a no-op. */
 
+          u32 pos = rand_below(afl, temp_len);
+          item = 1 + rand_below(afl, 255);
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " RAND8_%u",
+                   out_buf[pos] ^ item);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          out_buf[rand_below(afl, temp_len)] ^= 1 + rand_below(afl, 255);
+          out_buf[pos] ^= item;
           break;
 
         }
 
-        case 44 ... 46: {
+        case MUT_CLONE_COPY: {
 
-          if (temp_len + HAVOC_BLK_XL < MAX_FILE) {
+          if (likely(temp_len + HAVOC_BLK_XL < MAX_FILE)) {
 
             /* Clone bytes. */
 
@@ -2506,8 +2520,8 @@ havoc_stage:
             u32 clone_to = rand_below(afl, temp_len);
 
 #ifdef INTROSPECTION
-            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s-%u-%u-%u",
-                     "clone", clone_from, clone_to, clone_len);
+            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s_%u_%u_%u",
+                     "COPY", clone_from, clone_to, clone_len);
             strcat(afl->mutation, afl->m_tmp);
 #endif
             u8 *new_buf =
@@ -2530,24 +2544,35 @@ havoc_stage:
             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
             temp_len += clone_len;
 
+          } else if (unlikely(temp_len < 8)) {
+
+            break;
+
+          } else {
+
+            goto retry_havoc_step;
+
           }
 
           break;
 
         }
 
-        case 47: {
+        case MUT_CLONE_FIXED: {
 
-          if (temp_len + HAVOC_BLK_XL < MAX_FILE) {
+          if (likely(temp_len + HAVOC_BLK_XL < MAX_FILE)) {
 
             /* Insert a block of constant bytes (25%). */
 
             u32 clone_len = choose_block_len(afl, HAVOC_BLK_XL);
             u32 clone_to = rand_below(afl, temp_len);
+            u32 strat = rand_below(afl, 2);
+            u32 clone_from = clone_to ? clone_to - 1 : 0;
+            item = strat ? rand_below(afl, 256) : out_buf[clone_from];
 
 #ifdef INTROSPECTION
-            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s-%u-%u",
-                     "insert", clone_to, clone_len);
+            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s_%u_%u_%u",
+                     "FIXED", strat, clone_to, clone_len);
             strcat(afl->mutation, afl->m_tmp);
 #endif
             u8 *new_buf =
@@ -2560,10 +2585,7 @@ havoc_stage:
 
             /* Inserted part */
 
-            memset(new_buf + clone_to,
-                   rand_below(afl, 2) ? rand_below(afl, 256)
-                                      : out_buf[rand_below(afl, temp_len)],
-                   clone_len);
+            memset(new_buf + clone_to, item, clone_len);
 
             /* Tail */
             memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
@@ -2573,66 +2595,77 @@ havoc_stage:
             afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
             temp_len += clone_len;
 
+          } else if (unlikely(temp_len < 8)) {
+
+            break;
+
+          } else {
+
+            goto retry_havoc_step;
+
           }
 
           break;
 
         }
 
-        case 48 ... 50: {
+        case MUT_OVERWRITE_COPY: {
 
           /* Overwrite bytes with a randomly selected chunk bytes. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
-          u32 copy_len = choose_block_len(afl, temp_len - 1);
-          u32 copy_from = rand_below(afl, temp_len - copy_len + 1);
-          u32 copy_to = rand_below(afl, temp_len - copy_len + 1);
+          u32 copy_from, copy_to,
+              copy_len = choose_block_len(afl, temp_len - 1);
+
+          do {
+
+            copy_from = rand_below(afl, temp_len - copy_len + 1);
+            copy_to = rand_below(afl, temp_len - copy_len + 1);
 
-          if (likely(copy_from != copy_to)) {
+          } while (unlikely(copy_from == copy_to));
 
 #ifdef INTROSPECTION
-            snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE_COPY-%u-%u-%u",
-                     copy_from, copy_to, copy_len);
-            strcat(afl->mutation, afl->m_tmp);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE-COPY_%u_%u_%u",
+                   copy_from, copy_to, copy_len);
+          strcat(afl->mutation, afl->m_tmp);
 #endif
-            memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
-
-          }
+          memmove(out_buf + copy_to, out_buf + copy_from, copy_len);
 
           break;
 
         }
 
-        case 51: {
+        case MUT_OVERWRITE_FIXED: {
 
           /* Overwrite bytes with fixed bytes. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
           u32 copy_len = choose_block_len(afl, temp_len - 1);
           u32 copy_to = rand_below(afl, temp_len - copy_len + 1);
+          u32 strat = rand_below(afl, 2);
+          u32 copy_from = copy_to ? copy_to - 1 : 0;
+          item = strat ? rand_below(afl, 256) : out_buf[copy_from];
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE_FIXED-%u-%u",
-                   copy_to, copy_len);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp),
+                   " OVERWRITE-FIXED_%u_%u_%u-%u", strat, item, copy_to,
+                   copy_len);
           strcat(afl->mutation, afl->m_tmp);
 #endif
-          memset(out_buf + copy_to,
-                 rand_below(afl, 2) ? rand_below(afl, 256)
-                                    : out_buf[rand_below(afl, temp_len)],
-                 copy_len);
+          memset(out_buf + copy_to, item, copy_len);
 
           break;
 
         }
 
-        case 52: {
+        case MUT_BYTEADD: {
 
           /* Increase byte by 1. */
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ADDBYTE_");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " BYTEADD_");
           strcat(afl->mutation, afl->m_tmp);
 #endif
           out_buf[rand_below(afl, temp_len)]++;
@@ -2640,12 +2673,12 @@ havoc_stage:
 
         }
 
-        case 53: {
+        case MUT_BYTESUB: {
 
           /* Decrease byte by 1. */
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SUBBYTE_");
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " BYTESUB_");
           strcat(afl->mutation, afl->m_tmp);
 #endif
           out_buf[rand_below(afl, temp_len)]--;
@@ -2653,9 +2686,9 @@ havoc_stage:
 
         }
 
-        case 54: {
+        case MUT_FLIP8: {
 
-          /* Flip byte. */
+          /* Flip a byte by XORing it with 0xff. This is the same as NEG. */
 
 #ifdef INTROSPECTION
           snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP8_");
@@ -2666,9 +2699,9 @@ havoc_stage:
 
         }
 
-        case 55 ... 56: {
+        case MUT_SWITCH: {
 
-          if (temp_len < 4) { break; }
+          if (unlikely(temp_len < 4)) { break; }  // no retry
 
           /* Switch bytes. */
 
@@ -2678,7 +2711,7 @@ havoc_stage:
 
             switch_to = rand_below(afl, temp_len);
 
-          } while (switch_from == switch_to);
+          } while (unlikely(switch_from == switch_to));
 
           if (switch_from < switch_to) {
 
@@ -2695,7 +2728,7 @@ havoc_stage:
           switch_len = choose_block_len(afl, MIN(switch_len, to_end));
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SWITCH-%s-%u-%u-%u",
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SWITCH-%s_%u_%u_%u",
                    "switch", switch_from, switch_to, switch_len);
           strcat(afl->mutation, afl->m_tmp);
 #endif
@@ -2718,12 +2751,11 @@ havoc_stage:
 
         }
 
-        // MAX_HAVOC_ENTRY = 64
-        case 57 ... MAX_HAVOC_ENTRY: {
+        case MUT_DEL: {
 
           /* Delete bytes. */
 
-          if (temp_len < 2) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
           /* Don't delete too much. */
 
@@ -2731,7 +2763,7 @@ havoc_stage:
           u32 del_from = rand_below(afl, temp_len - del_len + 1);
 
 #ifdef INTROSPECTION
-          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL-%u-%u", del_from,
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DEL_%u_%u", del_from,
                    del_len);
           strcat(afl->mutation, afl->m_tmp);
 #endif
@@ -2744,135 +2776,401 @@ havoc_stage:
 
         }
 
-        default:
+        case MUT_SHUFFLE: {
+
+          /* Shuffle bytes. */
+
+          if (unlikely(temp_len < 4)) { break; }  // no retry
+
+          u32 len = choose_block_len(afl, temp_len - 1);
+          u32 off = rand_below(afl, temp_len - len + 1);
+
+#ifdef INTROSPECTION
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SHUFFLE_%u", len);
+          strcat(afl->mutation, afl->m_tmp);
+#endif
+
+          for (u32 i = len - 1; i > 0; i--) {
 
-          r -= (MAX_HAVOC_ENTRY + 1);
+            u32 j;
+            do {
+
+              j = rand_below(afl, i + 1);
+
+            } while (unlikely(i == j));
+
+            unsigned char temp = out_buf[off + i];
+            out_buf[off + i] = out_buf[off + j];
+            out_buf[off + j] = temp;
+
+          }
+
+          break;
+
+        }
 
-          if (afl->extras_cnt) {
+        case MUT_DELONE: {
 
-            if (r < 2) {
+          /* Delete bytes. */
 
-              /* Use the dictionary. */
+          if (unlikely(temp_len < 2)) { break; }  // no retry
 
-              u32 use_extra = rand_below(afl, afl->extras_cnt);
-              u32 extra_len = afl->extras[use_extra].len;
+          /* Don't delete too much. */
 
-              if (extra_len > temp_len) { break; }
+          u32 del_len = 1;
+          u32 del_from = rand_below(afl, temp_len - del_len + 1);
 
-              u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
 #ifdef INTROSPECTION
-              snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA_OVERWRITE-%u-%u",
-                       insert_at, extra_len);
-              strcat(afl->mutation, afl->m_tmp);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " DELONE_%u", del_from);
+          strcat(afl->mutation, afl->m_tmp);
 #endif
-              memcpy(out_buf + insert_at, afl->extras[use_extra].data,
-                     extra_len);
+          memmove(out_buf + del_from, out_buf + del_from + del_len,
+                  temp_len - del_from - del_len);
 
-              break;
+          temp_len -= del_len;
+
+          break;
+
+        }
 
-            } else if (r < 4) {
+        case MUT_INSERTONE: {
 
-              u32 use_extra = rand_below(afl, afl->extras_cnt);
-              u32 extra_len = afl->extras[use_extra].len;
-              if (temp_len + extra_len >= MAX_FILE) { break; }
+          if (unlikely(temp_len < 2)) { break; }  // no retry
+
+          u32 clone_len = 1;
+          u32 clone_to = rand_below(afl, temp_len);
+          u32 strat = rand_below(afl, 2);
+          u32 clone_from = clone_to ? clone_to - 1 : 0;
+          item = strat ? rand_below(afl, 256) : out_buf[clone_from];
 
-              u8 *ptr = afl->extras[use_extra].data;
-              u32 insert_at = rand_below(afl, temp_len + 1);
 #ifdef INTROSPECTION
-              snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA_INSERT-%u-%u",
-                       insert_at, extra_len);
-              strcat(afl->mutation, afl->m_tmp);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INSERTONE_%u_%u", strat,
+                   clone_to);
+          strcat(afl->mutation, afl->m_tmp);
 #endif
+          u8 *new_buf =
+              afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
+          if (unlikely(!new_buf)) { PFATAL("alloc"); }
 
-              out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
-              if (unlikely(!out_buf)) { PFATAL("alloc"); }
+          /* Head */
 
-              /* Tail */
-              memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
-                      temp_len - insert_at);
+          memcpy(new_buf, out_buf, clone_to);
 
-              /* Inserted part */
-              memcpy(out_buf + insert_at, ptr, extra_len);
-              temp_len += extra_len;
+          /* Inserted part */
 
-              break;
+          memset(new_buf + clone_to, item, clone_len);
 
-            } else {
+          /* Tail */
+          memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
+                 temp_len - clone_to);
 
-              r -= 4;
+          out_buf = new_buf;
+          afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
+          temp_len += clone_len;
 
-            }
+          break;
+
+        }
+
+        case MUT_ASCIINUM: {
+
+          if (unlikely(temp_len < 4)) { break; }  // no retry
+
+          u32 off = rand_below(afl, temp_len), off2 = off, cnt = 0;
+
+          while (off2 + cnt < temp_len && !isdigit(out_buf[off2 + cnt])) {
+
+            ++cnt;
 
           }
 
-          if (afl->a_extras_cnt) {
+          // none found, wrap
+          if (off2 + cnt == temp_len) {
 
-            u32 r_cmp = 2;
+            off2 = 0;
+            cnt = 0;
 
-            if (unlikely(afl->cmplog_binary && afl->queue_cur->is_ascii)) {
+            while (cnt < off && !isdigit(out_buf[off2 + cnt])) {
 
-              r_cmp = MUTATE_ASCII_DICT >> 1;
+              ++cnt;
 
             }
 
-            if (r < r_cmp) {
+            if (cnt == off) {
+
+              if (temp_len < 8) {
+
+                break;
 
-              /* Use the dictionary. */
+              } else {
 
-              u32 use_extra = rand_below(afl, afl->a_extras_cnt);
-              u32 extra_len = afl->a_extras[use_extra].len;
+                goto retry_havoc_step;
 
-              if (extra_len > temp_len) { break; }
+              }
 
-              u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
-#ifdef INTROSPECTION
-              snprintf(afl->m_tmp, sizeof(afl->m_tmp),
-                       " AUTO_EXTRA_OVERWRITE-%u-%u", insert_at, extra_len);
-              strcat(afl->mutation, afl->m_tmp);
-#endif
-              memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,
-                     extra_len);
+            }
+
+          }
+
+          off = off2 + cnt;
+          off2 = off + 1;
 
+          while (off2 < temp_len && isdigit(out_buf[off2])) {
+
+            ++off2;
+
+          }
+
+          s64 val = out_buf[off] - '0';
+          for (u32 i = off + 1; i < off2; ++i) {
+
+            val = (val * 10) + out_buf[i] - '0';
+
+          }
+
+          if (off && out_buf[off - 1] == '-') { val = -val; }
+
+          u32 strat = rand_below(afl, 8);
+          switch (strat) {
+
+            case 0:
+              val++;
+              break;
+            case 1:
+              val--;
+              break;
+            case 2:
+              val *= 2;
               break;
+            case 3:
+              val /= 2;
+              break;
+            case 4:
+              if (likely(val && (u64)val < 0x19999999)) {
+
+                val = (u64)rand_next(afl) % (u64)((u64)val * 10);
 
-            } else if (r < (r_cmp << 1)) {
+              } else {
+
+                val = rand_below(afl, 256);
+
+              }
+
+              break;
+            case 5:
+              val += rand_below(afl, 256);
+              break;
+            case 6:
+              val -= rand_below(afl, 256);
+              break;
+            case 7:
+              val = ~(val);
+              break;
 
-              u32 use_extra = rand_below(afl, afl->a_extras_cnt);
-              u32 extra_len = afl->a_extras[use_extra].len;
-              if (temp_len + extra_len >= MAX_FILE) { break; }
+          }
 
-              u8 *ptr = afl->a_extras[use_extra].data;
-              u32 insert_at = rand_below(afl, temp_len + 1);
 #ifdef INTROSPECTION
-              snprintf(afl->m_tmp, sizeof(afl->m_tmp),
-                       " AUTO_EXTRA_INSERT-%u-%u", insert_at, extra_len);
-              strcat(afl->mutation, afl->m_tmp);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " ASCIINUM_%u_%u_%u",
+                   afl->queue_cur->is_ascii, strat, off);
+          strcat(afl->mutation, afl->m_tmp);
 #endif
+          // fprintf(stderr, "val: %u-%u = %ld\n", off, off2, val);
+
+          char buf[20];
+          snprintf(buf, sizeof(buf), "%ld", val);
+
+          // fprintf(stderr, "BEFORE: %s\n", out_buf);
+
+          u32 old_len = off2 - off;
+          u32 new_len = strlen(buf);
+
+          if (old_len == new_len) {
+
+            memcpy(out_buf + off, buf, new_len);
+
+          } else {
+
+            u8 *new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
+                                      temp_len + new_len - old_len);
+            if (unlikely(!new_buf)) { PFATAL("alloc"); }
+
+            /* Head */
+
+            memcpy(new_buf, out_buf, off);
+
+            /* Inserted part */
+
+            memcpy(new_buf + off, buf, new_len);
+
+            /* Tail */
+            memcpy(new_buf + off + new_len, out_buf + off2, temp_len - off2);
+
+            out_buf = new_buf;
+            afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
+            temp_len += (new_len - old_len);
+
+          }
+
+          // fprintf(stderr, "AFTER : %s\n", out_buf);
+          break;
+
+        }
 
-              out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
-              if (unlikely(!out_buf)) { PFATAL("alloc"); }
+        case MUT_INSERTASCIINUM: {
 
-              /* Tail */
-              memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
-                      temp_len - insert_at);
+          u32 len = 1 + rand_below(afl, 8);
+          u32 pos = rand_below(afl, temp_len);
+          /* Insert ascii number. */
+          if (unlikely(temp_len < pos + len)) {
 
-              /* Inserted part */
-              memcpy(out_buf + insert_at, ptr, extra_len);
-              temp_len += extra_len;
+            if (unlikely(temp_len < 8)) {
 
               break;
 
             } else {
 
-              r -= (r_cmp << 1);
+              goto retry_havoc_step;
 
             }
 
           }
 
-          /* Splicing otherwise if we are still here.
-             Overwrite bytes with a randomly selected chunk from another
-             testcase or insert that chunk. */
+#ifdef INTROSPECTION
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " INSERTASCIINUM_");
+          strcat(afl->mutation, afl->m_tmp);
+#endif
+          u64  val = rand_next(afl);
+          char buf[20];
+          snprintf(buf, sizeof(buf), "%llu", val);
+          memcpy(out_buf + pos, buf, len);
+
+          break;
+
+        }
+
+        case MUT_EXTRA_OVERWRITE: {
+
+          if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; }
+
+          /* Use the dictionary. */
+
+          u32 use_extra = rand_below(afl, afl->extras_cnt);
+          u32 extra_len = afl->extras[use_extra].len;
+
+          if (unlikely(extra_len > temp_len)) { goto retry_havoc_step; }
+
+          u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
+#ifdef INTROSPECTION
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA-OVERWRITE_%u_%u",
+                   insert_at, extra_len);
+          strcat(afl->mutation, afl->m_tmp);
+#endif
+          memcpy(out_buf + insert_at, afl->extras[use_extra].data, extra_len);
+
+          break;
+
+        }
+
+        case MUT_EXTRA_INSERT: {
+
+          if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; }
+
+          u32 use_extra = rand_below(afl, afl->extras_cnt);
+          u32 extra_len = afl->extras[use_extra].len;
+          if (unlikely(temp_len + extra_len >= MAX_FILE)) {
+
+            goto retry_havoc_step;
+
+          }
+
+          u8 *ptr = afl->extras[use_extra].data;
+          u32 insert_at = rand_below(afl, temp_len + 1);
+#ifdef INTROSPECTION
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " EXTRA-INSERT_%u_%u",
+                   insert_at, extra_len);
+          strcat(afl->mutation, afl->m_tmp);
+#endif
+
+          out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
+          if (unlikely(!out_buf)) { PFATAL("alloc"); }
+
+          /* Tail */
+          memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
+                  temp_len - insert_at);
+
+          /* Inserted part */
+          memcpy(out_buf + insert_at, ptr, extra_len);
+          temp_len += extra_len;
+
+          break;
+
+        }
+
+        case MUT_AUTO_EXTRA_OVERWRITE: {
+
+          if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; }
+
+          /* Use the dictionary. */
+
+          u32 use_extra = rand_below(afl, afl->a_extras_cnt);
+          u32 extra_len = afl->a_extras[use_extra].len;
+
+          if (unlikely(extra_len > temp_len)) { goto retry_havoc_step; }
+
+          u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
+#ifdef INTROSPECTION
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp),
+                   " AUTO-EXTRA-OVERWRITE_%u_%u", insert_at, extra_len);
+          strcat(afl->mutation, afl->m_tmp);
+#endif
+          memcpy(out_buf + insert_at, afl->a_extras[use_extra].data, extra_len);
+
+          break;
+
+        }
+
+        case MUT_AUTO_EXTRA_INSERT: {
+
+          if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; }
+
+          u32 use_extra = rand_below(afl, afl->a_extras_cnt);
+          u32 extra_len = afl->a_extras[use_extra].len;
+          if (unlikely(temp_len + extra_len >= MAX_FILE)) {
+
+            goto retry_havoc_step;
+
+          }
+
+          u8 *ptr = afl->a_extras[use_extra].data;
+          u32 insert_at = rand_below(afl, temp_len + 1);
+#ifdef INTROSPECTION
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " AUTO-EXTRA-INSERT_%u_%u",
+                   insert_at, extra_len);
+          strcat(afl->mutation, afl->m_tmp);
+#endif
+
+          out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
+          if (unlikely(!out_buf)) { PFATAL("alloc"); }
+
+          /* Tail */
+          memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
+                  temp_len - insert_at);
+
+          /* Inserted part */
+          memcpy(out_buf + insert_at, ptr, extra_len);
+          temp_len += extra_len;
+
+          break;
+
+        }
+
+        case MUT_SPLICE_OVERWRITE: {
+
+          if (unlikely(afl->ready_for_splicing_count <= 1)) {
+
+            goto retry_havoc_step;
+
+          }
 
           /* Pick a random queue entry and seek to it. */
 
@@ -2881,79 +3179,110 @@ havoc_stage:
 
             tid = rand_below(afl, afl->queued_items);
 
-          } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);
+          } while (unlikely(tid == afl->current_entry ||
+
+                            afl->queue_buf[tid]->len < 4));
 
           /* Get the testcase for splicing. */
           struct queue_entry *target = afl->queue_buf[tid];
           u32                 new_len = target->len;
           u8                 *new_buf = queue_testcase_get(afl, target);
 
-          if ((temp_len >= 2 && r % 2) || temp_len + HAVOC_BLK_XL >= MAX_FILE) {
-
-            /* overwrite mode */
+          /* overwrite mode */
 
-            u32 copy_from, copy_to, copy_len;
+          u32 copy_from, copy_to, copy_len;
 
-            copy_len = choose_block_len(afl, new_len - 1);
-            if (copy_len > temp_len) copy_len = temp_len;
+          copy_len = choose_block_len(afl, new_len - 1);
+          if (copy_len > temp_len) copy_len = temp_len;
 
-            copy_from = rand_below(afl, new_len - copy_len + 1);
-            copy_to = rand_below(afl, temp_len - copy_len + 1);
+          copy_from = rand_below(afl, new_len - copy_len + 1);
+          copy_to = rand_below(afl, temp_len - copy_len + 1);
 
 #ifdef INTROSPECTION
-            snprintf(afl->m_tmp, sizeof(afl->m_tmp),
-                     " SPLICE_OVERWRITE-%u-%u-%u-%s", copy_from, copy_to,
-                     copy_len, target->fname);
-            strcat(afl->mutation, afl->m_tmp);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp),
+                   " SPLICE-OVERWRITE_%u_%u_%u_%s", copy_from, copy_to,
+                   copy_len, target->fname);
+          strcat(afl->mutation, afl->m_tmp);
 #endif
-            memmove(out_buf + copy_to, new_buf + copy_from, copy_len);
+          memmove(out_buf + copy_to, new_buf + copy_from, copy_len);
 
-          } else {
+          break;
 
-            /* insert mode */
+        }
+
+        case MUT_SPLICE_INSERT: {
+
+          if (unlikely(afl->ready_for_splicing_count <= 1)) {
+
+            goto retry_havoc_step;
+
+          }
+
+          if (unlikely(temp_len + HAVOC_BLK_XL >= MAX_FILE)) {
+
+            goto retry_havoc_step;
+
+          }
 
-            u32 clone_from, clone_to, clone_len;
+          /* Pick a random queue entry and seek to it. */
+
+          u32 tid;
+          do {
+
+            tid = rand_below(afl, afl->queued_items);
 
-            clone_len = choose_block_len(afl, new_len);
-            clone_from = rand_below(afl, new_len - clone_len + 1);
-            clone_to = rand_below(afl, temp_len + 1);
+          } while (unlikely(tid == afl->current_entry ||
 
-            u8 *temp_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
-                                       temp_len + clone_len + 1);
-            if (unlikely(!temp_buf)) { PFATAL("alloc"); }
+                            afl->queue_buf[tid]->len < 4));
+
+          /* Get the testcase for splicing. */
+          struct queue_entry *target = afl->queue_buf[tid];
+          u32                 new_len = target->len;
+          u8                 *new_buf = queue_testcase_get(afl, target);
+
+          /* insert mode */
+
+          u32 clone_from, clone_to, clone_len;
+
+          clone_len = choose_block_len(afl, new_len);
+          clone_from = rand_below(afl, new_len - clone_len + 1);
+          clone_to = rand_below(afl, temp_len + 1);
+
+          u8 *temp_buf =
+              afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len + 1);
+          if (unlikely(!temp_buf)) { PFATAL("alloc"); }
 
 #ifdef INTROSPECTION
-            snprintf(afl->m_tmp, sizeof(afl->m_tmp),
-                     " SPLICE_INSERT-%u-%u-%u-%s", clone_from, clone_to,
-                     clone_len, target->fname);
-            strcat(afl->mutation, afl->m_tmp);
+          snprintf(afl->m_tmp, sizeof(afl->m_tmp), " SPLICE-INSERT_%u_%u_%u_%s",
+                   clone_from, clone_to, clone_len, target->fname);
+          strcat(afl->mutation, afl->m_tmp);
 #endif
-            /* Head */
+          /* Head */
 
-            memcpy(temp_buf, out_buf, clone_to);
+          memcpy(temp_buf, out_buf, clone_to);
 
-            /* Inserted part */
+          /* Inserted part */
 
-            memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);
+          memcpy(temp_buf + clone_to, new_buf + clone_from, clone_len);
 
-            /* Tail */
-            memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
-                   temp_len - clone_to);
+          /* Tail */
+          memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
+                 temp_len - clone_to);
 
-            out_buf = temp_buf;
-            afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
-            temp_len += clone_len;
-
-          }
+          out_buf = temp_buf;
+          afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
+          temp_len += clone_len;
 
           break;
 
-          // end of default
+        }
 
       }
 
     }
 
+    }
+
     if (common_fuzz_stuff(afl, out_buf, temp_len)) { goto abandon_entry; }
 
     /* out_buf might have been mangled a bit, so let's restore it to its
@@ -3039,7 +3368,9 @@ retry_splicing:
 
       tid = rand_below(afl, afl->queued_items);
 
-    } while (tid == afl->current_entry || afl->queue_buf[tid]->len < 4);
+    } while (
+
+        unlikely(tid == afl->current_entry || afl->queue_buf[tid]->len < 4));
 
     /* Get the testcase */
     afl->splicing_with = tid;
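The havoc rework above replaces the old numeric case ranges with named MUT_* strategies drawn from per-mode arrays (exploration vs. exploitation, text vs. binary input); the real tables and MUT_* identifiers are defined in a new header elsewhere in this commit. A minimal sketch of the selection idea, with invented strategy IDs and weights rather than the real tables:

#include <stdio.h>
#include <stdlib.h>

/* Sketch only: the real MUT_* IDs and per-mode strategy arrays are defined
   elsewhere in this commit; the IDs and weights below are invented. */
enum { MUT_FLIPBIT, MUT_ARITH8, MUT_DEL, MUT_SPLICE_OVERWRITE };

static const unsigned int example_strategy[] = {

    MUT_FLIPBIT, MUT_ARITH8, MUT_DEL, MUT_DEL, MUT_SPLICE_OVERWRITE};

#define EXAMPLE_STRATEGY_SIZE \
  (sizeof(example_strategy) / sizeof(example_strategy[0]))

int main(void) {

  /* rand() stands in for afl's rand_below(); listing MUT_DEL twice makes
     deletions twice as likely to be picked as the other entries. */
  for (int i = 0; i < 8; ++i) {

    unsigned int pick = example_strategy[rand() % EXAMPLE_STRATEGY_SIZE];
    printf("stacked mutation %d uses strategy id %u\n", i, pick);

  }

  return 0;

}

Weighting by repetition is how the exploration and exploitation tables can differ while the switch statement itself stays identical.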
diff --git a/src/afl-fuzz-queue.c b/src/afl-fuzz-queue.c
index b10bf749..48fd33ec 100644
--- a/src/afl-fuzz-queue.c
+++ b/src/afl-fuzz-queue.c
@@ -612,7 +612,7 @@ void add_to_queue(afl_state_t *afl, u8 *fname, u32 len, u8 passed_det) {
 
   }
 
-  if (likely(q->len > 4)) afl->ready_for_splicing_count++;
+  if (likely(q->len > 4)) { ++afl->ready_for_splicing_count; }
 
   ++afl->queued_items;
   ++afl->active_items;
diff --git a/src/afl-fuzz-redqueen.c b/src/afl-fuzz-redqueen.c
index 41644cb9..73e188e7 100644
--- a/src/afl-fuzz-redqueen.c
+++ b/src/afl-fuzz-redqueen.c
@@ -1988,10 +1988,10 @@ static u8 rtn_extend_encoding(afl_state_t *afl, u8 entry,
 
   if (l0 >= 0x80 || ol0 >= 0x80) {
 
-    l0 -= 0x80;
-    l1 -= 0x80;
-    ol0 -= 0x80;
-    ol1 -= 0x80;
+    if (l0 >= 0x80) { l0 -= 0x80; }
+    if (l1 >= 0x80) { l1 -= 0x80; }
+    if (ol0 >= 0x80) { ol0 -= 0x80; }
+    if (ol1 >= 0x80) { ol1 -= 0x80; }
 
   }
 
@@ -2059,7 +2059,7 @@ static u8 rtn_extend_encoding(afl_state_t *afl, u8 entry,
 
       for (i = 0; i < its_len; ++i) {
 
-        if ((pattern[i] != buf[idx + i] && o_pattern[i] != orig_buf[idx + i]) ||
+        if ((pattern[i] != buf[idx + i] || o_pattern[i] != orig_buf[idx + i]) ||
             *status == 1) {
 
           break;
@@ -2592,6 +2592,8 @@ static u8 rtn_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u8 *cbuf,
       // shape_len), check_if_text_buf((u8 *)&o->v1, shape_len), v0_len,
       // o->v0, v1_len, o->v1);
 
+      // Note that this check differs from the one at line 1901: for RTN we
+      // are more opportunistic about adding to the dictionary than for cmps.
       if (!memcmp(o->v0, orig_o->v0, v0_len) ||
           (!found_one || check_if_text_buf((u8 *)&o->v0, v0_len) == v0_len))
         maybe_add_auto(afl, o->v0, v0_len);
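The first redqueen hunk above fixes an unsigned wrap-around: previously, as soon as l0 or ol0 had the top bit set, 0x80 was subtracted from all four values, which wraps any value that is still below 0x80. A tiny demonstration with made-up values (unsigned char here for brevity):

#include <stdio.h>

int main(void) {

  unsigned char l0 = 0x85, l1 = 0x03;  /* made-up length values */

  unsigned char old_l1 = (unsigned char)(l1 - 0x80);  /* wraps to 0x83 */
  unsigned char new_l1 = (l1 >= 0x80) ? (unsigned char)(l1 - 0x80) : l1;

  /* l0 is adjusted either way; l1 stays intact only with the guarded form */
  printf("l0=0x%02x old_l1=0x%02x new_l1=0x%02x\n",
         (unsigned char)(l0 - 0x80), old_l1, new_l1);
  return 0;

}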
diff --git a/src/afl-fuzz-state.c b/src/afl-fuzz-state.c
index 5e736029..99f69314 100644
--- a/src/afl-fuzz-state.c
+++ b/src/afl-fuzz-state.c
@@ -108,6 +108,7 @@ void afl_state_init(afl_state_t *afl, uint32_t map_size) {
   afl->cmplog_lvl = 2;
   afl->min_length = 1;
   afl->max_length = MAX_FILE;
+  afl->switch_fuzz_mode = STRATEGY_SWITCH_TIME * 1000;
 #ifndef NO_SPLICING
   afl->use_splicing = 1;
 #endif
diff --git a/src/afl-fuzz-stats.c b/src/afl-fuzz-stats.c
index 07157bf7..4013370d 100644
--- a/src/afl-fuzz-stats.c
+++ b/src/afl-fuzz-stats.c
@@ -27,6 +27,45 @@
 #include "envs.h"
 #include <limits.h>
 
+static char fuzzing_state[4][12] = {"started :-)", "in progress", "final phase",
+                                    "finished..."};
+
+char *get_fuzzing_state(afl_state_t *afl) {
+
+  u64 cur_ms = get_cur_time();
+  u64 last_find = cur_ms - afl->last_find_time;
+  u64 cur_run_time = cur_ms - afl->start_time;
+  u64 cur_total_run_time = afl->prev_run_time + cur_run_time;
+
+  if (unlikely(cur_run_time < 60 * 3 * 1000 ||
+               cur_total_run_time < 60 * 5 * 1000)) {
+
+    return fuzzing_state[0];
+
+  } else {
+
+    u64 last_find_100 = 100 * last_find;
+    u64 percent_cur = last_find_100 / cur_run_time;
+    u64 percent_total = last_find_100 / cur_total_run_time;
+
+    if (unlikely(percent_cur >= 80 && percent_total >= 80)) {
+
+      return fuzzing_state[3];
+
+    } else if (unlikely(percent_cur >= 55 && percent_total >= 55)) {
+
+      return fuzzing_state[2];
+
+    } else {
+
+      return fuzzing_state[1];
+
+    }
+
+  }
+
+}
+
 /* Write fuzzer setup file */
 
 void write_setup_file(afl_state_t *afl, u32 argc, char **argv) {
@@ -1282,7 +1321,11 @@ void show_stats_normal(afl_state_t *afl) {
   }
 
   /* Last line */
-  SAYF(SET_G1 "\n" bSTG bLB bH30 bH20 bH2 bRB bSTOP cRST RESET_G1);
+
+  SAYF(SET_G1 "\n" bSTG bLB bH cCYA          bSTOP " strategy:" cPIN
+              " %s " bSTG bH10 cCYA          bSTOP " state:" cPIN
+              " %s " bSTG bH2 bRB bSTOP cRST RESET_G1,
+       afl->fuzz_mode == 0 ? "explore" : "exploit", get_fuzzing_state(afl));
 
 #undef IB
 
@@ -2260,7 +2303,12 @@ void show_init_stats(afl_state_t *afl) {
       stringify_int(IB(0), min_us), stringify_int(IB(1), max_us),
       stringify_int(IB(2), avg_us));
 
-  if (afl->timeout_given != 1) {
+  if (afl->timeout_given == 3) {
+
+    ACTF("Applying timeout settings from resumed session (%u ms).",
+         afl->fsrv.exec_tmout);
+
+  } else if (afl->timeout_given != 1) {
 
     /* Figure out the appropriate timeout. The basic idea is: 5x average or
        1x max, rounded up to EXEC_TM_ROUND ms and capped at 1 second.
@@ -2302,11 +2350,6 @@ void show_init_stats(afl_state_t *afl) {
 
     afl->timeout_given = 1;
 
-  } else if (afl->timeout_given == 3) {
-
-    ACTF("Applying timeout settings from resumed session (%u ms).",
-         afl->fsrv.exec_tmout);
-
   } else {
 
     ACTF("-t option specified. We'll use an exec timeout of %u ms.",
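get_fuzzing_state() above classifies the run by how much of the runtime has passed since the last new find: if both the current-run and total-run shares reach 80% it reports "finished...", at 55% "final phase", otherwise "in progress", and the first minutes of a run always report "started :-)". A worked example with made-up timings:

#include <stdio.h>

int main(void) {

  /* made-up timings, all in milliseconds */
  unsigned long long cur_run_time = 10ULL * 3600 * 1000;  /* 10h in this run     */
  unsigned long long prev_run_time = 2ULL * 3600 * 1000;  /* 2h before a resume  */
  unsigned long long last_find = 7ULL * 3600 * 1000;      /* 7h since a new find */

  unsigned long long percent_cur = 100 * last_find / cur_run_time;
  unsigned long long percent_total =
      100 * last_find / (cur_run_time + prev_run_time);

  /* prints 70 and 58: above the 55% thresholds, below 80% -> "final phase" */
  printf("percent_cur=%llu percent_total=%llu\n", percent_cur, percent_total);
  return 0;

}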
diff --git a/src/afl-fuzz.c b/src/afl-fuzz.c
index 4134b99e..79b05da7 100644
--- a/src/afl-fuzz.c
+++ b/src/afl-fuzz.c
@@ -129,6 +129,13 @@ static void usage(u8 *argv0, int more_help) {
       "  -o dir        - output directory for fuzzer findings\n\n"
 
       "Execution control settings:\n"
+      "  -P strategy   - set a fixed mutation strategy: explore (focus on new "
+      "coverage),\n"
+      "                  exploit (focus on triggering crashes). You can also "
+      "set a\n"
+      "                  number of seconds with no finds after which it "
+      "switches to\n"
+      "                  exploit mode, and back on new coverage (default: %u)\n"
       "  -p schedule   - power schedules compute a seed's performance score:\n"
       "                  fast(default), explore, exploit, seek, rare, mmopt, "
       "coe, lin\n"
@@ -157,6 +164,8 @@ static void usage(u8 *argv0, int more_help) {
       "\n"
 
       "Mutator settings:\n"
+      "  -a            - target expects ascii text input (prefer text "
+      "mutators)\n"
       "  -g minlength  - set min length of generated fuzz input (default: 1)\n"
       "  -G maxlength  - set max length of generated fuzz input (default: "
       "%lu)\n"
@@ -212,7 +221,8 @@ static void usage(u8 *argv0, int more_help) {
       "  -e ext        - file extension for the fuzz test input file (if "
       "needed)\n"
       "\n",
-      argv0, EXEC_TIMEOUT, MEM_LIMIT, MAX_FILE, FOREIGN_SYNCS_MAX);
+      argv0, STRATEGY_SWITCH_TIME, EXEC_TIMEOUT, MEM_LIMIT, MAX_FILE,
+      FOREIGN_SYNCS_MAX);
 
   if (more_help > 1) {
 
@@ -494,14 +504,48 @@ int main(int argc, char **argv_orig, char **envp) {
 
   afl->shmem_testcase_mode = 1;  // we always try to perform shmem fuzzing
 
-  while (
-      (opt = getopt(
-           argc, argv,
-           "+Ab:B:c:CdDe:E:hi:I:f:F:g:G:l:L:m:M:nNOo:p:RQs:S:t:T:UV:WXx:YZ")) >
-      0) {
+  // still available: HjJkKqruvwz
+  while ((opt = getopt(argc, argv,
+                       "+aAb:B:c:CdDe:E:f:F:g:G:hi:I:l:L:m:M:nNo:Op:P:QRs:S:t:"
+                       "T:UV:WXx:YZ")) > 0) {
 
     switch (opt) {
 
+      case 'a':
+        afl->text_input = 1;
+        break;
+
+      case 'P':
+        if (!stricmp(optarg, "explore") || !stricmp(optarg, "exploration")) {
+
+          afl->fuzz_mode = 0;
+          afl->switch_fuzz_mode = 0;
+
+        } else if (!stricmp(optarg, "exploit") ||
+
+                   !stricmp(optarg, "exploitation")) {
+
+          afl->fuzz_mode = 1;
+          afl->switch_fuzz_mode = 0;
+
+        } else {
+
+          if ((afl->switch_fuzz_mode = (u32)atoi(optarg)) > INT_MAX) {
+
+            FATAL(
+                "Parameter for option -P must be \"explore\", \"exploit\" or a "
+                "number!");
+
+          } else {
+
+            afl->switch_fuzz_mode *= 1000;
+
+          }
+
+        }
+
+        break;
+
       case 'g':
         afl->min_length = atoi(optarg);
         break;
@@ -2319,6 +2363,7 @@ int main(int argc, char **argv_orig, char **envp) {
             max_ms = afl->queue_buf[entry]->exec_us;
 
       afl->fsrv.exec_tmout = max_ms;
+      afl->timeout_given = 1;
 
     }
 
@@ -2688,13 +2733,31 @@ int main(int argc, char **argv_orig, char **envp) {
 
     } while (skipped_fuzz && afl->queue_cur && !afl->stop_soon);
 
+    u64 cur_time = get_cur_time();
+
+    if (likely(afl->switch_fuzz_mode && afl->fuzz_mode == 0) &&
+        unlikely(cur_time > afl->last_find_time + afl->switch_fuzz_mode)) {
+
+      if (afl->afl_env.afl_no_ui) {
+
+        ACTF(
+            "No new coverage found for %llu seconds, switching to exploitation "
+            "strategy.",
+            afl->switch_fuzz_mode / 1000);
+
+      }
+
+      afl->fuzz_mode = 1;
+
+    }
+
     if (likely(!afl->stop_soon && afl->sync_id)) {
 
       if (likely(afl->skip_deterministic)) {
 
         if (unlikely(afl->is_main_node)) {
 
-          if (unlikely(get_cur_time() >
+          if (unlikely(cur_time >
                        (afl->sync_time >> 1) + afl->last_sync_time)) {
 
             if (!(sync_interval_cnt++ % (SYNC_INTERVAL / 3))) {
@@ -2707,7 +2770,7 @@ int main(int argc, char **argv_orig, char **envp) {
 
         } else {
 
-          if (unlikely(get_cur_time() > afl->sync_time + afl->last_sync_time)) {
+          if (unlikely(cur_time > afl->sync_time + afl->last_sync_time)) {
 
             if (!(sync_interval_cnt++ % SYNC_INTERVAL)) { sync_fuzzers(afl); }
 
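With a numeric -P argument, switch_fuzz_mode is stored in milliseconds and the main-loop check above flips fuzz_mode from explore to exploit once the last find is older than that window; the explore/exploit keywords pin the strategy instead. A minimal sketch of the timing check with invented numbers:

#include <stdio.h>

int main(void) {

  /* invented values; in afl-fuzz these come from -P, get_cur_time() and
     afl->last_find_time */
  unsigned long long switch_fuzz_mode = 600ULL * 1000;  /* -P 600, in ms */
  unsigned long long last_find_time = 1000000ULL;       /* ms timestamp  */
  unsigned long long cur_time = last_find_time + 601ULL * 1000;

  int fuzz_mode = 0;                                     /* explore */

  if (switch_fuzz_mode && fuzz_mode == 0 &&
      cur_time > last_find_time + switch_fuzz_mode) {

    fuzz_mode = 1;                                       /* exploit */

  }

  printf("fuzz_mode=%d\n", fuzz_mode);
  return 0;

}

The corresponding switch back to explore on new coverage is not part of this hunk.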
diff --git a/src/afl-showmap.c b/src/afl-showmap.c
index 9c029035..13867fda 100644
--- a/src/afl-showmap.c
+++ b/src/afl-showmap.c
@@ -243,7 +243,8 @@ static void analyze_results(afl_forkserver_t *fsrv) {
 
       total += fsrv->trace_bits[i];
       if (fsrv->trace_bits[i] > highest) highest = fsrv->trace_bits[i];
-      if (!coverage_map[i]) { coverage_map[i] = 1; }
+      // if (!coverage_map[i]) { coverage_map[i] = 1; }
+      coverage_map[i] |= fsrv->trace_bits[i];
 
     }
 
@@ -328,7 +329,7 @@ static u32 write_results_to_file(afl_forkserver_t *fsrv, u8 *outfile) {
 
       if (cmin_mode) {
 
-        fprintf(f, "%u%u\n", fsrv->trace_bits[i], i);
+        fprintf(f, "%u%03u\n", i, fsrv->trace_bits[i]);
 
       } else {
 
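The write_results_to_file() change above also reorders the cmin line format: the edge index now comes first, followed by the hit-count bucket zero-padded to three digits, whereas the old unpadded count-then-index encoding could produce identical strings for different entries. A quick illustration with hypothetical map entries:

#include <stdio.h>

int main(void) {

  /* hypothetical entries that collide in the old encoding */
  unsigned int count_a = 1, idx_a = 23;
  unsigned int count_b = 12, idx_b = 3;

  printf("old: %u%u vs %u%u\n", count_a, idx_a, count_b, idx_b);
  /* -> "old: 123 vs 123" (ambiguous) */

  printf("new: %u%03u vs %u%03u\n", idx_a, count_a, idx_b, count_b);
  /* -> "new: 23001 vs 3012" (the bucketed count fits in the last three digits) */

  return 0;

}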
diff --git a/utils/afl_network_proxy/afl-network-server.c b/utils/afl_network_proxy/afl-network-server.c
index 04309ada..95b0a551 100644
--- a/utils/afl_network_proxy/afl-network-server.c
+++ b/utils/afl_network_proxy/afl-network-server.c
@@ -173,6 +173,7 @@ static void set_up_environment(afl_forkserver_t *fsrv) {
     }
 
     out_file = alloc_printf("%s/.afl-input-temp-%u", use_dir, getpid());
+    fsrv->out_file = out_file;
 
   }