Diffstat (limited to 'src/afl-fuzz-src/queue.c')
-rw-r--r--  src/afl-fuzz-src/queue.c  286
1 file changed, 286 insertions(+), 0 deletions(-)
diff --git a/src/afl-fuzz-src/queue.c b/src/afl-fuzz-src/queue.c
new file mode 100644
index 00000000..ed352bcb
--- /dev/null
+++ b/src/afl-fuzz-src/queue.c
@@ -0,0 +1,286 @@
+/*
+ american fuzzy lop - fuzzer code
+ --------------------------------
+
+ Written and maintained by Michal Zalewski <lcamtuf@google.com>
+
+ Forkserver design by Jann Horn <jannhorn@googlemail.com>
+
+ Copyright 2013, 2014, 2015, 2016, 2017 Google Inc. All rights reserved.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at:
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+   This file contains the test case queue handling code: appending new
+   entries, recording their state on disk, scoring them, and culling the
+   queue down to a favored subset.
+
+ */
+
+#include "afl-fuzz.h"
+
+/* Mark deterministic checks as done for a particular queue entry. We use the
+ .state file to avoid repeating deterministic fuzzing when resuming aborted
+ scans. */
+
+void mark_as_det_done(struct queue_entry* q) {
+
+ u8* fn = strrchr(q->fname, '/');
+ s32 fd;
+
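+  /* q->fname has the form "<out_dir>/queue/<basename>"; fn + 1 skips the
+     final '/' so the basename can be reused for the .state marker. */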
+ fn = alloc_printf("%s/queue/.state/deterministic_done/%s", out_dir, fn + 1);
+
+ fd = open(fn, O_WRONLY | O_CREAT | O_EXCL, 0600);
+ if (fd < 0) PFATAL("Unable to create '%s'", fn);
+ close(fd);
+
+ ck_free(fn);
+
+ q->passed_det = 1;
+
+}
+
+
+/* Mark as variable. Create symlinks if possible to make it easier to examine
+ the files. */
+
+void mark_as_variable(struct queue_entry* q) {
+
+ u8 *fn = strrchr(q->fname, '/') + 1, *ldest;
+
+ ldest = alloc_printf("../../%s", fn);
+ fn = alloc_printf("%s/queue/.state/variable_behavior/%s", out_dir, fn);
+
+ if (symlink(ldest, fn)) {
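+    /* symlink() failed, e.g., on a filesystem with no symlink support;
+       fall back to creating an empty marker file instead. */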
+
+ s32 fd = open(fn, O_WRONLY | O_CREAT | O_EXCL, 0600);
+ if (fd < 0) PFATAL("Unable to create '%s'", fn);
+ close(fd);
+
+ }
+
+ ck_free(ldest);
+ ck_free(fn);
+
+ q->var_behavior = 1;
+
+}
+
+
+/* Mark / unmark as redundant (edge-only). This is not used for restoring state,
+ but may be useful for post-processing datasets. */
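+
+/* Entries get flagged here by cull_queue() whenever they stop being favored,
+   i.e., everything they cover is also covered by some favored entry. */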
+
+void mark_as_redundant(struct queue_entry* q, u8 state) {
+
+ u8* fn;
+
+ if (state == q->fs_redundant) return;
+
+ q->fs_redundant = state;
+
+ fn = strrchr(q->fname, '/');
+ fn = alloc_printf("%s/queue/.state/redundant_edges/%s", out_dir, fn + 1);
+
+ if (state) {
+
+ s32 fd;
+
+ fd = open(fn, O_WRONLY | O_CREAT | O_EXCL, 0600);
+ if (fd < 0) PFATAL("Unable to create '%s'", fn);
+ close(fd);
+
+ } else {
+
+ if (unlink(fn)) PFATAL("Unable to remove '%s'", fn);
+
+ }
+
+ ck_free(fn);
+
+}
+
+
+/* Append new test case to the queue. */
+
+void add_to_queue(u8* fname, u32 len, u8 passed_det) {
+
+ struct queue_entry* q = ck_alloc(sizeof(struct queue_entry));
+
+ q->fname = fname;
+ q->len = len;
+ q->depth = cur_depth + 1;
+ q->passed_det = passed_det;
+ q->n_fuzz = 1;
+
+ if (q->depth > max_depth) max_depth = q->depth;
+
+ if (queue_top) {
+
+ queue_top->next = q;
+ queue_top = q;
+
+ } else q_prev100 = queue = queue_top = q;
+
+ ++queued_paths;
+ ++pending_not_fuzzed;
+
+ cycles_wo_finds = 0;
+
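+  /* Every 100th entry is also linked through next_100, forming a sparse
+     skip list that lets other parts of the fuzzer walk long queues faster. */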
+ if (!(queued_paths % 100)) {
+
+ q_prev100->next_100 = q;
+ q_prev100 = q;
+
+ }
+
+ last_path_time = get_cur_time();
+
+}
+
+
+/* Destroy the entire queue. */
+
+void destroy_queue(void) {
+
+ struct queue_entry *q = queue, *n;
+
+ while (q) {
+
+ n = q->next;
+ ck_free(q->fname);
+ ck_free(q->trace_mini);
+ ck_free(q);
+ q = n;
+
+ }
+
+}
+
+
+/* When we bump into a new path, we call this to see if the path appears
+ more "favorable" than any of the existing ones. The purpose of the
+ "favorables" is to have a minimal set of paths that trigger all the bits
+ seen in the bitmap so far, and focus on fuzzing them at the expense of
+ the rest.
+
+   The first step of the process is to maintain a list of top_rated[] entries
+   for every byte in the bitmap. We win that slot if there is no previous
+   contender, or if we have not been fuzzed more often (judged by
+   next_p2(n_fuzz)) and do not have a less favorable speed x size factor. */
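+
+/* A worked example, for illustration only: if the current winner for byte i
+   has exec_us = 1000 and len = 50 (factor 50000), a new entry covering the
+   same byte with exec_us = 400 and len = 100 (factor 40000) takes the slot,
+   provided its next_p2(n_fuzz) bucket is no higher. */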
+
+
+void update_bitmap_score(struct queue_entry* q) {
+
+ u32 i;
+ u64 fav_factor = q->exec_us * q->len;
+ u64 fuzz_p2 = next_p2 (q->n_fuzz);
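+
+  /* next_p2() rounds n_fuzz up to the nearest power of two; comparing these
+     buckets instead of raw counts keeps slot ownership from changing on
+     every single execution. */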
+
+ /* For every byte set in trace_bits[], see if there is a previous winner,
+ and how it compares to us. */
+
+ for (i = 0; i < MAP_SIZE; ++i)
+
+ if (trace_bits[i]) {
+
+ if (top_rated[i]) {
+
+         /* Entries that have been fuzzed more often, or that have a less
+            favorable speed x size factor, lose to the current winner. */
+
+         u64 top_rated_fuzz_p2    = next_p2 (top_rated[i]->n_fuzz);
+         u64 top_rated_fav_factor = top_rated[i]->exec_us * top_rated[i]->len;
+
+         if (fuzz_p2 > top_rated_fuzz_p2) continue;
+         if (fav_factor > top_rated_fav_factor) continue;
+
+ /* Looks like we're going to win. Decrease ref count for the
+ previous winner, discard its trace_bits[] if necessary. */
+
+ if (!--top_rated[i]->tc_ref) {
+ ck_free(top_rated[i]->trace_mini);
+ top_rated[i]->trace_mini = 0;
+ }
+
+ }
+
+ /* Insert ourselves as the new winner. */
+
+ top_rated[i] = q;
+ ++q->tc_ref;
+
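+      /* Cache a compact, one-bit-per-tuple copy of the current trace so that
+         cull_queue() can consult our coverage without re-running the case. */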
+ if (!q->trace_mini) {
+ q->trace_mini = ck_alloc(MAP_SIZE >> 3);
+ minimize_bits(q->trace_mini, trace_bits);
+ }
+
+ score_changed = 1;
+
+ }
+
+}
+
+
+/* The second part of the mechanism discussed above is a routine that
+ goes over top_rated[] entries, and then sequentially grabs winners for
+ previously-unseen bytes (temp_v) and marks them as favored, at least
+ until the next run. The favored entries are given more air time during
+ all fuzzing steps. */
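+
+/* In effect this is a greedy approximation of a minimal set cover: bytes are
+   visited in index order, and the first winner found for an uncovered byte
+   claims every byte it touches. The resulting favored set is small, but not
+   guaranteed to be minimal. */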
+
+void cull_queue(void) {
+
+ struct queue_entry* q;
+ static u8 temp_v[MAP_SIZE >> 3];
+ u32 i;
+
+ if (dumb_mode || !score_changed) return;
+
+ score_changed = 0;
+
+ memset(temp_v, 255, MAP_SIZE >> 3);
+
+ queued_favored = 0;
+ pending_favored = 0;
+
+ q = queue;
+
+ while (q) {
+ q->favored = 0;
+ q = q->next;
+ }
+
+  /* Let's see if any bitmap bytes are still uncovered (i.e., still set in
+     temp_v). If so, and if the byte has a top_rated[] contender, use it. */
+
+ for (i = 0; i < MAP_SIZE; ++i)
+ if (top_rated[i] && (temp_v[i >> 3] & (1 << (i & 7)))) {
+
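+      /* A set bit in temp_v means this byte is not yet covered by a favored
+         entry; temp_v starts out all ones, and winners clear bits below. */
+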
+ u32 j = MAP_SIZE >> 3;
+
+ /* Remove all bits belonging to the current entry from temp_v. */
+
+ while (j--)
+ if (top_rated[i]->trace_mini[j])
+ temp_v[j] &= ~top_rated[i]->trace_mini[j];
+
+ top_rated[i]->favored = 1;
+ ++queued_favored;
+
+      if (top_rated[i]->fuzz_level == 0 || !top_rated[i]->was_fuzzed)
+        ++pending_favored;
+
+ }
+
+ q = queue;
+
+ while (q) {
+ mark_as_redundant(q, !q->favored);
+ q = q->next;
+ }
+
+}
+