| author | van Hauser <vh@thc.org> | 2020-06-29 18:19:35 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2020-06-29 18:19:35 +0200 |
| commit | 8f1b78f49e8efef8ec089230d732cdee7b37fa9a (patch) | |
| tree | 4cb5e6e59f24f488e58fbb1aced2dbc0e6550c15 /src/afl-fuzz-one.c | |
| parent | 12bdefe00e38cdc3dd8cb028eeac325ab2e94e16 (diff) | |
| parent | 3a0c91b86205bfebb8ec7e62a2e7b0bfcec2e407 (diff) | |
| download | afl++-8f1b78f49e8efef8ec089230d732cdee7b37fa9a.tar.gz | |
Merge pull request #426 from AFLplusplus/dev
Dev
Diffstat (limited to 'src/afl-fuzz-one.c')
-rw-r--r-- | src/afl-fuzz-one.c | 240 |
1 file changed, 131 insertions, 109 deletions
diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c
index 578ac584..72383727 100644
--- a/src/afl-fuzz-one.c
+++ b/src/afl-fuzz-one.c
@@ -29,10 +29,14 @@ static int select_algorithm(afl_state_t *afl) {
 
-  int i_puppet, j_puppet;
+  int i_puppet, j_puppet = 0, operator_number = operator_num;
+
+  if (!afl->extras_cnt && !afl->a_extras_cnt) operator_number -= 2;
+
+  double range_sele =
+      (double)afl->probability_now[afl->swarm_now][operator_number - 1];
+  double sele = ((double)(rand_below(afl, 10000) * 0.0001 * range_sele));
 
-  double sele = ((double)(rand_below(afl, 10000)) * 0.0001);
-  j_puppet = 0;
 
   for (i_puppet = 0; i_puppet < operator_num; ++i_puppet) {
 
     if (unlikely(i_puppet == 0)) {
 
@@ -52,8 +56,10 @@ static int select_algorithm(afl_state_t *afl) {
 
     }
 
-  if (j_puppet == 1 &&
-      sele < afl->probability_now[afl->swarm_now][i_puppet - 1]) {
+  if ((j_puppet == 1 &&
+       sele < afl->probability_now[afl->swarm_now][i_puppet - 1]) ||
+      (i_puppet + 1 < operator_num &&
+       sele > afl->probability_now[afl->swarm_now][i_puppet + 1])) {
 
     FATAL("error select_algorithm");
 
@@ -364,8 +370,8 @@ u8 fuzz_one_original(afl_state_t *afl) {
 
   s32 len, fd, temp_len, i, j;
   u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
-  u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt;
-  u32 splice_cycle = 0, perf_score = 100, orig_perf, prev_cksum, eff_cnt = 1;
+  u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, prev_cksum;
+  u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;
 
   u8 ret_val = 1, doing_det = 0;
 
@@ -488,6 +494,8 @@ u8 fuzz_one_original(afl_state_t *afl) {
 
     if (afl->queue_cur->cal_failed < CAL_CHANCES) {
 
+      afl->queue_cur->exec_cksum = 0;
+
       res =
           calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);
 
@@ -548,8 +556,6 @@ u8 fuzz_one_original(afl_state_t *afl) {
 
   if (unlikely(perf_score == 0)) { goto abandon_entry; }
 
-  if (unlikely(afl->use_radamsa > 1)) { goto radamsa_stage; }
-
   if (afl->shm.cmplog_mode && !afl->queue_cur->fully_colorized) {
 
     if (input_to_state_stage(afl, in_buf, out_buf, len,
@@ -566,12 +572,11 @@ u8 fuzz_one_original(afl_state_t *afl) {
      if it has gone through deterministic testing in earlier, resumed runs
      (passed_det). */
 
-  if (afl->skip_deterministic ||
-      ((!afl->queue_cur->passed_det) &&
-       perf_score < (afl->queue_cur->depth * 30 <= afl->havoc_max_mult * 100
-                         ? afl->queue_cur->depth * 30
-                         : afl->havoc_max_mult * 100)) ||
-      afl->queue_cur->passed_det) {
+  if (likely(afl->queue_cur->passed_det) || likely(afl->skip_deterministic) ||
+      likely(perf_score <
+             (afl->queue_cur->depth * 30 <= afl->havoc_max_mult * 100
+                  ? afl->queue_cur->depth * 30
+                  : afl->havoc_max_mult * 100))) {
 
     goto custom_mutator_stage;
 
@@ -653,7 +658,7 @@ u8 fuzz_one_original(afl_state_t *afl) {
 
     if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {
 
-      u32 cksum = hash32(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
+      u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
 
       if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {
 
@@ -821,14 +826,14 @@ u8 fuzz_one_original(afl_state_t *afl) {
 
     if (!eff_map[EFF_APOS(afl->stage_cur)]) {
 
-      u32 cksum;
+      u64 cksum;
 
       /* If in non-instrumented mode or if the file is very short, just flag
          everything without wasting time on checksums. */
 
       if (!afl->non_instrumented_mode && len >= EFF_MIN_LEN) {
 
-        cksum = hash32(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
+        cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
 
       } else {
 
@@ -1680,6 +1685,7 @@ custom_mutator_stage:
 
   retry_external_pick:
     /* Pick a random other queue entry for passing to external API */
+
     do {
 
       tid = rand_below(afl, afl->queued_paths);
@@ -1704,7 +1710,7 @@ custom_mutator_stage:
     /* Make sure that the target has a reasonable length. */
 
     while (target && (target->len < 2 || target == afl->queue_cur) &&
-           afl->queued_paths > 1) {
+           afl->queued_paths > 3) {
 
       target = target->next;
       ++afl->splicing_with;
@@ -2230,7 +2236,7 @@ havoc_stage:
       case 16: {
 
         u32 use_extra, extra_len, insert_at = rand_below(afl, temp_len + 1);
-        u8 *new_buf;
+        u8 *ptr;
 
         /* Insert an extra. Do the same dice-rolling stuff as for the
            previous case. */
@@ -2239,44 +2245,27 @@ havoc_stage:
 
           use_extra = rand_below(afl, afl->a_extras_cnt);
           extra_len = afl->a_extras[use_extra].len;
-
-          if (temp_len + extra_len >= MAX_FILE) { break; }
-
-          new_buf =
-              ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + extra_len);
-
-          /* Head */
-          memcpy(new_buf, out_buf, insert_at);
-
-          /* Inserted part */
-          memcpy(new_buf + insert_at, afl->a_extras[use_extra].data,
-                 extra_len);
+          ptr = afl->a_extras[use_extra].data;
 
         } else {
 
           use_extra = rand_below(afl, afl->extras_cnt);
           extra_len = afl->extras[use_extra].len;
+          ptr = afl->extras[use_extra].data;
 
-          if (temp_len + extra_len >= MAX_FILE) { break; }
-
-          new_buf =
-              ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + extra_len);
-
-          /* Head */
-          memcpy(new_buf, out_buf, insert_at);
+        }
 
-          /* Inserted part */
-          memcpy(new_buf + insert_at, afl->extras[use_extra].data, extra_len);
+        if (temp_len + extra_len >= MAX_FILE) { break; }
 
-        }
+        out_buf = ck_maybe_grow(BUF_PARAMS(out), temp_len + extra_len);
 
         /* Tail */
-        memcpy(new_buf + insert_at + extra_len, out_buf + insert_at,
-               temp_len - insert_at);
+        memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
+                temp_len - insert_at);
+
+        /* Inserted part */
+        memcpy(out_buf + insert_at, ptr, extra_len);
 
-        swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
-        out_buf = new_buf;
-        new_buf = NULL;
         temp_len += extra_len;
         break;
 
@@ -2438,63 +2427,6 @@ retry_splicing:
 #endif /* !IGNORE_FINDS */
 
   ret_val = 0;
-  goto radamsa_stage;
-
-radamsa_stage:
-
-  if (likely(!afl->use_radamsa || !afl->radamsa_mutate_ptr)) {
-
-    goto abandon_entry;
-
-  }
-
-  afl->stage_name = "radamsa";
-  afl->stage_short = "radamsa";
-  afl->stage_max = (HAVOC_CYCLES * perf_score / afl->havoc_div / 100)
-                   << afl->use_radamsa;
-
-  if (afl->stage_max < HAVOC_MIN) { afl->stage_max = HAVOC_MIN; }
-
-  orig_hit_cnt = afl->queued_paths + afl->unique_crashes;
-
-  /* Read the additional testcase.
-     We'll reuse in_scratch, as it is free at this point.
-   */
-  u8 *save_buf = ck_maybe_grow(BUF_PARAMS(in_scratch), len);
-  memcpy(save_buf, out_buf, len);
-
-  u32 max_len = len + choose_block_len(afl, HAVOC_BLK_XL);
-  u8 *new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), max_len);
-  u8 *tmp_buf;
-
-  for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max; ++afl->stage_cur) {
-
-    u32 new_len = afl->radamsa_mutate_ptr(save_buf, len, new_buf, max_len,
-                                          get_rand_seed(afl));
-
-    if (new_len) {
-
-      temp_len = new_len;
-      tmp_buf = new_buf;
-
-    } else {
-
-      tmp_buf = save_buf;                                // nope but I dont care
-      temp_len = len;
-
-    }
-
-    if (common_fuzz_stuff(afl, tmp_buf, temp_len)) { goto abandon_entry; }
-
-  }
-
-  new_hit_cnt = afl->queued_paths + afl->unique_crashes;
-
-  afl->stage_finds[STAGE_RADAMSA] += new_hit_cnt - orig_hit_cnt;
-  afl->stage_cycles[STAGE_RADAMSA] += afl->stage_max;
-
-  ret_val = 0;
-  goto abandon_entry;
 
 /* we are through with this queue entry - for this iteration */
 abandon_entry:
@@ -2539,8 +2471,8 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
 
   s32 len, fd, temp_len, i, j;
   u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
-  u64 havoc_queued, orig_hit_cnt, new_hit_cnt, cur_ms_lv;
-  u32 splice_cycle = 0, perf_score = 100, orig_perf, prev_cksum, eff_cnt = 1;
+  u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, cur_ms_lv, prev_cksum;
+  u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;
 
   u8 ret_val = 1, doing_det = 0;
 
@@ -2637,6 +2569,8 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
 
     if (afl->queue_cur->cal_failed < CAL_CHANCES) {
 
+      afl->queue_cur->exec_cksum = 0;
+
       res =
           calibrate_case(afl, afl->queue_cur, in_buf, afl->queue_cycle - 1, 0);
 
@@ -2806,7 +2740,7 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
 
     if (!afl->non_instrumented_mode && (afl->stage_cur & 7) == 7) {
 
-      u32 cksum = hash32(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
+      u64 cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
 
       if (afl->stage_cur == afl->stage_max - 1 && cksum == prev_cksum) {
 
@@ -2974,14 +2908,14 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
 
     if (!eff_map[EFF_APOS(afl->stage_cur)]) {
 
-      u32 cksum;
+      u64 cksum;
 
       /* If in non-instrumented mode or if the file is very short, just flag
          everything without wasting time on checksums. */
 
       if (!afl->non_instrumented_mode && len >= EFF_MIN_LEN) {
 
-        cksum = hash32(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
+        cksum = hash64(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
 
       } else {
 
@@ -4208,6 +4142,94 @@ pacemaker_fuzzing:
 
           } /* case 15 */
 
+            /* Values 16 and 17 can be selected only if there are any extras
+               present in the dictionaries. */
+
+          case 16: {
+
+            /* Overwrite bytes with an extra. */
+
+            if (!afl->extras_cnt ||
+                (afl->a_extras_cnt && rand_below(afl, 2))) {
+
+              /* No user-specified extras or odds in our favor. Let's use an
+                 auto-detected one. */
+
+              u32 use_extra = rand_below(afl, afl->a_extras_cnt);
+              u32 extra_len = afl->a_extras[use_extra].len;
+
+              if (extra_len > temp_len) break;
+
+              u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
+              memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,
+                     extra_len);
+
+            } else {
+
+              /* No auto extras or odds in our favor. Use the dictionary. */
+
+              u32 use_extra = rand_below(afl, afl->extras_cnt);
+              u32 extra_len = afl->extras[use_extra].len;
+
+              if (extra_len > temp_len) break;
+
+              u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
+              memcpy(out_buf + insert_at, afl->extras[use_extra].data,
+                     extra_len);
+
+            }
+
+            afl->stage_cycles_puppet_v2[afl->swarm_now]
+                                       [STAGE_OverWriteExtra] += 1;
+
+            break;
+
+          }
+
+            /* Insert an extra. */
+
+          case 17: {
+
+            u32 use_extra, extra_len,
+                insert_at = rand_below(afl, temp_len + 1);
+            u8 *ptr;
+
+            /* Insert an extra. Do the same dice-rolling stuff as for the
+               previous case. */
+
+            if (!afl->extras_cnt ||
+                (afl->a_extras_cnt && rand_below(afl, 2))) {
+
+              use_extra = rand_below(afl, afl->a_extras_cnt);
+              extra_len = afl->a_extras[use_extra].len;
+              ptr = afl->a_extras[use_extra].data;
+
+            } else {
+
+              use_extra = rand_below(afl, afl->extras_cnt);
+              extra_len = afl->extras[use_extra].len;
+              ptr = afl->extras[use_extra].data;
+
+            }
+
+            if (temp_len + extra_len >= MAX_FILE) break;
+
+            out_buf = ck_maybe_grow(BUF_PARAMS(out), temp_len + extra_len);
+
+            /* Tail */
+            memmove(out_buf + insert_at + extra_len, out_buf + insert_at,
+                    temp_len - insert_at);
+
+            /* Inserted part */
+            memcpy(out_buf + insert_at, ptr, extra_len);
+
+            temp_len += extra_len;
+            afl->stage_cycles_puppet_v2[afl->swarm_now][STAGE_InsertExtra] +=
+                1;
+            break;
+
+          }
+
         } /* switch select_algorithm() */
 
       } /* for i=0; i < use_stacking */
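The most instructive part of this diff is the reworked extra-insertion mutator (havoc case 16 and the new MOpt case 17): instead of building the result in a scratch buffer with two head/tail copies and a buffer swap, the new code grows the output buffer once, shifts the tail with memmove() (the source and destination overlap), and copies the extra into the gap. The sketch below shows that pattern in isolation; it is an illustration under stated assumptions, not AFL++ code: a plain realloc() stands in for ck_maybe_grow()/BUF_PARAMS(), and insert_extra(), MAX_FILE_SKETCH, and the parameter names are hypothetical.

```c
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Stand-in for AFL++'s MAX_FILE size cap (the real value lives in config.h). */
#define MAX_FILE_SKETCH (1024 * 1024)

/* Insert `extra` (extra_len bytes) into `*buf` (current length *len) at
   offset insert_at (0 <= insert_at <= *len), growing the buffer in place.
   Returns 0 on success, -1 if the result would exceed the cap or the
   allocation fails. */
static int insert_extra(uint8_t **buf, size_t *len, size_t insert_at,
                        const uint8_t *extra, size_t extra_len) {

  if (*len + extra_len >= MAX_FILE_SKETCH) { return -1; }

  /* ck_maybe_grow() stand-in: make room for the extra bytes. */
  uint8_t *grown = realloc(*buf, *len + extra_len);
  if (!grown) { return -1; }
  *buf = grown;

  /* Tail: shift everything after the insertion point to the right.
     memmove() is required because source and destination overlap. */
  memmove(grown + insert_at + extra_len, grown + insert_at, *len - insert_at);

  /* Inserted part: copy the extra into the gap that was just opened. */
  memcpy(grown + insert_at, extra, extra_len);

  *len += extra_len;
  return 0;

}

int main(void) {

  uint8_t *buf = malloc(4);
  size_t   len = 4;
  memcpy(buf, "ABCD", 4);

  const uint8_t extra[] = {'X', 'Y'};
  if (insert_extra(&buf, &len, 2, extra, sizeof extra) == 0) {

    /* buf now holds "ABXYCD" and len == 6. */

  }

  free(buf);
  return 0;

}
```

Compared with the removed code path, this needs no second scratch buffer and no head copy at all: only the bytes after the insertion point move, and the overlapping ranges are handled by a single memmove() rather than two memcpy() calls into a fresh buffer.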