Diffstat (limited to 'src/afl-fuzz-one.c')
-rw-r--r-- | src/afl-fuzz-one.c | 217
1 file changed, 120 insertions, 97 deletions
diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c
index 1f0bf30e..3bf0c195 100644
--- a/src/afl-fuzz-one.c
+++ b/src/afl-fuzz-one.c
@@ -77,7 +77,7 @@ static int select_algorithm(afl_state_t *afl) {
 static u32 choose_block_len(afl_state_t *afl, u32 limit) {

   u32 min_value, max_value;
-  u32 rlim = MIN(afl->queue_cycle, 3);
+  u32 rlim = MIN(afl->queue_cycle, (u32)3);

   if (unlikely(!afl->run_over10m)) { rlim = 1; }

@@ -95,7 +95,7 @@ static u32 choose_block_len(afl_state_t *afl, u32 limit) {
     default:

-      if (rand_below(afl, 10)) {
+      if (likely(rand_below(afl, 10))) {

         min_value = HAVOC_BLK_MEDIUM;
         max_value = HAVOC_BLK_LARGE;

@@ -292,7 +292,7 @@ static u8 could_be_interest(u32 old_val, u32 new_val, u8 blen, u8 check_le) {

   /* See if two-byte insertions over old_val could give us new_val. */

-  for (i = 0; i < blen - 1; ++i) {
+  for (i = 0; (s32)i < blen - 1; ++i) {

     for (j = 0; j < sizeof(interesting_16) / 2; ++j) {

@@ -364,15 +364,15 @@ static void locate_diffs(u8 *ptr1, u8 *ptr2, u32 len, s32 *first, s32 *last) {

 #endif                                                     /* !IGNORE_FINDS */

-#define BUF_PARAMS(name) (void **)&afl->name##_buf, &afl->name##_size
-
 /* Take the current entry from the queue, fuzz it for a while. This
    function is a tad too long... returns 0 if fuzzed successfully, 1 if
    skipped or bailed out. */

 u8 fuzz_one_original(afl_state_t *afl) {

-  s32 len, fd, temp_len, i, j;
+  s32 len, fd, temp_len;
+  u32 j;
+  u32 i;
   u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
   u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, prev_cksum;
   u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;

@@ -382,9 +382,6 @@ u8 fuzz_one_original(afl_state_t *afl) {
   u8  a_collect[MAX_AUTO_EXTRA];
   u32 a_len = 0;

-/* Not pretty, but saves a lot of writing */
-#define BUF_PARAMS(name) (void **)&afl->name##_buf, &afl->name##_size
-
 #ifdef IGNORE_FINDS

   /* In IGNORE_FINDS mode, skip any entries that weren't in the

@@ -419,7 +416,7 @@ u8 fuzz_one_original(afl_state_t *afl) {

     if (((afl->queue_cur->was_fuzzed > 0 || afl->queue_cur->fuzz_level > 0) ||
          !afl->queue_cur->favored) &&
-        rand_below(afl, 100) < SKIP_TO_NEW_PROB) {
+        likely(rand_below(afl, 100) < SKIP_TO_NEW_PROB)) {

       return 1;

@@ -436,11 +433,11 @@ u8 fuzz_one_original(afl_state_t *afl) {
     if (afl->queue_cycle > 1 &&
         (afl->queue_cur->fuzz_level == 0 || afl->queue_cur->was_fuzzed)) {

-      if (rand_below(afl, 100) < SKIP_NFAV_NEW_PROB) { return 1; }
+      if (likely(rand_below(afl, 100) < SKIP_NFAV_NEW_PROB)) { return 1; }

     } else {

-      if (rand_below(afl, 100) < SKIP_NFAV_OLD_PROB) { return 1; }
+      if (likely(rand_below(afl, 100) < SKIP_NFAV_OLD_PROB)) { return 1; }

     }

@@ -482,7 +479,8 @@ u8 fuzz_one_original(afl_state_t *afl) {
      single byte anyway, so it wouldn't give us any performance or memory usage
      benefits. */

-  out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
+  out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
+  if (unlikely(!out_buf)) { PFATAL("alloc"); }

   afl->subseq_tmouts = 0;

@@ -524,8 +522,8 @@ u8 fuzz_one_original(afl_state_t *afl) {
    * TRIMMING *
    ************/

-  if (!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
-      !afl->disable_trim) {
+  if (unlikely(!afl->non_instrumented_mode && !afl->queue_cur->trim_done &&
+               !afl->disable_trim)) {

     u8 res = trim_case(afl, afl->queue_cur, in_buf);

@@ -560,7 +558,7 @@ u8 fuzz_one_original(afl_state_t *afl) {

   if (unlikely(perf_score == 0)) { goto abandon_entry; }

-  if (afl->shm.cmplog_mode && !afl->queue_cur->fully_colorized) {
+  if (unlikely(afl->shm.cmplog_mode && !afl->queue_cur->fully_colorized)) {

     if (input_to_state_stage(afl, in_buf, out_buf, len,
                              afl->queue_cur->exec_cksum)) {

@@ -589,8 +587,9 @@ u8 fuzz_one_original(afl_state_t *afl) {
   /* Skip deterministic fuzzing if exec path checksum puts this out of scope
      for this main instance. */

-  if (afl->main_node_max && (afl->queue_cur->exec_cksum % afl->main_node_max) !=
-                                afl->main_node_id - 1) {
+  if (unlikely(afl->main_node_max &&
+               (afl->queue_cur->exec_cksum % afl->main_node_max) !=
+                   afl->main_node_id - 1)) {

     goto custom_mutator_stage;

@@ -679,7 +678,7 @@ u8 fuzz_one_original(afl_state_t *afl) {

         if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

-          maybe_add_auto((u8 *)afl, a_collect, a_len);
+          maybe_add_auto(afl, a_collect, a_len);

         }

@@ -690,7 +689,7 @@ u8 fuzz_one_original(afl_state_t *afl) {

       if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

-        maybe_add_auto((u8 *)afl, a_collect, a_len);
+        maybe_add_auto(afl, a_collect, a_len);

       }

@@ -797,7 +796,8 @@ u8 fuzz_one_original(afl_state_t *afl) {
   /* Initialize effector map for the next step (see comments below). Always
      flag first and last byte as doing something. */

-  eff_map = ck_maybe_grow(BUF_PARAMS(eff), EFF_ALEN(len));
+  eff_map = afl_realloc(AFL_BUF_PARAM(eff), EFF_ALEN(len));
+  if (unlikely(!eff_map)) { PFATAL("alloc"); }
   eff_map[0] = 1;

   if (EFF_APOS(len - 1) != 0) {

@@ -862,7 +862,7 @@ u8 fuzz_one_original(afl_state_t *afl) {
      whole thing as worth fuzzing, since we wouldn't be saving much time
      anyway. */

-  if (eff_cnt != EFF_ALEN(len) &&
+  if (eff_cnt != (u32)EFF_ALEN(len) &&
       eff_cnt * 100 / EFF_ALEN(len) > EFF_MAX_PERC) {

     memset(eff_map, 1, EFF_ALEN(len));

@@ -893,7 +893,7 @@ u8 fuzz_one_original(afl_state_t *afl) {

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 1; ++i) {
+  for (i = 0; (s32)i < len - 1; ++i) {

     /* Let's consult the effector map... */

@@ -931,7 +931,7 @@ u8 fuzz_one_original(afl_state_t *afl) {

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 3; ++i) {
+  for (i = 0; (s32)i < len - 3; ++i) {

     /* Let's consult the effector map... */
     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&

@@ -977,7 +977,7 @@ skip_bitflip:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u8 orig = out_buf[i];

@@ -1051,7 +1051,7 @@ skip_bitflip:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 1; ++i) {
+  for (i = 0; i < (u32)len - 1; ++i) {

     u16 orig = *(u16 *)(out_buf + i);

@@ -1161,7 +1161,7 @@ skip_bitflip:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 3; ++i) {
+  for (i = 0; i < (u32)len - 3; ++i) {

     u32 orig = *(u32 *)(out_buf + i);

@@ -1202,7 +1202,7 @@ skip_bitflip:

     }

-    if ((orig & 0xffff) < j && !could_be_bitflip(r2)) {
+    if ((orig & 0xffff) < (u32)j && !could_be_bitflip(r2)) {

       afl->stage_cur_val = -j;
       *(u32 *)(out_buf + i) = orig - j;

@@ -1234,7 +1234,7 @@ skip_bitflip:

     }

-    if ((SWAP32(orig) & 0xffff) < j && !could_be_bitflip(r4)) {
+    if ((SWAP32(orig) & 0xffff) < (u32)j && !could_be_bitflip(r4)) {

       afl->stage_cur_val = -j;
       *(u32 *)(out_buf + i) = SWAP32(SWAP32(orig) - j);

@@ -1276,7 +1276,7 @@ skip_arith:

   /* Setting 8-bit integers. */

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u8 orig = out_buf[i];

@@ -1291,7 +1291,7 @@ skip_arith:

     afl->stage_cur_byte = i;

-    for (j = 0; j < sizeof(interesting_8); ++j) {
+    for (j = 0; j < (u32)sizeof(interesting_8); ++j) {

       /* Skip if the value could be a product of bitflips or arithmetics. */

@@ -1331,7 +1331,7 @@ skip_arith:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 1; ++i) {
+  for (i = 0; (s32)i < len - 1; ++i) {

     u16 orig = *(u16 *)(out_buf + i);

@@ -1409,7 +1409,7 @@ skip_arith:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 3; i++) {
+  for (i = 0; (s32)i < len - 3; i++) {

     u32 orig = *(u32 *)(out_buf + i);

@@ -1496,7 +1496,7 @@ skip_interest:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u32 last_len = 0;

@@ -1554,9 +1554,10 @@ skip_interest:

   orig_hit_cnt = new_hit_cnt;

-  ex_tmp = ck_maybe_grow(BUF_PARAMS(ex), len + MAX_DICT_FILE);
+  ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
+  if (unlikely(!ex_tmp)) { PFATAL("alloc"); }

-  for (i = 0; i <= len; ++i) {
+  for (i = 0; i <= (u32)len; ++i) {

     afl->stage_cur_byte = i;

@@ -1602,19 +1603,20 @@ skip_user_extras:

   afl->stage_name = "auto extras (over)";
   afl->stage_short = "ext_AO";
   afl->stage_cur = 0;
-  afl->stage_max = MIN(afl->a_extras_cnt, USE_AUTO_EXTRAS) * len;
+  afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;

   afl->stage_val_type = STAGE_VAL_NONE;

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u32 last_len = 0;

     afl->stage_cur_byte = i;

-    for (j = 0; j < MIN(afl->a_extras_cnt, USE_AUTO_EXTRAS); ++j) {
+    u32 min_extra_len = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS);
+    for (j = 0; j < min_extra_len; ++j) {

       /* See the comment in the earlier code; extras are sorted by size. */

@@ -1680,6 +1682,8 @@ custom_mutator_stage:

       has_custom_fuzz = true;

+      afl->stage_short = el->name_short;
+
       for (afl->stage_cur = 0; afl->stage_cur < afl->stage_max;
            ++afl->stage_cur) {

@@ -1727,7 +1731,8 @@ custom_mutator_stage:
           fd = open(target->fname, O_RDONLY);
           if (unlikely(fd < 0)) { PFATAL("Unable to open '%s'", target->fname); }

-          new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch), target->len);
+          new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch), target->len);
+          if (unlikely(!new_buf)) { PFATAL("alloc"); }
           ck_read(fd, new_buf, target->len, target->fname);
           close(fd);

@@ -1902,7 +1907,8 @@ havoc_stage:
           temp_len = new_len;
           if (out_buf != custom_havoc_buf) {

-            ck_maybe_grow(BUF_PARAMS(out), temp_len);
+            afl_realloc(AFL_BUF_PARAM(out), temp_len);
+            if (unlikely(!afl->out_buf)) { PFATAL("alloc"); }
             memcpy(out_buf, custom_havoc_buf, temp_len);

           }

@@ -2126,7 +2132,7 @@ havoc_stage:
          u32 clone_from, clone_to, clone_len;
          u8 *new_buf;

-          if (actually_clone) {
+          if (likely(actually_clone)) {

            clone_len = choose_block_len(afl, temp_len);
            clone_from = rand_below(afl, temp_len - clone_len + 1);

@@ -2141,7 +2147,8 @@ havoc_stage:
           clone_to = rand_below(afl, temp_len);

           new_buf =
-              ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + clone_len);
+              afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
+          if (unlikely(!new_buf)) { PFATAL("alloc"); }

           /* Head */

@@ -2149,7 +2156,7 @@ havoc_stage:

           /* Inserted part */

-          if (actually_clone) {
+          if (likely(actually_clone)) {

             memcpy(new_buf + clone_to, out_buf + clone_from, clone_len);

@@ -2166,7 +2173,7 @@ havoc_stage:
           memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
                  temp_len - clone_to);

-          swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
+          afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
           out_buf = new_buf;
           new_buf = NULL;
           temp_len += clone_len;

@@ -2189,7 +2196,7 @@ havoc_stage:
           copy_from = rand_below(afl, temp_len - copy_len + 1);
           copy_to = rand_below(afl, temp_len - copy_len + 1);

-          if (rand_below(afl, 4)) {
+          if (likely(rand_below(afl, 4))) {

             if (copy_from != copy_to) {

@@ -2231,7 +2238,7 @@ havoc_stage:
             u32 extra_len = afl->a_extras[use_extra].len;
             u32 insert_at;

-            if (extra_len > temp_len) { break; }
+            if ((s32)extra_len > temp_len) { break; }

             insert_at = rand_below(afl, temp_len - extra_len + 1);
             memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,

@@ -2245,7 +2252,7 @@ havoc_stage:
             u32 extra_len = afl->extras[use_extra].len;
             u32 insert_at;

-            if (extra_len > temp_len) { break; }
+            if ((s32)extra_len > temp_len) { break; }

             insert_at = rand_below(afl, temp_len - extra_len + 1);
             memcpy(out_buf + insert_at, afl->extras[use_extra].data,

@@ -2281,7 +2288,8 @@ havoc_stage:

           if (temp_len + extra_len >= MAX_FILE) { break; }

-          out_buf = ck_maybe_grow(BUF_PARAMS(out), temp_len + extra_len);
+          out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
+          if (unlikely(!out_buf)) { PFATAL("alloc"); }

           /* Tail */
           memmove(out_buf + insert_at + extra_len, out_buf + insert_at,

@@ -2337,7 +2345,8 @@ havoc_stage:
           }

           u32 new_len = target->len;
-          u8 *new_buf = ck_maybe_grow(BUF_PARAMS(in_scratch), new_len);
+          u8 *new_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), new_len);
+          if (unlikely(!new_buf)) { PFATAL("alloc"); }

           ck_read(fd, new_buf, new_len, target->fname);

@@ -2360,7 +2369,7 @@ havoc_stage:
             u32 copy_from, copy_to, copy_len;

             copy_len = choose_block_len(afl, new_len - 1);
-            if (copy_len > temp_len) copy_len = temp_len;
+            if ((s32)copy_len > temp_len) copy_len = temp_len;

             copy_from = rand_below(afl, new_len - copy_len + 1);
             copy_to = rand_below(afl, temp_len - copy_len + 1);

@@ -2377,7 +2386,8 @@ havoc_stage:
             clone_to = rand_below(afl, temp_len);

             u8 *temp_buf =
-                ck_maybe_grow(BUF_PARAMS(out_scratch), temp_len + clone_len);
+                afl_realloc(AFL_BUF_PARAM(out_scratch), temp_len + clone_len);
+            if (unlikely(!temp_buf)) { PFATAL("alloc"); }

             /* Head */

@@ -2391,7 +2401,7 @@ havoc_stage:
             memcpy(temp_buf + clone_to + clone_len, out_buf + clone_to,
                    temp_len - clone_to);

-            swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
+            afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
             out_buf = temp_buf;
             temp_len += clone_len;

@@ -2412,7 +2422,8 @@ havoc_stage:
     /* out_buf might have been mangled a bit, so let's restore it to its
        original size and shape. */

-    out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
+    out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
+    if (unlikely(!out_buf)) { PFATAL("alloc"); }
     temp_len = len;
     memcpy(out_buf, in_buf, len);

@@ -2507,7 +2518,8 @@ retry_splicing:

     if (unlikely(fd < 0)) { PFATAL("Unable to open '%s'", target->fname); }

-    new_buf = ck_maybe_grow(BUF_PARAMS(in_scratch), target->len);
+    new_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), target->len);
+    if (unlikely(!new_buf)) { PFATAL("alloc"); }

     ck_read(fd, new_buf, target->len, target->fname);

@@ -2517,7 +2529,7 @@ retry_splicing:
        the last differing byte. Bail out if the difference is just a single
        byte or so. */

-    locate_diffs(in_buf, new_buf, MIN(len, target->len), &f_diff, &l_diff);
+    locate_diffs(in_buf, new_buf, MIN(len, (s64)target->len), &f_diff, &l_diff);

     if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) { goto retry_splicing; }

@@ -2529,10 +2541,11 @@ retry_splicing:

     len = target->len;
     memcpy(new_buf, in_buf, split_at);
-    swap_bufs(BUF_PARAMS(in), BUF_PARAMS(in_scratch));
+    afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));
     in_buf = new_buf;

-    out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
+    out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
+    if (unlikely(!out_buf)) { PFATAL("alloc"); }
     memcpy(out_buf, in_buf, len);

     goto custom_mutator_stage;

@@ -2587,7 +2600,9 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

   }

-  s32 len, fd, temp_len, i, j;
+  s32 len, fd, temp_len;
+  u32 i;
+  u32 j;
   u8 *in_buf, *out_buf, *orig_in, *ex_tmp, *eff_map = 0;
   u64 havoc_queued = 0, orig_hit_cnt, new_hit_cnt = 0, cur_ms_lv, prev_cksum;
   u32 splice_cycle = 0, perf_score = 100, orig_perf, eff_cnt = 1;

@@ -2671,7 +2686,8 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
      single byte anyway, so it wouldn't give us any performance or memory usage
      benefits. */

-  out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
+  out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
+  if (unlikely(!out_buf)) { PFATAL("alloc"); }

   afl->subseq_tmouts = 0;

@@ -2746,7 +2762,7 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

   orig_perf = perf_score = calculate_score(afl, afl->queue_cur);

-  if (afl->shm.cmplog_mode && !afl->queue_cur->fully_colorized) {
+  if (unlikely(afl->shm.cmplog_mode && !afl->queue_cur->fully_colorized)) {

     if (input_to_state_stage(afl, in_buf, out_buf, len,
                              afl->queue_cur->exec_cksum)) {

@@ -2761,9 +2777,9 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

   cur_ms_lv = get_cur_time();
   if (!(afl->key_puppet == 0 &&
-        ((cur_ms_lv - afl->last_path_time < afl->limit_time_puppet) ||
+        ((cur_ms_lv - afl->last_path_time < (u32)afl->limit_time_puppet) ||
          (afl->last_crash_time != 0 &&
-          cur_ms_lv - afl->last_crash_time < afl->limit_time_puppet) ||
+          cur_ms_lv - afl->last_crash_time < (u32)afl->limit_time_puppet) ||
          afl->last_path_time == 0))) {

     afl->key_puppet = 1;

@@ -2875,7 +2891,7 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

         if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

-          maybe_add_auto((u8 *)afl, a_collect, a_len);
+          maybe_add_auto(afl, a_collect, a_len);

         }

@@ -2886,7 +2902,7 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

       if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA) {

-        maybe_add_auto((u8 *)afl, a_collect, a_len);
+        maybe_add_auto(afl, a_collect, a_len);

       }

@@ -2993,7 +3009,8 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
   /* Initialize effector map for the next step (see comments below). Always
      flag first and last byte as doing something. */

-  eff_map = ck_maybe_grow(BUF_PARAMS(eff), EFF_ALEN(len));
+  eff_map = afl_realloc(AFL_BUF_PARAM(eff), EFF_ALEN(len));
+  if (unlikely(!eff_map)) { PFATAL("alloc"); }
   eff_map[0] = 1;

   if (EFF_APOS(len - 1) != 0) {

@@ -3058,7 +3075,7 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {
      whole thing as worth fuzzing, since we wouldn't be saving much time
      anyway. */

-  if (eff_cnt != EFF_ALEN(len) &&
+  if (eff_cnt != (u32)EFF_ALEN(len) &&
       eff_cnt * 100 / EFF_ALEN(len) > EFF_MAX_PERC) {

     memset(eff_map, 1, EFF_ALEN(len));

@@ -3089,7 +3106,7 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 1; ++i) {
+  for (i = 0; (s32)i < len - 1; ++i) {

     /* Let's consult the effector map... */

@@ -3127,7 +3144,7 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) {

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 3; ++i) {
+  for (i = 0; (s32)i < len - 3; ++i) {

     /* Let's consult the effector map... */
     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&

@@ -3173,7 +3190,7 @@ skip_bitflip:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u8 orig = out_buf[i];

@@ -3247,7 +3264,7 @@ skip_bitflip:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 1; ++i) {
+  for (i = 0; (s32)i < len - 1; ++i) {

     u16 orig = *(u16 *)(out_buf + i);

@@ -3357,7 +3374,7 @@ skip_bitflip:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 3; ++i) {
+  for (i = 0; (s32)i < len - 3; ++i) {

     u32 orig = *(u32 *)(out_buf + i);

@@ -3472,7 +3489,7 @@ skip_arith:

   /* Setting 8-bit integers. */

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u8 orig = out_buf[i];

@@ -3527,7 +3544,7 @@ skip_arith:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 1; ++i) {
+  for (i = 0; (s32)i < len - 1; ++i) {

     u16 orig = *(u16 *)(out_buf + i);

@@ -3605,7 +3622,7 @@ skip_arith:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len - 3; ++i) {
+  for (i = 0; (s32)i < len - 3; ++i) {

     u32 orig = *(u32 *)(out_buf + i);

@@ -3692,7 +3709,7 @@ skip_interest:

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u32 last_len = 0;

@@ -3750,9 +3767,10 @@ skip_interest:

   orig_hit_cnt = new_hit_cnt;

-  ex_tmp = ck_maybe_grow(BUF_PARAMS(ex), len + MAX_DICT_FILE);
+  ex_tmp = afl_realloc(AFL_BUF_PARAM(ex), len + MAX_DICT_FILE);
+  if (unlikely(!ex_tmp)) { PFATAL("alloc"); }

-  for (i = 0; i <= len; ++i) {
+  for (i = 0; i <= (u32)len; ++i) {

     afl->stage_cur_byte = i;

@@ -3798,23 +3816,23 @@ skip_user_extras:

   afl->stage_name = "auto extras (over)";
   afl->stage_short = "ext_AO";
   afl->stage_cur = 0;
-  afl->stage_max = MIN(afl->a_extras_cnt, USE_AUTO_EXTRAS) * len;
+  afl->stage_max = MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS) * len;

   afl->stage_val_type = STAGE_VAL_NONE;

   orig_hit_cnt = new_hit_cnt;

-  for (i = 0; i < len; ++i) {
+  for (i = 0; i < (u32)len; ++i) {

     u32 last_len = 0;

     afl->stage_cur_byte = i;

-    for (j = 0; j < MIN(afl->a_extras_cnt, USE_AUTO_EXTRAS); ++j) {
+    for (j = 0; j < MIN(afl->a_extras_cnt, (u32)USE_AUTO_EXTRAS); ++j) {

       /* See the comment in the earlier code; extras are sorted by size. */

-      if (afl->a_extras[j].len > len - i ||
+      if ((s32)(afl->a_extras[j].len) > (s32)(len - i) ||
           !memcmp(afl->a_extras[j].data, out_buf + i, afl->a_extras[j].len) ||
           !memchr(eff_map + EFF_APOS(i), 1,
                   EFF_SPAN_ALEN(i, afl->a_extras[j].len))) {

@@ -4188,8 +4206,9 @@ pacemaker_fuzzing:

               clone_to = rand_below(afl, temp_len);

-              new_buf = ck_maybe_grow(BUF_PARAMS(out_scratch),
-                                      temp_len + clone_len);
+              new_buf = afl_realloc(AFL_BUF_PARAM(out_scratch),
+                                    temp_len + clone_len);
+              if (unlikely(!new_buf)) { PFATAL("alloc"); }

               /* Head */

@@ -4215,7 +4234,7 @@ pacemaker_fuzzing:
               memcpy(new_buf + clone_to + clone_len, out_buf + clone_to,
                      temp_len - clone_to);

-              swap_bufs(BUF_PARAMS(out), BUF_PARAMS(out_scratch));
+              afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch));
               out_buf = new_buf;
               temp_len += clone_len;
               MOpt_globals.cycles_v2[STAGE_Clone75] += 1;

@@ -4276,7 +4295,7 @@ pacemaker_fuzzing:
               u32 use_extra = rand_below(afl, afl->a_extras_cnt);
               u32 extra_len = afl->a_extras[use_extra].len;

-              if (extra_len > temp_len) break;
+              if (extra_len > (u32)temp_len) break;

               u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
               memcpy(out_buf + insert_at, afl->a_extras[use_extra].data,

@@ -4289,7 +4308,7 @@ pacemaker_fuzzing:
               u32 use_extra = rand_below(afl, afl->extras_cnt);
               u32 extra_len = afl->extras[use_extra].len;

-              if (extra_len > temp_len) break;
+              if (extra_len > (u32)temp_len) break;

               u32 insert_at = rand_below(afl, temp_len - extra_len + 1);
               memcpy(out_buf + insert_at, afl->extras[use_extra].data,

@@ -4332,7 +4351,8 @@ pacemaker_fuzzing:

             if (temp_len + extra_len >= MAX_FILE) break;

-            out_buf = ck_maybe_grow(BUF_PARAMS(out), temp_len + extra_len);
+            out_buf = afl_realloc(AFL_BUF_PARAM(out), temp_len + extra_len);
+            if (unlikely(!out_buf)) { PFATAL("alloc"); }

             /* Tail */
             memmove(out_buf + insert_at + extra_len, out_buf + insert_at,

@@ -4365,7 +4385,8 @@ pacemaker_fuzzing:
           /* out_buf might have been mangled a bit, so let's restore it to its
              original size and shape. */

-          out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
+          out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
+          if (unlikely(!out_buf)) { PFATAL("alloc"); }
           temp_len = len;
           memcpy(out_buf, in_buf, len);

@@ -4449,7 +4470,8 @@ pacemaker_fuzzing:

     retry_splicing_puppet:

-      if (afl->use_splicing && splice_cycle++ < afl->SPLICE_CYCLES_puppet &&
+      if (afl->use_splicing &&
+          splice_cycle++ < (u32)afl->SPLICE_CYCLES_puppet &&
           afl->queued_paths > 1 && afl->queue_cur->len > 1) {

         struct queue_entry *target;

@@ -4509,7 +4531,8 @@ pacemaker_fuzzing:

         if (fd < 0) { PFATAL("Unable to open '%s'", target->fname); }

-        new_buf = ck_maybe_grow(BUF_PARAMS(in_scratch), target->len);
+        new_buf = afl_realloc(AFL_BUF_PARAM(in_scratch), target->len);
+        if (unlikely(!new_buf)) { PFATAL("alloc"); }

         ck_read(fd, new_buf, target->len, target->fname);

@@ -4519,7 +4542,8 @@ pacemaker_fuzzing:
            the last differing byte. Bail out if the difference is just a single
            byte or so. */

-        locate_diffs(in_buf, new_buf, MIN(len, target->len), &f_diff, &l_diff);
+        locate_diffs(in_buf, new_buf, MIN(len, (s32)target->len), &f_diff,
+                     &l_diff);

         if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) {

@@ -4535,9 +4559,10 @@ pacemaker_fuzzing:

         len = target->len;
         memcpy(new_buf, in_buf, split_at);
-        swap_bufs(BUF_PARAMS(in), BUF_PARAMS(in_scratch));
+        afl_swap_bufs(AFL_BUF_PARAM(in), AFL_BUF_PARAM(in_scratch));
         in_buf = new_buf;

-        out_buf = ck_maybe_grow(BUF_PARAMS(out), len);
+        out_buf = afl_realloc(AFL_BUF_PARAM(out), len);
+        if (unlikely(!out_buf)) { PFATAL("alloc"); }
         memcpy(out_buf, in_buf, len);

         goto havoc_stage_puppet;

@@ -4551,7 +4576,7 @@ pacemaker_fuzzing:

     abandon_entry:
     abandon_entry_puppet:

-      if (splice_cycle >= afl->SPLICE_CYCLES_puppet) {
+      if ((s64)splice_cycle >= afl->SPLICE_CYCLES_puppet) {

         afl->SPLICE_CYCLES_puppet =
             (rand_below(

@@ -4870,5 +4895,3 @@ u8 fuzz_one(afl_state_t *afl) {

 }

-#undef BUF_PARAMS
-
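The change applied throughout this diff is mechanical: every `ck_maybe_grow(BUF_PARAMS(name), size)` call becomes `afl_realloc(AFL_BUF_PARAM(name), size)` followed by an explicit NULL check, so an allocation failure aborts via `PFATAL("alloc")` at the call site instead of inside the helper. A minimal standalone sketch of that calling pattern follows; the `afl_realloc_sketch` wrapper backed by plain `realloc` is an illustrative assumption, not the actual AFL++ allocator, which also tracks each buffer's capacity so repeated calls can skip shrinking reallocations.

/* Sketch of the grow-then-check pattern this commit standardizes on.
   Assumption: afl_realloc() is modeled here with plain realloc();
   the real AFL++ version keeps the allocated size next to the pointer. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PFATAL(msg)     \
  do {                  \
    perror(msg);        \
    exit(EXIT_FAILURE); \
  } while (0)

/* Grows *buf to at least size_needed bytes. Returns the new pointer,
   or NULL on failure, mirroring the afl_realloc() contract used above. */
static void *afl_realloc_sketch(void **buf, size_t size_needed) {

  void *new_buf = realloc(*buf, size_needed);
  if (new_buf) { *buf = new_buf; }
  return new_buf;

}

int main(void) {

  void  *out_buf_storage = NULL;
  size_t len = 4096;

  /* The recurring two-line idiom from the diff:
     grow the buffer, then fail hard if the allocator returned NULL. */
  unsigned char *out_buf = afl_realloc_sketch(&out_buf_storage, len);
  if (!out_buf) { PFATAL("alloc"); }

  memset(out_buf, 0, len);
  printf("grew buffer to %zu bytes\n", len);
  free(out_buf_storage);
  return 0;

}

Passing the address of the storage slot (`&out_buf_storage`) rather than the pointer itself is what makes the companion `afl_swap_bufs(AFL_BUF_PARAM(out), AFL_BUF_PARAM(out_scratch))` calls possible: two slots can be exchanged without copying buffer contents.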
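The other recurring edit wraps branch conditions in `likely()`/`unlikely()` where one outcome dominates, for example `if (likely(rand_below(afl, 10)))`, which is true nine times out of ten. A small sketch of how such hints are conventionally built on GCC/Clang's `__builtin_expect` follows; this is the common idiom, assumed here for illustration, while AFL++'s own definitions live in its headers.

/* Sketch of branch-prediction hint macros, assuming the usual
   __builtin_expect idiom (GCC/Clang). */
#include <stdio.h>

#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)

/* Hypothetical stand-in for AFL++'s rand_below(); returns 0..limit-1. */
static int rand_below_stub(int limit) { return 7 % limit; }

int main(void) {

  /* Mirrors the diff's pattern: the hint tells the compiler to lay
     out the common path fall-through, keeping the hot loop dense. */
  if (likely(rand_below_stub(10))) {

    puts("common path: pick a medium/large havoc block");

  } else {

    puts("rare path: pick a small block");

  }

  return 0;

}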