Diffstat (limited to 'src/afl-fuzz-redqueen.c')
-rw-r--r-- | src/afl-fuzz-redqueen.c | 240
1 file changed, 177 insertions, 63 deletions
diff --git a/src/afl-fuzz-redqueen.c b/src/afl-fuzz-redqueen.c
index 4acc204b..3e9af088 100644
--- a/src/afl-fuzz-redqueen.c
+++ b/src/afl-fuzz-redqueen.c
@@ -37,7 +37,7 @@ struct range {
 
 };
 
-struct range *add_range(struct range *ranges, u32 start, u32 end) {
+static struct range *add_range(struct range *ranges, u32 start, u32 end) {
 
   struct range *r = ck_alloc_nozero(sizeof(struct range));
   r->start = start;
@@ -47,7 +47,7 @@ struct range *add_range(struct range *ranges, u32 start, u32 end) {
 
 }
 
-struct range *pop_biggest_range(struct range **ranges) {
+static struct range *pop_biggest_range(struct range **ranges) {
 
   struct range *r = *ranges;
   struct range *prev = NULL;
@@ -88,7 +88,7 @@ static u8 get_exec_checksum(afl_state_t *afl, u8 *buf, u32 len, u32 *cksum) {
 
   if (unlikely(common_fuzz_stuff(afl, buf, len))) return 1;
 
-  *cksum = hash32(afl->fsrv.trace_bits, MAP_SIZE, HASH_CONST);
+  *cksum = hash32(afl->fsrv.trace_bits, afl->fsrv.map_size, HASH_CONST);
   return 0;
 
 }
@@ -115,32 +115,46 @@ static u8 colorization(afl_state_t *afl, u8 *buf, u32 len, u32 exec_cksum) {
   afl->stage_short = "colorization";
   afl->stage_max = 1000;
 
-  struct range *rng;
+  struct range *rng = NULL;
   afl->stage_cur = 0;
   while ((rng = pop_biggest_range(&ranges)) != NULL &&
          afl->stage_cur < afl->stage_max) {
 
     u32 s = rng->end - rng->start;
-    if (s == 0) goto empty_range;
 
-    memcpy(backup, buf + rng->start, s);
-    rand_replace(afl, buf + rng->start, s);
+    if (s != 0) {
 
-    u32 cksum;
-    if (unlikely(get_exec_checksum(afl, buf, len, &cksum))) goto checksum_fail;
+      /* Range not empty */
 
-    if (cksum != exec_cksum) {
+      memcpy(backup, buf + rng->start, s);
+      rand_replace(afl, buf + rng->start, s);
 
-      ranges = add_range(ranges, rng->start, rng->start + s / 2);
-      ranges = add_range(ranges, rng->start + s / 2 + 1, rng->end);
-      memcpy(buf + rng->start, backup, s);
+      u32 cksum;
+      u64 start_us = get_cur_time_us();
+      if (unlikely(get_exec_checksum(afl, buf, len, &cksum)))
+        goto checksum_fail;
 
-    } else
+      u64 stop_us = get_cur_time_us();
 
-      needs_write = 1;
+      /* Discard if the mutations change the paths or if it is too decremental
+         in speed */
+      if (cksum != exec_cksum ||
+          (stop_us - start_us > 2 * afl->queue_cur->exec_us)) {
+
+        ranges = add_range(ranges, rng->start, rng->start + s / 2);
+        ranges = add_range(ranges, rng->start + s / 2 + 1, rng->end);
+        memcpy(buf + rng->start, backup, s);
+
+      } else {
+
+        needs_write = 1;
+
+      }
+
+    }
 
-  empty_range:
     ck_free(rng);
+    rng = NULL;
     ++afl->stage_cur;
 
   }
@@ -157,6 +171,7 @@ static u8 colorization(afl_state_t *afl, u8 *buf, u32 len, u32 exec_cksum) {
 
     rng = ranges;
    ranges = ranges->next;
     ck_free(rng);
+    rng = NULL;
 
   }
@@ -189,6 +204,7 @@ static u8 colorization(afl_state_t *afl, u8 *buf, u32 len, u32 exec_cksum) {
   return 0;
 
 checksum_fail:
+  if (rng) ck_free(rng);
   ck_free(backup);
 
   while (ranges) {
@@ -196,9 +212,12 @@ checksum_fail:
     rng = ranges;
     ranges = ranges->next;
     ck_free(rng);
+    rng = NULL;
 
   }
 
+  // TODO: clang notices a _potential_ leak of mem pointed to by rng
+
   return 1;
 
 }
@@ -225,24 +244,25 @@ static u8 its_fuzz(afl_state_t *afl, u8 *buf, u32 len, u8 *status) {
 }
 
 static u8 cmp_extend_encoding(afl_state_t *afl, struct cmp_header *h,
-                              u64 pattern, u64 repl, u32 idx, u8 *orig_buf,
-                              u8 *buf, u32 len, u8 do_reverse, u8 *status) {
+                              u64 pattern, u64 repl, u64 o_pattern, u32 idx,
+                              u8 *orig_buf, u8 *buf, u32 len, u8 do_reverse,
+                              u8 *status) {
 
   u64 *buf_64 = (u64 *)&buf[idx];
   u32 *buf_32 = (u32 *)&buf[idx];
   u16 *buf_16 = (u16 *)&buf[idx];
-  // u8* buf_8 = &buf[idx];
-  // u64* o_buf_64 = (u64*)&orig_buf[idx];
-  // u32* o_buf_32 = (u32*)&orig_buf[idx];
-  // u16* o_buf_16 = (u16*)&orig_buf[idx];
-  // u8* o_buf_8 = &orig_buf[idx];
+  u8 * buf_8 = &buf[idx];
+  u64 *o_buf_64 = (u64 *)&orig_buf[idx];
+  u32 *o_buf_32 = (u32 *)&orig_buf[idx];
+  u16 *o_buf_16 = (u16 *)&orig_buf[idx];
+  u8 * o_buf_8 = &orig_buf[idx];
 
   u32 its_len = len - idx;
   *status = 0;
 
   if (SHAPE_BYTES(h->shape) == 8) {
 
-    if (its_len >= 8 && *buf_64 == pattern) {  // && *o_buf_64 == pattern) {
+    if (its_len >= 8 && *buf_64 == pattern && *o_buf_64 == o_pattern) {
 
       *buf_64 = repl;
       if (unlikely(its_fuzz(afl, buf, len, status))) return 1;
@@ -253,15 +273,16 @@ static u8 cmp_extend_encoding(afl_state_t *afl, struct cmp_header *h,
     // reverse encoding
     if (do_reverse)
       if (unlikely(cmp_extend_encoding(afl, h, SWAP64(pattern), SWAP64(repl),
-                                       idx, orig_buf, buf, len, 0, status)))
+                                       SWAP64(o_pattern), idx, orig_buf, buf,
+                                       len, 0, status)))
         return 1;
 
   }
 
   if (SHAPE_BYTES(h->shape) == 4 || *status == 2) {
 
-    if (its_len >= 4 &&
-        *buf_32 == (u32)pattern) {  // && *o_buf_32 == (u32)pattern) {
+    if (its_len >= 4 && *buf_32 == (u32)pattern &&
+        *o_buf_32 == (u32)o_pattern) {
 
       *buf_32 = (u32)repl;
       if (unlikely(its_fuzz(afl, buf, len, status))) return 1;
@@ -272,15 +293,16 @@ static u8 cmp_extend_encoding(afl_state_t *afl, struct cmp_header *h,
     // reverse encoding
     if (do_reverse)
      if (unlikely(cmp_extend_encoding(afl, h, SWAP32(pattern), SWAP32(repl),
-                                       idx, orig_buf, buf, len, 0, status)))
+                                       SWAP32(o_pattern), idx, orig_buf, buf,
+                                       len, 0, status)))
        return 1;
 
   }
 
   if (SHAPE_BYTES(h->shape) == 2 || *status == 2) {
 
-    if (its_len >= 2 &&
-        *buf_16 == (u16)pattern) {  // && *o_buf_16 == (u16)pattern) {
+    if (its_len >= 2 && *buf_16 == (u16)pattern &&
+        *o_buf_16 == (u16)o_pattern) {
 
       *buf_16 = (u16)repl;
       if (unlikely(its_fuzz(afl, buf, len, status))) return 1;
@@ -291,23 +313,23 @@ static u8 cmp_extend_encoding(afl_state_t *afl, struct cmp_header *h,
     // reverse encoding
     if (do_reverse)
       if (unlikely(cmp_extend_encoding(afl, h, SWAP16(pattern), SWAP16(repl),
-                                       idx, orig_buf, buf, len, 0, status)))
+                                       SWAP16(o_pattern), idx, orig_buf, buf,
+                                       len, 0, status)))
         return 1;
 
   }
 
-  /*if (SHAPE_BYTES(h->shape) == 1 || *status == 2) {
+  if (SHAPE_BYTES(h->shape) == 1 || *status == 2) {
 
-    if (its_len >= 2 && *buf_8 == (u8)pattern) {// && *o_buf_8 == (u8)pattern) {
+    if (its_len >= 1 && *buf_8 == (u8)pattern && *o_buf_8 == (u8)o_pattern) {
 
       *buf_8 = (u8)repl;
-      if (unlikely(its_fuzz(afl, buf, len, status)))
-        return 1;
-      *buf_16 = (u16)pattern;
+      if (unlikely(its_fuzz(afl, buf, len, status))) return 1;
+      *buf_8 = (u8)pattern;
 
     }
 
-  }*/
+  }
 
   return 0;
 
@@ -332,7 +354,7 @@ static void try_to_add_to_dict(afl_state_t *afl, u64 v, u8 shape) {
 
   }
 
-  maybe_add_auto(afl, (u8 *)&v, shape);
+  maybe_add_auto((u8 *)afl, (u8 *)&v, shape);
 
   u64 rev;
   switch (shape) {
@@ -340,15 +362,15 @@ static void try_to_add_to_dict(afl_state_t *afl, u64 v, u8 shape) {
     case 1:
      break;
     case 2:
      rev = SWAP16((u16)v);
-      maybe_add_auto(afl, (u8 *)&rev, shape);
+      maybe_add_auto((u8 *)afl, (u8 *)&rev, shape);
      break;
    case 4:
      rev = SWAP32((u32)v);
-      maybe_add_auto(afl, (u8 *)&rev, shape);
+      maybe_add_auto((u8 *)afl, (u8 *)&rev, shape);
      break;
    case 8:
      rev = SWAP64(v);
-      maybe_add_auto(afl, (u8 *)&rev, shape);
+      maybe_add_auto((u8 *)afl, (u8 *)&rev, shape);
      break;
 
   }
@@ -363,14 +385,44 @@ static u8 cmp_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
   u32 loggeds = h->hits;
   if (h->hits > CMP_MAP_H) loggeds = CMP_MAP_H;
 
-  u8 status;
+  u8 status = 0;
   // opt not in the paper
-  u32 fails = 0;
+  u32 fails;
+  u8 found_one = 0;
+
+  /* loop cmps are useless, detect and blacklist them */
+  u64 s_v0, s_v1;
+  u8 s_v0_fixed = 1, s_v1_fixed = 1;
+  u8 s_v0_inc = 1, s_v1_inc = 1;
+  u8 s_v0_dec = 1, s_v1_dec = 1;
 
   for (i = 0; i < loggeds; ++i) {
 
+    fails = 0;
+
     struct cmp_operands *o = &afl->shm.cmp_map->log[key][i];
 
+    // loop detection code
+    if (i == 0) {
+
+      s_v0 = o->v0;
+      s_v1 = o->v1;
+
+    } else {
+
+      if (s_v0 != o->v0) s_v0_fixed = 0;
+      if (s_v1 != o->v1) s_v1_fixed = 0;
+      if (s_v0 + 1 != o->v0) s_v0_inc = 0;
+      if (s_v1 + 1 != o->v1) s_v1_inc = 0;
+      if (s_v0 - 1 != o->v0) s_v0_dec = 0;
+      if (s_v1 - 1 != o->v1) s_v1_dec = 0;
+      s_v0 = o->v0;
+      s_v1 = o->v1;
+
+    }
+
+    struct cmp_operands *orig_o = &afl->orig_cmp_map->log[key][i];
+
     // opt not in the paper
     for (j = 0; j < i; ++j)
       if (afl->shm.cmp_map->log[key][j].v0 == o->v0 &&
@@ -379,16 +431,16 @@ static u8 cmp_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
 
     for (idx = 0; idx < len && fails < 8; ++idx) {
 
-      if (unlikely(cmp_extend_encoding(afl, h, o->v0, o->v1, idx, orig_buf, buf,
-                                       len, 1, &status)))
+      if (unlikely(cmp_extend_encoding(afl, h, o->v0, o->v1, orig_o->v0, idx,
+                                       orig_buf, buf, len, 1, &status)))
        return 1;
 
      if (status == 2)
        ++fails;
      else if (status == 1)
        break;
 
-      if (unlikely(cmp_extend_encoding(afl, h, o->v1, o->v0, idx, orig_buf, buf,
-                                       len, 1, &status)))
+      if (unlikely(cmp_extend_encoding(afl, h, o->v1, o->v0, orig_o->v1, idx,
+                                       orig_buf, buf, len, 1, &status)))
        return 1;
 
      if (status == 2)
        ++fails;
@@ -397,11 +449,17 @@ static u8 cmp_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
 
     }
 
+    if (status == 1) found_one = 1;
+
     // If failed, add to dictionary
     if (fails == 8) {
 
-      try_to_add_to_dict(afl, o->v0, SHAPE_BYTES(h->shape));
-      try_to_add_to_dict(afl, o->v1, SHAPE_BYTES(h->shape));
+      if (afl->pass_stats[key].total == 0) {
+
+        try_to_add_to_dict(afl, o->v0, SHAPE_BYTES(h->shape));
+        try_to_add_to_dict(afl, o->v1, SHAPE_BYTES(h->shape));
+
+      }
 
     }
 
@@ -410,13 +468,28 @@ static u8 cmp_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
 
   }
 
+  if (loggeds > 3 && ((s_v0_fixed && s_v1_inc) || (s_v1_fixed && s_v0_inc) ||
+                      (s_v0_fixed && s_v1_dec) || (s_v1_fixed && s_v0_dec))) {
+
+    afl->pass_stats[key].total = afl->pass_stats[key].faileds = 0xff;
+
+  }
+
+  if (!found_one && afl->pass_stats[key].faileds < 0xff) {
+
+    afl->pass_stats[key].faileds++;
+
+  }
+
+  if (afl->pass_stats[key].total < 0xff) afl->pass_stats[key].total++;
+
   return 0;
 
 }
 
 static u8 rtn_extend_encoding(afl_state_t *afl, struct cmp_header *h,
-                              u8 *pattern, u8 *repl, u32 idx, u8 *orig_buf,
-                              u8 *buf, u32 len, u8 *status) {
+                              u8 *pattern, u8 *repl, u8 *o_pattern, u32 idx,
+                              u8 *orig_buf, u8 *buf, u32 len, u8 *status) {
 
   u32 i;
   u32 its_len = MIN(32, len - idx);
@@ -428,7 +501,9 @@ static u8 rtn_extend_encoding(afl_state_t *afl, struct cmp_header *h,
 
   for (i = 0; i < its_len; ++i) {
 
-    if (pattern[idx + i] != buf[idx + i] || *status == 1) break;
+    if (pattern[idx + i] != buf[idx + i] ||
+        o_pattern[idx + i] != orig_buf[idx + i] || *status == 1)
+      break;
 
     buf[idx + i] = repl[idx + i];
     if (unlikely(its_fuzz(afl, buf, len, status))) return 1;
@@ -448,15 +523,21 @@ static u8 rtn_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
   u32 loggeds = h->hits;
   if (h->hits > CMP_MAP_RTN_H) loggeds = CMP_MAP_RTN_H;
 
-  u8 status;
+  u8 status = 0;
   // opt not in the paper
   u32 fails = 0;
+  u8 found_one = 0;
 
   for (i = 0; i < loggeds; ++i) {
 
+    fails = 0;
+
     struct cmpfn_operands *o =
         &((struct cmpfn_operands *)afl->shm.cmp_map->log[key])[i];
+    struct cmpfn_operands *orig_o =
+        &((struct cmpfn_operands *)afl->orig_cmp_map->log[key])[i];
+
     // opt not in the paper
     for (j = 0; j < i; ++j)
       if (!memcmp(&((struct cmpfn_operands *)afl->shm.cmp_map->log[key])[j], o,
@@ -465,16 +546,16 @@ static u8 rtn_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
 
     for (idx = 0; idx < len && fails < 8; ++idx) {
 
-      if (unlikely(rtn_extend_encoding(afl, h, o->v0, o->v1, idx, orig_buf, buf,
-                                       len, &status)))
+      if (unlikely(rtn_extend_encoding(afl, h, o->v0, o->v1, orig_o->v0, idx,
+                                       orig_buf, buf, len, &status)))
        return 1;
 
      if (status == 2)
        ++fails;
      else if (status == 1)
        break;
 
-      if (unlikely(rtn_extend_encoding(afl, h, o->v1, o->v0, idx, orig_buf, buf,
-                                       len, &status)))
+      if (unlikely(rtn_extend_encoding(afl, h, o->v1, o->v0, orig_o->v1, idx,
+                                       orig_buf, buf, len, &status)))
        return 1;
 
      if (status == 2)
        ++fails;
@@ -483,11 +564,17 @@ static u8 rtn_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
 
     }
 
+    if (status == 1) found_one = 1;
+
     // If failed, add to dictionary
     if (fails == 8) {
 
-      maybe_add_auto(afl, o->v0, SHAPE_BYTES(h->shape));
-      maybe_add_auto(afl, o->v1, SHAPE_BYTES(h->shape));
+      if (afl->pass_stats[key].total == 0) {
+
+        maybe_add_auto((u8 *)afl, o->v0, SHAPE_BYTES(h->shape));
+        maybe_add_auto((u8 *)afl, o->v1, SHAPE_BYTES(h->shape));
+
+      }
 
     }
 
@@ -496,6 +583,14 @@ static u8 rtn_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u32 len) {
 
   }
 
+  if (!found_one && afl->pass_stats[key].faileds < 0xff) {
+
+    afl->pass_stats[key].faileds++;
+
+  }
+
+  if (afl->pass_stats[key].total < 0xff) afl->pass_stats[key].total++;
+
   return 0;
 
 }
@@ -507,6 +602,18 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len,
                         u32 exec_cksum) {
 
   u8 r = 1;
+  if (afl->orig_cmp_map == NULL)
+    afl->orig_cmp_map = ck_alloc_nozero(sizeof(struct cmp_map));
+
+  if (afl->pass_stats == NULL)
+    afl->pass_stats = ck_alloc(sizeof(struct afl_pass_stat) * CMP_MAP_W);
+
+  // do it manually, forkserver clear only afl->fsrv.trace_bits
+  memset(afl->shm.cmp_map->headers, 0, sizeof(afl->shm.cmp_map->headers));
+
+  if (unlikely(common_fuzz_cmplog_stuff(afl, buf, len))) return 1;
+
+  memcpy(afl->orig_cmp_map, afl->shm.cmp_map, sizeof(struct cmp_map));
 
   if (unlikely(colorization(afl, buf, len, exec_cksum))) return 1;
 
@@ -516,7 +623,7 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len,
   if (unlikely(common_fuzz_cmplog_stuff(afl, buf, len))) return 1;
 
   u64 orig_hit_cnt, new_hit_cnt;
-  u64 orig_execs = afl->total_execs;
+  u64 orig_execs = afl->fsrv.total_execs;
   orig_hit_cnt = afl->queued_paths + afl->unique_crashes;
 
   afl->stage_name = "input-to-state";
@@ -528,6 +635,13 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len,
   for (k = 0; k < CMP_MAP_W; ++k) {
 
     if (!afl->shm.cmp_map->headers[k].hits) continue;
+
+    if (afl->pass_stats[k].total &&
+        (rand_below(afl, afl->pass_stats[k].total) >=
+             afl->pass_stats[k].faileds ||
+         afl->pass_stats[k].total == 0xff))
+      afl->shm.cmp_map->headers[k].hits = 0;  // blacklist this cmp
+
     if (afl->shm.cmp_map->headers[k].type == CMP_TYPE_INS)
       afl->stage_max += MIN((u32)afl->shm.cmp_map->headers[k].hits, CMP_MAP_H);
     else
@@ -555,11 +669,11 @@ u8 input_to_state_stage(afl_state_t *afl, u8 *orig_buf, u8 *buf, u32 len,
 
   r = 0;
 
 exit_its:
-  memcpy(orig_buf, buf, len);
-
   new_hit_cnt = afl->queued_paths + afl->unique_crashes;
   afl->stage_finds[STAGE_ITS] += new_hit_cnt - orig_hit_cnt;
-  afl->stage_cycles[STAGE_ITS] += afl->total_execs - orig_execs;
+  afl->stage_cycles[STAGE_ITS] += afl->fsrv.total_execs - orig_execs;
+
+  memcpy(orig_buf, buf, len);
 
   return r;