From fcd21256780fd21c55e72e9338b3992c60db22dc Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 4 Apr 2023 15:47:53 +0200 Subject: prepare for strategies --- include/afl-mutations.h | 992 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 992 insertions(+) create mode 100644 include/afl-mutations.h (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h new file mode 100644 index 00000000..43b7927d --- /dev/null +++ b/include/afl-mutations.h @@ -0,0 +1,992 @@ +/* Implementation of afl havoc mutation to be used in AFL++ custom mutators and + partially in afl-fuzz itself. + + How to use: + + #include "afl-mutations.h" // needs afl-fuzz.h + + u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32t steps, bool is_text, + bool is_exploration, u8 *splice_buf, u32 splice_len); + + Returns: + u32 - the length of the mutated data return in *buf. 0 = error + Parameters: + afl_state_t *afl - the *afl state pointer + u8 *buf - the input buffer to mutate which will be mutated into. + NOTE: must be of MAX_FILE size! + u32 len - the length of the input + u32 steps - how many mutations to perform on the input + bool is_text - is the target expecting text inputs + bool is_exploration - mutate for exploration mode (instead of exploitation) + splice_buf - a buffer from another corpus item to splice with. + If NULL then no splicing + splice_len - the length of the splice buffer. If 0 then no splicing +*/ + +#ifndef _ANDROID_ASHMEM_H + #define AFL_MUTATIONS_H + + #include + #include "afl-fuzz.h" + + #define MUT_STRATEGY_ARRAY_SIZE 256 + +enum { + + /* 00 */ MUT_FLIPBIT, + /* 01 */ MUT_INTERESTING8, + /* 02 */ MUT_INTERESTING16, + /* 03 */ MUT_INTERESTING16BE, + /* 04 */ MUT_INTERESTING32, + /* 05 */ MUT_INTERESTING32BE, + /* 06 */ MUT_ARITH8_, + /* 07 */ MUT_ARITH8, + /* 08 */ MUT_ARITH16_, + /* 09 */ MUT_ARITH16BE_, + /* 10 */ MUT_ARITH16, + /* 11 */ MUT_ARITH16BE, + /* 12 */ MUT_ARITH32_, + /* 13 */ MUT_ARITH32BE_, + /* 14 */ MUT_ARITH32, + /* 15 */ MUT_ARITH32BE, + /* 16 */ MUT_RAND8, + /* 17 */ MUT_CLONE_OVERWRITE, + /* 18 */ MUT_CLONE_INSERT, + /* 19 */ MUT_OVERWRITE_COPY, + /* 20 */ MUT_OVERWRITE_FIXED, + /* 21 */ MUT_BYTEADD, + /* 22 */ MUT_BYTESUB, + /* 23 */ MUT_FLIP8, + /* 24 */ MUT_SWITCH, + /* 25 */ MUT_DEL, + /* 26 */ MUT_SHUFFLE, + /* 27 */ MUT_DELONE, + /* 28 */ MUT_INSERTONE, + /* 29 */ MUT_ASCIINUM, + /* 30 */ MUT_NEG, + /* 31 */ MUT_INSERTASCIINUM, + /* 32 */ MUT_EXTRA_OVERWRITE, + /* 33 */ MUT_EXTRA_INSERT, + /* 34 */ MUT_AUTO_EXTRA_OVERWRITE, + /* 35 */ MUT_AUTO_EXTRA_INSERT, + /* 36 */ MUT_SPLICE_OVERWRITE, + /* 37 */ MUT_SPLICE_INSERT, + + MUT_MAX + +}; + +unsigned int mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = {}; +unsigned int mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = {}; +unsigned int mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = {}; +unsigned int mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = + {}; + +unsigned int afl_mutate(afl_state_t *, unsigned char *, unsigned int, + unsigned int, bool, bool, unsigned char *, + unsigned int); +u32 choose_block_len(afl_state_t *, u32); + +/* Helper to choose random block len for block operations in fuzz_one(). + Doesn't return zero, provided that max_len is > 0. 
*/ + +inline u32 choose_block_len(afl_state_t *afl, u32 limit) { + + u32 min_value, max_value; + u32 rlim = MIN(afl->queue_cycle, (u32)3); + + if (unlikely(!afl->run_over10m)) { rlim = 1; } + + switch (rand_below(afl, rlim)) { + + case 0: + min_value = 1; + max_value = HAVOC_BLK_SMALL; + break; + + case 1: + min_value = HAVOC_BLK_SMALL; + max_value = HAVOC_BLK_MEDIUM; + break; + + default: + + if (likely(rand_below(afl, 10))) { + + min_value = HAVOC_BLK_MEDIUM; + max_value = HAVOC_BLK_LARGE; + + } else { + + min_value = HAVOC_BLK_LARGE; + max_value = HAVOC_BLK_XL; + + } + + } + + if (min_value >= limit) { min_value = 1; } + + return min_value + rand_below(afl, MIN(max_value, limit) - min_value + 1); + +} + +unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, + unsigned int steps, bool is_text, bool is_exploration, + unsigned char *splice_buf, unsigned int splice_len) { + + if (!buf || !len) { return 0; } + + u32 *mutation_array; + static unsigned char *tmp_buf = NULL; + + if (!tmp_buf) { + + if ((tmp_buf = malloc(MAX_FILE)) == NULL) { return 0; } + + } + + if (is_text) { + + if (is_exploration) { + + mutation_array = (unsigned int *)&mutation_strategy_exploration_text; + + } else { + + mutation_array = (unsigned int *)&mutation_strategy_exploitation_text; + + } + + } else { + + if (is_exploration) { + + mutation_array = (unsigned int *)&mutation_strategy_exploration_binary; + + } else { + + mutation_array = (unsigned int *)&mutation_strategy_exploitation_binary; + + } + + } + + for (unsigned int step = 0; step < steps; ++step) { + + retry_havoc_step: + + u32 r = rand_below(afl, MUT_STRATEGY_ARRAY_SIZE), item; + + switch (mutation_array[r]) { + + case MUT_FLIPBIT: { + + /* Flip a single bit somewhere. Spooky! */ + u8 bit = rand_below(afl, 8); + u32 off = rand_below(afl, len); + buf[off] ^= 1 << bit; + + break; + + } + + case MUT_INTERESTING8: { + + /* Set byte to interesting value. */ + + item = rand_below(afl, sizeof(interesting_8)); + buf[rand_below(afl, len)] = interesting_8[item]; + break; + + } + + case MUT_INTERESTING16: { + + /* Set word to interesting value, little endian. */ + + if (unlikely(len < 2)) { break; } // no retry + + item = rand_below(afl, sizeof(interesting_16) >> 1); + *(u16 *)(buf + rand_below(afl, len - 1)) = interesting_16[item]; + + break; + + } + + case MUT_INTERESTING16BE: { + + /* Set word to interesting value, big endian. */ + + if (unlikely(len < 2)) { break; } // no retry + + item = rand_below(afl, sizeof(interesting_16) >> 1); + *(u16 *)(buf + rand_below(afl, len - 1)) = SWAP16(interesting_16[item]); + + break; + + } + + case MUT_INTERESTING32: { + + /* Set dword to interesting value, little endian. */ + + if (unlikely(len < 4)) { break; } // no retry + + item = rand_below(afl, sizeof(interesting_32) >> 2); + *(u32 *)(buf + rand_below(afl, len - 3)) = interesting_32[item]; + + break; + + } + + case MUT_INTERESTING32BE: { + + /* Set dword to interesting value, big endian. */ + + if (unlikely(len < 4)) { break; } // no retry + + item = rand_below(afl, sizeof(interesting_32) >> 2); + *(u32 *)(buf + rand_below(afl, len - 3)) = SWAP32(interesting_32[item]); + + break; + + } + + case MUT_ARITH8_: { + + /* Randomly subtract from byte. */ + + item = 1 + rand_below(afl, ARITH_MAX); + buf[rand_below(afl, len)] -= item; + break; + + } + + case MUT_ARITH8: { + + /* Randomly add to byte. 
*/ + + item = 1 + rand_below(afl, ARITH_MAX); + buf[rand_below(afl, len)] += item; + break; + + } + + case MUT_ARITH16_: { + + /* Randomly subtract from word, little endian. */ + + if (unlikely(len < 2)) { break; } // no retry + + u32 pos = rand_below(afl, len - 1); + item = 1 + rand_below(afl, ARITH_MAX); + *(u16 *)(buf + pos) -= item; + + break; + + } + + case MUT_ARITH16BE_: { + + /* Randomly subtract from word, big endian. */ + + if (unlikely(len < 2)) { break; } // no retry + + u32 pos = rand_below(afl, len - 1); + u16 num = 1 + rand_below(afl, ARITH_MAX); + *(u16 *)(buf + pos) = SWAP16(SWAP16(*(u16 *)(buf + pos)) - num); + + break; + + } + + case MUT_ARITH16: { + + /* Randomly add to word, little endian. */ + + if (unlikely(len < 2)) { break; } // no retry + + u32 pos = rand_below(afl, len - 1); + item = 1 + rand_below(afl, ARITH_MAX); + *(u16 *)(buf + pos) += item; + + break; + + } + + case MUT_ARITH16BE: { + + /* Randomly add to word, big endian. */ + + if (unlikely(len < 2)) { break; } // no retry + + u32 pos = rand_below(afl, len - 1); + u16 num = 1 + rand_below(afl, ARITH_MAX); + *(u16 *)(buf + pos) = SWAP16(SWAP16(*(u16 *)(buf + pos)) + num); + + break; + + } + + case MUT_ARITH32_: { + + /* Randomly subtract from dword, little endian. */ + + if (unlikely(len < 4)) { break; } // no retry + + u32 pos = rand_below(afl, len - 3); + item = 1 + rand_below(afl, ARITH_MAX); + *(u32 *)(buf + pos) -= item; + + break; + + } + + case MUT_ARITH32BE_: { + + /* Randomly subtract from dword, big endian. */ + + if (unlikely(len < 4)) { break; } // no retry + + u32 pos = rand_below(afl, len - 3); + u32 num = 1 + rand_below(afl, ARITH_MAX); + *(u32 *)(buf + pos) = SWAP32(SWAP32(*(u32 *)(buf + pos)) - num); + + break; + + } + + case MUT_ARITH32: { + + /* Randomly add to dword, little endian. */ + + if (unlikely(len < 4)) { break; } // no retry + + u32 pos = rand_below(afl, len - 3); + item = 1 + rand_below(afl, ARITH_MAX); + *(u32 *)(buf + pos) += item; + + break; + + } + + case MUT_ARITH32BE: { + + /* Randomly add to dword, big endian. */ + + if (unlikely(len < 4)) { break; } // no retry + + u32 pos = rand_below(afl, len - 3); + u32 num = 1 + rand_below(afl, ARITH_MAX); + *(u32 *)(buf + pos) = SWAP32(SWAP32(*(u32 *)(buf + pos)) + num); + + break; + + } + + case MUT_RAND8: { + + /* Just set a random byte to a random value. Because, + why not. We use XOR with 1-255 to eliminate the + possibility of a no-op. */ + + u32 pos = rand_below(afl, len); + item = 1 + rand_below(afl, 255); + buf[pos] ^= item; + break; + + } + + case MUT_CLONE_OVERWRITE: { + + if (likely(len + HAVOC_BLK_XL < MAX_FILE)) { + + /* Clone bytes. */ + + u32 clone_len = choose_block_len(afl, len); + u32 clone_from = rand_below(afl, len - clone_len + 1); + u32 clone_to = rand_below(afl, len); + + /* Head */ + + memcpy(tmp_buf, buf, clone_to); + + /* Inserted part */ + + memcpy(tmp_buf + clone_to, buf + clone_from, clone_len); + + /* Tail */ + memcpy(tmp_buf + clone_to + clone_len, buf + clone_to, + len - clone_to); + + len += clone_len; + memcpy(buf, tmp_buf, len); + + } else if (unlikely(len < 8)) { + + break; + + } else { + + goto retry_havoc_step; + + } + + break; + + } + + case MUT_CLONE_INSERT: { + + if (likely(len + HAVOC_BLK_XL < MAX_FILE)) { + + /* Insert a block of constant bytes (25%). */ + + u32 clone_len = choose_block_len(afl, HAVOC_BLK_XL); + u32 clone_to = rand_below(afl, len); + u32 strat = rand_below(afl, 2); + u32 clone_from = clone_to ? clone_to - 1 : 0; + item = strat ? 
rand_below(afl, 256) : buf[clone_from]; + + /* Head */ + + memcpy(tmp_buf, buf, clone_to); + + /* Inserted part */ + + memset(tmp_buf + clone_to, item, clone_len); + + /* Tail */ + memcpy(tmp_buf + clone_to + clone_len, buf + clone_to, + len - clone_to); + + len += clone_len; + memcpy(buf, tmp_buf, len); + + } else if (unlikely(len < 8)) { + + break; + + } else { + + goto retry_havoc_step; + + } + + break; + + } + + case MUT_OVERWRITE_COPY: { + + /* Overwrite bytes with a randomly selected chunk bytes. */ + + if (unlikely(len < 2)) { break; } // no retry + + u32 copy_len = choose_block_len(afl, len - 1); + u32 copy_from = rand_below(afl, len - copy_len + 1); + u32 copy_to = rand_below(afl, len - copy_len + 1); + + if (likely(copy_from != copy_to)) { + + memmove(buf + copy_to, buf + copy_from, copy_len); + + } + + break; + + } + + case MUT_OVERWRITE_FIXED: { + + /* Overwrite bytes with fixed bytes. */ + + if (unlikely(len < 2)) { break; } // no retry + + u32 copy_len = choose_block_len(afl, len - 1); + u32 copy_to = rand_below(afl, len - copy_len + 1); + u32 strat = rand_below(afl, 2); + u32 copy_from = copy_to ? copy_to - 1 : 0; + item = strat ? rand_below(afl, 256) : buf[copy_from]; + memset(buf + copy_to, item, copy_len); + + break; + + } + + case MUT_BYTEADD: { + + /* Increase byte by 1. */ + + buf[rand_below(afl, len)]++; + break; + + } + + case MUT_BYTESUB: { + + /* Decrease byte by 1. */ + + buf[rand_below(afl, len)]--; + break; + + } + + case MUT_FLIP8: { + + /* Flip byte. */ + + buf[rand_below(afl, len)] ^= 0xff; + break; + + } + + case MUT_SWITCH: { + + if (unlikely(len < 4)) { break; } // no retry + + /* Switch bytes. */ + + u32 to_end, switch_to, switch_len, switch_from; + switch_from = rand_below(afl, len); + do { + + switch_to = rand_below(afl, len); + + } while (unlikely(switch_from == switch_to)); + + if (switch_from < switch_to) { + + switch_len = switch_to - switch_from; + to_end = len - switch_to; + + } else { + + switch_len = switch_from - switch_to; + to_end = len - switch_from; + + } + + switch_len = choose_block_len(afl, MIN(switch_len, to_end)); + + /* Backup */ + + memcpy(tmp_buf, buf + switch_from, switch_len); + + /* Switch 1 */ + + memcpy(buf + switch_from, buf + switch_to, switch_len); + + /* Switch 2 */ + + memcpy(buf + switch_to, tmp_buf, switch_len); + + break; + + } + + case MUT_DEL: { + + /* Delete bytes. */ + + if (unlikely(len < 2)) { break; } // no retry + + /* Don't delete too much. */ + + u32 del_len = choose_block_len(afl, len - 1); + u32 del_from = rand_below(afl, len - del_len + 1); + memmove(buf + del_from, buf + del_from + del_len, + len - del_from - del_len); + len -= del_len; + + break; + + } + + case MUT_SHUFFLE: { + + /* Shuffle bytes. */ + + if (unlikely(len < 4)) { break; } // no retry + + u32 len = choose_block_len(afl, len - 1); + u32 off = rand_below(afl, len - len + 1); + + for (u32 i = len - 1; i > 0; i--) { + + u32 j; + do { + + j = rand_below(afl, i + 1); + + } while (unlikely(i == j)); + + unsigned char temp = buf[off + i]; + buf[off + i] = buf[off + j]; + buf[off + j] = temp; + + } + + break; + + } + + case MUT_DELONE: { + + /* Delete bytes. */ + + if (unlikely(len < 2)) { break; } // no retry + + /* Don't delete too much. 
*/ + + u32 del_len = 1; + u32 del_from = rand_below(afl, len - del_len + 1); + memmove(buf + del_from, buf + del_from + del_len, + len - del_from - del_len); + + len -= del_len; + + break; + + } + + case MUT_INSERTONE: { + + if (unlikely(len < 2)) { break; } // no retry + + u32 clone_len = 1; + u32 clone_to = rand_below(afl, len); + u32 strat = rand_below(afl, 2); + u32 clone_from = clone_to ? clone_to - 1 : 0; + item = strat ? rand_below(afl, 256) : buf[clone_from]; + + /* Head */ + + memcpy(tmp_buf, buf, clone_to); + + /* Inserted part */ + + memset(tmp_buf + clone_to, item, clone_len); + + /* Tail */ + memcpy(tmp_buf + clone_to + clone_len, buf + clone_to, len - clone_to); + + len += clone_len; + memcpy(buf, tmp_buf, len); + + break; + + } + + case MUT_ASCIINUM: { + + if (unlikely(len < 4)) { break; } // no retry + + u32 off = rand_below(afl, len), off2 = off, cnt = 0; + + while (off2 + cnt < len && !isdigit(buf[off2 + cnt])) { + + ++cnt; + + } + + // none found, wrap + if (off2 + cnt == len) { + + off2 = 0; + cnt = 0; + + while (cnt < off && !isdigit(buf[off2 + cnt])) { + + ++cnt; + + } + + if (cnt == off) { + + if (len < 8) { + + break; + + } else { + + goto retry_havoc_step; + + } + + } + + } + + off = off2 + cnt; + off2 = off + 1; + + while (off2 < len && isdigit(buf[off2])) { + + ++off2; + + } + + s64 val = buf[off] - '0'; + for (u32 i = off + 1; i < off2; ++i) { + + val = (val * 10) + buf[i] - '0'; + + } + + if (off && buf[off - 1] == '-') { val = -val; } + + u32 strat = rand_below(afl, 8); + switch (strat) { + + case 0: + val++; + break; + case 1: + val--; + break; + case 2: + val *= 2; + break; + case 3: + val /= 2; + break; + case 4: + if (likely(val && (u64)val < 0x19999999)) { + + val = (u64)rand_next(afl) % (u64)((u64)val * 10); + + } else { + + val = rand_below(afl, 256); + + } + + break; + case 5: + val += rand_below(afl, 256); + break; + case 6: + val -= rand_below(afl, 256); + break; + case 7: + val = ~(val); + break; + + } + + char buf[20]; + snprintf(buf, sizeof(buf), "%ld", val); + u32 old_len = off2 - off; + u32 new_len = strlen(buf); + + if (old_len == new_len) { + + memcpy(buf + off, buf, new_len); + + } else { + + /* Head */ + + memcpy(tmp_buf, buf, off); + + /* Inserted part */ + + memcpy(tmp_buf + off, buf, new_len); + + /* Tail */ + memcpy(tmp_buf + off + new_len, buf + off2, len - off2); + + len += (new_len - old_len); + memcpy(buf, tmp_buf, len); + + } + + // fprintf(stderr, "AFTER : %s\n", buf); + break; + + } + + case MUT_NEG: { + + /* Neg byte. */ + + item = rand_below(afl, len); + buf[item] = ~buf[item]; + + break; + + } + + case MUT_INSERTASCIINUM: { + + u32 len = 1 + rand_below(afl, 8); + u32 pos = rand_below(afl, len); + + /* Insert ascii number. */ + if (unlikely(len < pos + len)) { + + if (unlikely(len < 8)) { + + break; + + } else { + + goto retry_havoc_step; + + } + + } + + u64 val = rand_next(afl); + char buf[20]; + snprintf(buf, sizeof(buf), "%llu", val); + memcpy(buf + pos, buf, len); + + break; + + } + + case MUT_EXTRA_OVERWRITE: { + + if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; } + + /* Use the dictionary. 
*/ + + u32 use_extra = rand_below(afl, afl->extras_cnt); + u32 extra_len = afl->extras[use_extra].len; + + if (unlikely(extra_len > len)) { goto retry_havoc_step; } + + u32 insert_at = rand_below(afl, len - extra_len + 1); + memcpy(buf + insert_at, afl->extras[use_extra].data, extra_len); + + break; + + } + + case MUT_EXTRA_INSERT: { + + if (unlikely(!afl->extras_cnt)) { goto retry_havoc_step; } + + u32 use_extra = rand_below(afl, afl->extras_cnt); + u32 extra_len = afl->extras[use_extra].len; + if (unlikely(len + extra_len >= MAX_FILE)) { goto retry_havoc_step; } + + u8 *ptr = afl->extras[use_extra].data; + u32 insert_at = rand_below(afl, len + 1); + + /* Tail */ + memmove(buf + insert_at + extra_len, buf + insert_at, len - insert_at); + + /* Inserted part */ + memcpy(buf + insert_at, ptr, extra_len); + len += extra_len; + + break; + + } + + case MUT_AUTO_EXTRA_OVERWRITE: { + + if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; } + + /* Use the dictionary. */ + + u32 use_extra = rand_below(afl, afl->a_extras_cnt); + u32 extra_len = afl->a_extras[use_extra].len; + + if (unlikely(extra_len > len)) { goto retry_havoc_step; } + + u32 insert_at = rand_below(afl, len - extra_len + 1); + memcpy(buf + insert_at, afl->a_extras[use_extra].data, extra_len); + + break; + + } + + case MUT_AUTO_EXTRA_INSERT: { + + if (unlikely(!afl->a_extras_cnt)) { goto retry_havoc_step; } + + u32 use_extra = rand_below(afl, afl->a_extras_cnt); + u32 extra_len = afl->a_extras[use_extra].len; + if (unlikely(len + extra_len >= MAX_FILE)) { goto retry_havoc_step; } + + u8 *ptr = afl->a_extras[use_extra].data; + u32 insert_at = rand_below(afl, len + 1); + + /* Tail */ + memmove(buf + insert_at + extra_len, buf + insert_at, len - insert_at); + + /* Inserted part */ + memcpy(buf + insert_at, ptr, extra_len); + len += extra_len; + + break; + + } + + case MUT_SPLICE_OVERWRITE: { + + if (unlikely(!splice_buf || !splice_len)) { goto retry_havoc_step; } + + /* overwrite mode */ + + u32 copy_from, copy_to, copy_len; + + copy_len = choose_block_len(afl, splice_len - 1); + + if (copy_len > len) copy_len = len; + + copy_from = rand_below(afl, splice_len - copy_len + 1); + copy_to = rand_below(afl, len - copy_len + 1); + memmove(buf + copy_to, splice_buf + copy_from, copy_len); + + break; + + } + + case MUT_SPLICE_INSERT: { + + if (unlikely(!splice_buf || !splice_len)) { goto retry_havoc_step; } + + if (unlikely(len + HAVOC_BLK_XL >= MAX_FILE)) { goto retry_havoc_step; } + + /* insert mode */ + + u32 clone_from, clone_to, clone_len; + + clone_len = choose_block_len(afl, splice_len); + clone_from = rand_below(afl, splice_len - clone_len + 1); + clone_to = rand_below(afl, len + 1); + + /* Head */ + + memcpy(tmp_buf, buf, clone_to); + + /* Inserted part */ + + memcpy(tmp_buf + clone_to, splice_buf + clone_from, clone_len); + + /* Tail */ + memcpy(tmp_buf + clone_to + clone_len, buf + clone_to, len - clone_to); + + len += clone_len; + memcpy(buf, tmp_buf, len); + + break; + + } + + } + + } + + return len; + +} + +#endif /* !AFL_MUTATIONS_H */ + -- cgit 1.4.1 From 32ffa2664cdfa2cc377df12cbf6efdcecbc2e78a Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 4 Apr 2023 16:23:19 +0200 Subject: max_len support --- custom_mutators/aflpp/aflpp.c | 27 +++++++++++++-- include/afl-mutations.h | 76 +++++++++++++++++++++++++++---------------- 2 files changed, 72 insertions(+), 31 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/custom_mutators/aflpp/aflpp.c b/custom_mutators/aflpp/aflpp.c index 2b69ad9c..e15d0391 100644 
--- a/custom_mutators/aflpp/aflpp.c +++ b/custom_mutators/aflpp/aflpp.c @@ -4,6 +4,7 @@ typedef struct my_mutator { afl_state_t *afl; u8 *buf; + u32 buf_size; } my_mutator_t; @@ -19,12 +20,15 @@ my_mutator_t *afl_custom_init(afl_state_t *afl, unsigned int seed) { } - data->buf = malloc(MAX_FILE); - if (!data->buf) { + if ((data->buf = malloc(MAX_FILE)) == NULL) { perror("afl_custom_init alloc"); return NULL; + } else { + + data->buf_size = MAX_FILE; + } data->afl = afl; @@ -39,6 +43,23 @@ size_t afl_custom_fuzz(my_mutator_t *data, uint8_t *buf, size_t buf_size, u8 **out_buf, uint8_t *add_buf, size_t add_buf_size, size_t max_size) { + if (max_size > data->buf_size) { + + u8 *ptr = realloc(data->buf, max_size); + + if (ptr) { + + return 0; + + } else { + + data->buf = ptr; + data->buf_size = max_size; + + } + + } + u32 havoc_steps = 1 + rand_below(data->afl, 16); /* set everything up, costly ... :( */ @@ -46,7 +67,7 @@ size_t afl_custom_fuzz(my_mutator_t *data, uint8_t *buf, size_t buf_size, /* the mutation */ u32 out_buf_len = afl_mutate(data->afl, data->buf, buf_size, havoc_steps, - false, true, add_buf, add_buf_size); + false, true, add_buf, add_buf_size, max_size); /* return size of mutated data */ *out_buf = data->buf; diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 43b7927d..e3f69214 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -6,7 +6,8 @@ #include "afl-mutations.h" // needs afl-fuzz.h u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32t steps, bool is_text, - bool is_exploration, u8 *splice_buf, u32 splice_len); + bool is_exploration, u8 *splice_buf, u32 splice_len, + u32 max_len); Returns: u32 - the length of the mutated data return in *buf. 0 = error @@ -21,6 +22,7 @@ splice_buf - a buffer from another corpus item to splice with. If NULL then no splicing splice_len - the length of the splice buffer. If 0 then no splicing + u32 max_len - the maximum size the mutated buffer may grow to */ #ifndef _ANDROID_ASHMEM_H @@ -76,16 +78,13 @@ enum { }; -unsigned int mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = {}; -unsigned int mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = {}; -unsigned int mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = {}; -unsigned int mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = - {}; +u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = {}; +u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = {}; +u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = {}; +u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = {}; -unsigned int afl_mutate(afl_state_t *, unsigned char *, unsigned int, - unsigned int, bool, bool, unsigned char *, - unsigned int); -u32 choose_block_len(afl_state_t *, u32); +u32 afl_mutate(afl_state_t *, u8 *, u32, u32, bool, bool, u8 *, u32, u32); +u32 choose_block_len(afl_state_t *, u32); /* Helper to choose random block len for block operations in fuzz_one(). Doesn't return zero, provided that max_len is > 0. 
*/ @@ -131,18 +130,39 @@ inline u32 choose_block_len(afl_state_t *afl, u32 limit) { } -unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, - unsigned int steps, bool is_text, bool is_exploration, - unsigned char *splice_buf, unsigned int splice_len) { +inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, + bool is_text, bool is_exploration, u8 *splice_buf, + u32 splice_len, u32 max_len) { if (!buf || !len) { return 0; } - u32 *mutation_array; - static unsigned char *tmp_buf = NULL; + u32 *mutation_array; + static u8 *tmp_buf = NULL; + static u32 tmp_buf_size = 0; - if (!tmp_buf) { + if (max_len > tmp_buf_size) { - if ((tmp_buf = malloc(MAX_FILE)) == NULL) { return 0; } + if (tmp_buf) { + + u8 *ptr = realloc(tmp_buf, max_len); + + if (!ptr) { + + return 0; + + } else { + + tmp_buf = ptr; + + } + + } else { + + if ((tmp_buf = malloc(max_len)) == NULL) { return 0; } + + } + + tmp_buf_size = max_len; } @@ -150,11 +170,11 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, if (is_exploration) { - mutation_array = (unsigned int *)&mutation_strategy_exploration_text; + mutation_array = (u32 *)&mutation_strategy_exploration_text; } else { - mutation_array = (unsigned int *)&mutation_strategy_exploitation_text; + mutation_array = (u32 *)&mutation_strategy_exploitation_text; } @@ -162,17 +182,17 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, if (is_exploration) { - mutation_array = (unsigned int *)&mutation_strategy_exploration_binary; + mutation_array = (u32 *)&mutation_strategy_exploration_binary; } else { - mutation_array = (unsigned int *)&mutation_strategy_exploitation_binary; + mutation_array = (u32 *)&mutation_strategy_exploitation_binary; } } - for (unsigned int step = 0; step < steps; ++step) { + for (u32 step = 0; step < steps; ++step) { retry_havoc_step: @@ -400,7 +420,7 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, case MUT_CLONE_OVERWRITE: { - if (likely(len + HAVOC_BLK_XL < MAX_FILE)) { + if (likely(len + HAVOC_BLK_XL < max_len)) { /* Clone bytes. */ @@ -439,7 +459,7 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, case MUT_CLONE_INSERT: { - if (likely(len + HAVOC_BLK_XL < MAX_FILE)) { + if (likely(len + HAVOC_BLK_XL < max_len)) { /* Insert a block of constant bytes (25%). 
*/ @@ -622,7 +642,7 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, } while (unlikely(i == j)); - unsigned char temp = buf[off + i]; + u8 temp = buf[off + i]; buf[off + i] = buf[off + j]; buf[off + j] = temp; @@ -872,7 +892,7 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, u32 use_extra = rand_below(afl, afl->extras_cnt); u32 extra_len = afl->extras[use_extra].len; - if (unlikely(len + extra_len >= MAX_FILE)) { goto retry_havoc_step; } + if (unlikely(len + extra_len >= max_len)) { goto retry_havoc_step; } u8 *ptr = afl->extras[use_extra].data; u32 insert_at = rand_below(afl, len + 1); @@ -912,7 +932,7 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, u32 use_extra = rand_below(afl, afl->a_extras_cnt); u32 extra_len = afl->a_extras[use_extra].len; - if (unlikely(len + extra_len >= MAX_FILE)) { goto retry_havoc_step; } + if (unlikely(len + extra_len >= max_len)) { goto retry_havoc_step; } u8 *ptr = afl->a_extras[use_extra].data; u32 insert_at = rand_below(afl, len + 1); @@ -952,7 +972,7 @@ unsigned int afl_mutate(afl_state_t *afl, unsigned char *buf, unsigned int len, if (unlikely(!splice_buf || !splice_len)) { goto retry_havoc_step; } - if (unlikely(len + HAVOC_BLK_XL >= MAX_FILE)) { goto retry_havoc_step; } + if (unlikely(len + HAVOC_BLK_XL >= max_len)) { goto retry_havoc_step; } /* insert mode */ -- cgit 1.4.1 From 2bff92c603463410fa0f97e7c4db7eb14c45e5ed Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 4 Apr 2023 16:25:05 +0200 Subject: nit --- include/afl-mutations.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index e3f69214..707db799 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -14,7 +14,7 @@ Parameters: afl_state_t *afl - the *afl state pointer u8 *buf - the input buffer to mutate which will be mutated into. - NOTE: must be of MAX_FILE size! + NOTE: must be able to contain a size of at least max_len (see below)! 
u32 len - the length of the input u32 steps - how many mutations to perform on the input bool is_text - is the target expecting text inputs -- cgit 1.4.1 From 3ab18d286142e2e19e37850c051e0b07b9d7b296 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 4 Apr 2023 19:44:12 +0200 Subject: mode switch --- include/afl-fuzz.h | 69 +++++++------ include/afl-mutations.h | 259 +++++++++++++++++++++++++++++++++++++++++++++++- include/config.h | 6 ++ src/afl-fuzz-one.c | 6 +- src/afl-fuzz-state.c | 1 + src/afl-fuzz-stats.c | 5 +- src/afl-fuzz.c | 51 ++++++++-- 7 files changed, 351 insertions(+), 46 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-fuzz.h b/include/afl-fuzz.h index 58d02af5..6573eabf 100644 --- a/include/afl-fuzz.h +++ b/include/afl-fuzz.h @@ -490,7 +490,9 @@ typedef struct afl_state { *orig_cmdline, /* Original command line */ *infoexec; /* Command to execute on a new crash */ - u32 hang_tmout; /* Timeout used for hang det (ms) */ + u32 hang_tmout, /* Timeout used for hang det (ms) */ + stats_update_freq, /* Stats update frequency (execs) */ + switch_fuzz_mode; /* auto or fixed fuzz mode */ u8 havoc_stack_pow2, /* HAVOC_STACK_POW2 */ no_unlink, /* do not unlink cur_input */ @@ -499,40 +501,37 @@ typedef struct afl_state { custom_splice_optout, /* Custom mutator no splice buffer */ is_main_node, /* if this is the main node */ is_secondary_node, /* if this is a secondary instance */ - pizza_is_served; /* pizza mode */ - - u32 stats_update_freq; /* Stats update frequency (execs) */ - - u8 schedule; /* Power schedule (default: EXPLORE)*/ - u8 havoc_max_mult; - - u8 skip_deterministic, /* Skip deterministic stages? */ - use_splicing, /* Recombine input files? */ - non_instrumented_mode, /* Run in non-instrumented mode? */ - score_changed, /* Scoring for favorites changed? */ - resuming_fuzz, /* Resuming an older fuzzing job? */ - timeout_given, /* Specific timeout given? */ - not_on_tty, /* stdout is not a tty */ - term_too_small, /* terminal dimensions too small */ - no_forkserver, /* Disable forkserver? */ - crash_mode, /* Crash mode! Yeah! */ - in_place_resume, /* Attempt in-place resume? */ - autoresume, /* Resume if afl->out_dir exists? */ - auto_changed, /* Auto-generated tokens changed? */ - no_cpu_meter_red, /* Feng shui on the status screen */ - no_arith, /* Skip most arithmetic ops */ - shuffle_queue, /* Shuffle input queue? */ - bitmap_changed, /* Time to update bitmap? */ - unicorn_mode, /* Running in Unicorn mode? */ - use_wine, /* Use WINE with QEMU mode */ - skip_requested, /* Skip request, via SIGUSR1 */ - run_over10m, /* Run time over 10 minutes? */ - persistent_mode, /* Running in persistent mode? */ - deferred_mode, /* Deferred forkserver mode? */ - fixed_seed, /* do not reseed */ - fast_cal, /* Try to calibrate faster? */ - disable_trim, /* Never trim in fuzz_one */ - shmem_testcase_mode, /* If sharedmem testcases are used */ + pizza_is_served, /* pizza mode */ + text_input, /* target wants text inputs */ + fuzz_mode, /* current mode: coverage/exploration or crash/exploitation */ + schedule, /* Power schedule (default: EXPLORE)*/ + havoc_max_mult, skip_deterministic, /* Skip deterministic stages? */ + use_splicing, /* Recombine input files? */ + non_instrumented_mode, /* Run in non-instrumented mode? */ + score_changed, /* Scoring for favorites changed? */ + resuming_fuzz, /* Resuming an older fuzzing job? */ + timeout_given, /* Specific timeout given? 
*/ + not_on_tty, /* stdout is not a tty */ + term_too_small, /* terminal dimensions too small */ + no_forkserver, /* Disable forkserver? */ + crash_mode, /* Crash mode! Yeah! */ + in_place_resume, /* Attempt in-place resume? */ + autoresume, /* Resume if afl->out_dir exists? */ + auto_changed, /* Auto-generated tokens changed? */ + no_cpu_meter_red, /* Feng shui on the status screen */ + no_arith, /* Skip most arithmetic ops */ + shuffle_queue, /* Shuffle input queue? */ + bitmap_changed, /* Time to update bitmap? */ + unicorn_mode, /* Running in Unicorn mode? */ + use_wine, /* Use WINE with QEMU mode */ + skip_requested, /* Skip request, via SIGUSR1 */ + run_over10m, /* Run time over 10 minutes? */ + persistent_mode, /* Running in persistent mode? */ + deferred_mode, /* Deferred forkserver mode? */ + fixed_seed, /* do not reseed */ + fast_cal, /* Try to calibrate faster? */ + disable_trim, /* Never trim in fuzz_one */ + shmem_testcase_mode, /* If sharedmem testcases are used */ expand_havoc, /* perform expensive havoc after no find */ cycle_schedules, /* cycle power schedules? */ old_seed_selection, /* use vanilla afl seed selection */ diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 707db799..5a1b6356 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -81,7 +81,264 @@ enum { u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = {}; u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = {}; u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = {}; -u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = {}; +u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = { + + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + 
MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT}; u32 afl_mutate(afl_state_t *, u8 *, u32, u32, bool, bool, u8 *, u32, u32); u32 choose_block_len(afl_state_t *, u32); diff --git a/include/config.h b/include/config.h index e46f515a..c1297bdd 100644 --- a/include/config.h +++ b/include/config.h @@ -43,6 +43,12 @@ Default: 8MB (defined in bytes) */ #define DEFAULT_SHMEM_SIZE (8 * 1024 * 1024) +/* Default time until when no more coverage finds are happening afl-fuzz + switches to exploitation mode. It automatically switches back when new + coverage is found. 
+ Default: 300 (seconds) */ +#define STRATEGY_SWITCH_TIME 300 + /* Default file permission umode when creating files (default: 0600) */ #define DEFAULT_PERMISSION 0600 diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index 226fb40e..e6b58713 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2078,9 +2078,9 @@ havoc_stage: u32 *mutation_array; u32 stack_max; - if (afl->queue_cur->is_ascii) { // is text? + if (unlikely(afl->text_input || afl->queue_cur->is_ascii)) { // is text? - if (1) { // is exploration? + if (likely(afl->fuzz_mode == 0)) { // is exploration? mutation_array = (unsigned int *)&mutation_strategy_exploration_text; @@ -2092,7 +2092,7 @@ havoc_stage: } else { // is binary! - if (1) { // is exploration? + if (likely(afl->fuzz_mode == 0)) { // is exploration? mutation_array = (unsigned int *)&mutation_strategy_exploration_binary; diff --git a/src/afl-fuzz-state.c b/src/afl-fuzz-state.c index f9aa5cfe..907861e9 100644 --- a/src/afl-fuzz-state.c +++ b/src/afl-fuzz-state.c @@ -108,6 +108,7 @@ void afl_state_init(afl_state_t *afl, uint32_t map_size) { afl->cmplog_lvl = 2; afl->min_length = 1; afl->max_length = MAX_FILE; + afl->switch_fuzz_mode = STRATEGY_SWITCH_TIME; #ifndef NO_SPLICING afl->use_splicing = 1; #endif diff --git a/src/afl-fuzz-stats.c b/src/afl-fuzz-stats.c index 25ebe987..de48e10a 100644 --- a/src/afl-fuzz-stats.c +++ b/src/afl-fuzz-stats.c @@ -1282,7 +1282,10 @@ void show_stats_normal(afl_state_t *afl) { } /* Last line */ - SAYF(SET_G1 "\n" bSTG bLB bH30 bH20 bH2 bRB bSTOP cRST RESET_G1); + + SAYF(SET_G1 "\n" bSTG bLB bH cCYA bSTOP + " strategy:%s %s " bSTG bH20 bH10 bH2 bRB bSTOP cRST RESET_G1, + cPIN, afl->fuzz_mode == 0 ? "explore" : "exploit"); #undef IB diff --git a/src/afl-fuzz.c b/src/afl-fuzz.c index 3380fd90..315107d7 100644 --- a/src/afl-fuzz.c +++ b/src/afl-fuzz.c @@ -128,6 +128,13 @@ static void usage(u8 *argv0, int more_help) { " -o dir - output directory for fuzzer findings\n\n" "Execution control settings:\n" + " -P strategy - set fix mutation strategy: explore (focus on new " + "coverage),\n" + " exploit (focus on triggering crashes). 
You can also " + "set a\n" + " number of seconds after without any finds it switches " + "to\n" + " exploit mode, and back on new coverage (default: %u)\n" " -p schedule - power schedules compute a seed's performance score:\n" " fast(default), explore, exploit, seek, rare, mmopt, " "coe, lin\n" @@ -156,6 +163,7 @@ static void usage(u8 *argv0, int more_help) { "\n" "Mutator settings:\n" + " -a - target expects ascii text input\n" " -g minlength - set min length of generated fuzz input (default: 1)\n" " -G maxlength - set max length of generated fuzz input (default: " "%lu)\n" @@ -212,7 +220,8 @@ static void usage(u8 *argv0, int more_help) { " -e ext - file extension for the fuzz test input file (if " "needed)\n" "\n", - argv0, EXEC_TIMEOUT, MEM_LIMIT, MAX_FILE, FOREIGN_SYNCS_MAX); + argv0, STRATEGY_SWITCH_TIME, EXEC_TIMEOUT, MEM_LIMIT, MAX_FILE, + FOREIGN_SYNCS_MAX); if (more_help > 1) { @@ -553,14 +562,44 @@ int main(int argc, char **argv_orig, char **envp) { afl->shmem_testcase_mode = 1; // we always try to perform shmem fuzzing - while ( - (opt = getopt( - argc, argv, - "+Ab:B:c:CdDe:E:hi:I:f:F:g:G:l:L:m:M:nNOo:p:RQs:S:t:T:UV:WXx:YZ")) > - 0) { + // still available: aHjJkKPqruvwz + while ((opt = getopt(argc, argv, + "+aAb:B:c:CdDe:E:f:F:g:G:hi:I:l:L:m:M:nNo:Op:P:QRs:S:t:" + "T:UV:WXx:YZ")) > 0) { switch (opt) { + case 'a': + afl->text_input = 1; + break; + + case 'P': + if (!stricmp(optarg, "explore") || !stricmp(optarg, "exploration")) { + + afl->fuzz_mode = 0; + afl->switch_fuzz_mode = 1; + + } else if (!stricmp(optarg, "exploit") || + + !stricmp(optarg, "exploitation")) { + + afl->fuzz_mode = 1; + afl->switch_fuzz_mode = 0; + + } else { + + if ((s32)(afl->switch_fuzz_mode = (u32)atoi(optarg)) < 1) { + + FATAL( + "Parameter for option -P must be \"explore\", \"exploit\" or a " + "number!"); + + } + + } + + break; + case 'g': afl->min_length = atoi(optarg); break; -- cgit 1.4.1 From 41a452d4e8038461f730736439346eb8a7a3968f Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 4 Apr 2023 21:48:51 +0200 Subject: mutation lists --- include/afl-mutations.h | 786 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 783 insertions(+), 3 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 5a1b6356..31d0898a 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -78,9 +78,789 @@ enum { }; -u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = {}; -u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = {}; -u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = {}; +u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = { + + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + 
MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + 
MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + +}; + +u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = { + + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_SHUFFLE, + MUT_SHUFFLE, + 
MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + +}; + +u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = { + + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + 
MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_OVERWRITE, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_CLONE_INSERT, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_SHUFFLE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_DELONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_INSERTONE, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_ASCIINUM, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_NEG, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_INSERTASCIINUM, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + +}; + u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_FLIPBIT, -- cgit 1.4.1 From 53b70ef104a334424fd5226c7504130b3bd45625 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Wed, 5 Apr 2023 09:33:09 +0200 Subject: mut changes --- include/afl-mutations.h | 83 +++++++++++++++++++++---------------------------- src/afl-fuzz-one.c | 17 +--------- 2 files changed, 37 insertions(+), 63 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 31d0898a..9188a37f 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -65,14 +65,13 @@ enum { /* 27 */ MUT_DELONE, /* 28 */ MUT_INSERTONE, /* 29 */ MUT_ASCIINUM, - /* 30 */ MUT_NEG, - /* 31 */ 
MUT_INSERTASCIINUM, - /* 32 */ MUT_EXTRA_OVERWRITE, - /* 33 */ MUT_EXTRA_INSERT, - /* 34 */ MUT_AUTO_EXTRA_OVERWRITE, - /* 35 */ MUT_AUTO_EXTRA_INSERT, - /* 36 */ MUT_SPLICE_OVERWRITE, - /* 37 */ MUT_SPLICE_INSERT, + /* 30 */ MUT_INSERTASCIINUM, + /* 31 */ MUT_EXTRA_OVERWRITE, + /* 32 */ MUT_EXTRA_INSERT, + /* 33 */ MUT_AUTO_EXTRA_OVERWRITE, + /* 34 */ MUT_AUTO_EXTRA_INSERT, + /* 35 */ MUT_SPLICE_OVERWRITE, + /* 36 */ MUT_SPLICE_INSERT, MUT_MAX @@ -199,6 +198,7 @@ u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_CLONE_INSERT, MUT_CLONE_INSERT, MUT_CLONE_INSERT, + MUT_CLONE_INSERT, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, @@ -233,6 +233,9 @@ u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_FLIP8, MUT_FLIP8, MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, MUT_SWITCH, MUT_SWITCH, MUT_SWITCH, @@ -276,12 +279,8 @@ u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_ASCIINUM, MUT_ASCIINUM, MUT_ASCIINUM, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, + MUT_ASCIINUM, + MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, @@ -335,7 +334,7 @@ u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, - MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT }; @@ -468,6 +467,7 @@ u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_CLONE_INSERT, MUT_CLONE_INSERT, MUT_CLONE_INSERT, + MUT_CLONE_INSERT, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, @@ -504,6 +504,10 @@ u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_FLIP8, MUT_FLIP8, MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, MUT_SWITCH, MUT_SWITCH, MUT_SWITCH, @@ -541,12 +545,6 @@ u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_ASCIINUM, MUT_ASCIINUM, MUT_ASCIINUM, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, @@ -596,7 +594,7 @@ u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, - MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT }; @@ -766,6 +764,9 @@ u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_FLIP8, MUT_FLIP8, MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, MUT_SWITCH, MUT_SWITCH, MUT_SWITCH, @@ -785,6 +786,7 @@ u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_SHUFFLE, MUT_SHUFFLE, MUT_SHUFFLE, + MUT_SHUFFLE, MUT_DELONE, MUT_DELONE, MUT_DELONE, @@ -801,12 +803,8 @@ u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_ASCIINUM, MUT_ASCIINUM, MUT_ASCIINUM, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, + MUT_ASCIINUM, + MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, @@ -857,7 +855,7 @@ u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, - MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT }; @@ -959,6 +957,8 @@ u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_ARITH32BE_, MUT_ARITH32BE_, MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, MUT_ARITH32, MUT_ARITH32, MUT_ARITH32, @@ -1027,6 +1027,10 @@ u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_FLIP8, MUT_FLIP8, MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, MUT_SWITCH, MUT_SWITCH, MUT_SWITCH, @@ -1062,12 +1066,6 @@ u32 
mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_ASCIINUM, MUT_ASCIINUM, MUT_ASCIINUM, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, - MUT_NEG, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, MUT_INSERTASCIINUM, @@ -1118,7 +1116,9 @@ u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, MUT_SPLICE_INSERT, - MUT_SPLICE_INSERT}; + MUT_SPLICE_INSERT + +}; u32 afl_mutate(afl_state_t *, u8 *, u32, u32, bool, bool, u8 *, u32, u32); u32 choose_block_len(afl_state_t *, u32); @@ -1865,17 +1865,6 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } - case MUT_NEG: { - - /* Neg byte. */ - - item = rand_below(afl, len); - buf[item] = ~buf[item]; - - break; - - } - case MUT_INSERTASCIINUM: { u32 len = 1 + rand_below(afl, 8); diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index e6b58713..bc267b15 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2659,7 +2659,7 @@ havoc_stage: case MUT_FLIP8: { - /* Flip byte. */ + /* Flip byte with a XOR 0xff. This is the same as NEG. */ #ifdef INTROSPECTION snprintf(afl->m_tmp, sizeof(afl->m_tmp), " FLIP8_"); @@ -2987,21 +2987,6 @@ havoc_stage: } - case MUT_NEG: { - - /* Neg byte. */ - -#ifdef INTROSPECTION - snprintf(afl->m_tmp, sizeof(afl->m_tmp), " NEG_"); - strcat(afl->mutation, afl->m_tmp); -#endif - item = rand_below(afl, temp_len); - - out_buf[item] = ~out_buf[item]; - break; - - } - case MUT_INSERTASCIINUM: { u32 len = 1 + rand_below(afl, 8); -- cgit 1.4.1 From e313180e4d3f7ba44b773e43af40d4af21088576 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Wed, 5 Apr 2023 10:32:37 +0200 Subject: fix for clang --- include/afl-mutations.h | 10 ++++++---- src/afl-fuzz-one.c | 9 ++++++--- 2 files changed, 12 insertions(+), 7 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 9188a37f..cc913fb0 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -1231,7 +1231,7 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, for (u32 step = 0; step < steps; ++step) { - retry_havoc_step: + retry_havoc_step : { u32 r = rand_below(afl, MUT_STRATEGY_ARRAY_SIZE), item; @@ -1667,10 +1667,10 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, if (unlikely(len < 4)) { break; } // no retry - u32 len = choose_block_len(afl, len - 1); - u32 off = rand_below(afl, len - len + 1); + u32 blen = choose_block_len(afl, len - 1); + u32 off = rand_below(afl, len - blen + 1); - for (u32 i = len - 1; i > 0; i--) { + for (u32 i = blen - 1; i > 0; i--) { u32 j; do { @@ -2030,6 +2030,8 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } + } + return len; } diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index 48aa6eb0..e6ff1d1a 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2139,8 +2139,8 @@ havoc_stage: LIST_FOREACH(&afl->custom_mutator_list, struct custom_mutator, { - if (el->stacked_custom && - rand_below(afl, 100) < el->stacked_custom_prob) { + if (unlikely(el->stacked_custom && + rand_below(afl, 100) < el->stacked_custom_prob)) { u8 *custom_havoc_buf = NULL; size_t new_len = el->afl_custom_havoc_mutation( @@ -2170,7 +2170,8 @@ havoc_stage: } - retry_havoc_step: + retry_havoc_step : { + u32 r = rand_below(afl, MUT_STRATEGY_ARRAY_SIZE), item; switch (mutation_array[r]) { @@ -3250,6 +3251,8 @@ havoc_stage: } + } + if (common_fuzz_stuff(afl, out_buf, temp_len)) { goto abandon_entry; } /* out_buf might have been mangled a bit, 
so let's restore it to its -- cgit 1.4.1 From 400c5e92cb5ed304a2c14a79597100850cf9f82c Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Fri, 7 Apr 2023 09:41:22 +0200 Subject: renaming --- include/afl-mutations.h | 166 ++++++++++++++++++++++++------------------------ src/afl-fuzz-one.c | 30 +++++---- 2 files changed, 99 insertions(+), 97 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index cc913fb0..8d40855d 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -52,8 +52,8 @@ enum { /* 14 */ MUT_ARITH32, /* 15 */ MUT_ARITH32BE, /* 16 */ MUT_RAND8, - /* 17 */ MUT_CLONE_OVERWRITE, - /* 18 */ MUT_CLONE_INSERT, + /* 17 */ MUT_CLONE_COPY, + /* 18 */ MUT_CLONE_FIXED, /* 19 */ MUT_OVERWRITE_COPY, /* 20 */ MUT_OVERWRITE_FIXED, /* 21 */ MUT_BYTEADD, @@ -176,29 +176,29 @@ u32 mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_RAND8, MUT_RAND8, MUT_RAND8, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, @@ -446,28 +446,28 @@ u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_RAND8, MUT_RAND8, MUT_RAND8, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, @@ -716,23 +716,23 @@ u32 mutation_strategy_exploitation_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_RAND8, MUT_RAND8, MUT_RAND8, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + 
MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, @@ -979,23 +979,23 @@ u32 mutation_strategy_exploitation_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_RAND8, MUT_RAND8, MUT_RAND8, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_OVERWRITE, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, - MUT_CLONE_INSERT, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, MUT_OVERWRITE_COPY, @@ -1455,7 +1455,7 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } - case MUT_CLONE_OVERWRITE: { + case MUT_CLONE_COPY: { if (likely(len + HAVOC_BLK_XL < max_len)) { @@ -1494,7 +1494,7 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } - case MUT_CLONE_INSERT: { + case MUT_CLONE_FIXED: { if (likely(len + HAVOC_BLK_XL < max_len)) { diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index e6ff1d1a..f5ddea0e 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2481,7 +2481,7 @@ havoc_stage: } - case MUT_CLONE_OVERWRITE: { + case MUT_CLONE_COPY: { if (likely(temp_len + HAVOC_BLK_XL < MAX_FILE)) { @@ -2493,7 +2493,7 @@ havoc_stage: #ifdef INTROSPECTION snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s_%u_%u_%u", - "overwrite", clone_from, clone_to, clone_len); + "COPY", clone_from, clone_to, clone_len); strcat(afl->mutation, afl->m_tmp); #endif u8 *new_buf = @@ -2530,7 +2530,7 @@ havoc_stage: } - case MUT_CLONE_INSERT: { + case MUT_CLONE_FIXED: { if (likely(temp_len + HAVOC_BLK_XL < MAX_FILE)) { @@ -2544,7 +2544,7 @@ havoc_stage: #ifdef INTROSPECTION snprintf(afl->m_tmp, sizeof(afl->m_tmp), " CLONE-%s_%u_%u_%u", - "insert", strat, clone_to, clone_len); + "FIXED", strat, clone_to, clone_len); strcat(afl->mutation, afl->m_tmp); #endif u8 *new_buf = @@ -2587,20 +2587,22 @@ havoc_stage: if (unlikely(temp_len < 2)) { break; } // no retry - u32 copy_len = choose_block_len(afl, temp_len - 1); - u32 copy_from = rand_below(afl, temp_len - copy_len + 1); - u32 copy_to = rand_below(afl, temp_len - copy_len + 1); + u32 copy_from, copy_to, + copy_len = choose_block_len(afl, temp_len - 1); + + do { - if (likely(copy_from != copy_to)) { + copy_from = rand_below(afl, temp_len - copy_len + 1); + copy_to = rand_below(afl, temp_len - copy_len + 1); + + } while (unlikely(copy_from == copy_to)); #ifdef INTROSPECTION - snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE-COPY_%u_%u_%u", - copy_from, copy_to, copy_len); - strcat(afl->mutation, afl->m_tmp); + snprintf(afl->m_tmp, sizeof(afl->m_tmp), " OVERWRITE-COPY_%u_%u_%u", + copy_from, copy_to, copy_len); + strcat(afl->mutation, afl->m_tmp); #endif - memmove(out_buf + copy_to, out_buf + copy_from, copy_len); - - } + memmove(out_buf + copy_to, out_buf + copy_from, copy_len); break; -- cgit 1.4.1 From 
8de7f6131d48e27d53e894b65bd11e0dc3817639 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 6 Jun 2023 13:12:31 +0200 Subject: add current mutation strategy to include --- include/afl-mutations.h | 161 ++++++++++++++++++++++++++++++++++++++++++++++++ src/afl-fuzz-one.c | 7 ++- 2 files changed, 166 insertions(+), 2 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 8d40855d..08037b09 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -77,6 +77,167 @@ enum { }; + #define MUT_NORMAL_ARRAY_SIZE 77 +u32 normal_splice[MUT_NORMAL_ARRAY_SIZE] = {MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT}; + #define MUT_SPLICE_ARRAY_SIZE 81 +u32 full_splice_array[MUT_SPLICE_ARRAY_SIZE] = {MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT}; + u32 
mutation_strategy_exploration_text[MUT_STRATEGY_ARRAY_SIZE] = { MUT_FLIPBIT, diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index f5ddea0e..312e180d 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2076,7 +2076,7 @@ havoc_stage: where we take the input file and make random stacked tweaks. */ u32 *mutation_array; - u32 stack_max, stack_max_pow = afl->havoc_stack_pow2; + u32 stack_max; // stack_max_pow = afl->havoc_stack_pow2; if (unlikely(afl->text_input || afl->queue_cur->is_ascii)) { // is text? @@ -2104,6 +2104,7 @@ havoc_stage: } + /* if (temp_len < 64) { --stack_max_pow; @@ -2118,7 +2119,9 @@ havoc_stage: } - stack_max = 1 << stack_max_pow; + */ + + stack_max = 1 << (1 + rand_below(afl, afl->havoc_stack_pow2)); // + (afl->extras_cnt ? 2 : 0) + (afl->a_extras_cnt ? 2 : 0); -- cgit 1.4.1 From 9b2c4a2a5a8e54a80bdb82fee39891fbe42544e8 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 6 Jun 2023 16:54:12 +0200 Subject: nit --- include/afl-mutations.h | 155 ++++++++++++++++++++++++------------------------ 1 file changed, 78 insertions(+), 77 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 08037b09..a3c9fd59 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -78,83 +78,84 @@ enum { }; #define MUT_NORMAL_ARRAY_SIZE 77 -u32 normal_splice[MUT_NORMAL_ARRAY_SIZE] = {MUT_FLIPBIT, - MUT_FLIPBIT, - MUT_FLIPBIT, - MUT_FLIPBIT, - MUT_INTERESTING8, - MUT_INTERESTING8, - MUT_INTERESTING8, - MUT_INTERESTING8, - MUT_INTERESTING16, - MUT_INTERESTING16, - MUT_INTERESTING16BE, - MUT_INTERESTING16BE, - MUT_INTERESTING32, - MUT_INTERESTING32, - MUT_INTERESTING32BE, - MUT_INTERESTING32BE, - MUT_ARITH8_, - MUT_ARITH8_, - MUT_ARITH8_, - MUT_ARITH8_, - MUT_ARITH8, - MUT_ARITH8, - MUT_ARITH8, - MUT_ARITH8, - MUT_ARITH16_, - MUT_ARITH16_, - MUT_ARITH16BE_, - MUT_ARITH16BE_, - MUT_ARITH16, - MUT_ARITH16, - MUT_ARITH16BE, - MUT_ARITH16BE, - MUT_ARITH32_, - MUT_ARITH32_, - MUT_ARITH32BE_, - MUT_ARITH32BE_, - MUT_ARITH32, - MUT_ARITH32, - MUT_ARITH32BE, - MUT_ARITH32BE, - MUT_RAND8, - MUT_RAND8, - MUT_RAND8, - MUT_RAND8, - MUT_CLONE_COPY, - MUT_CLONE_COPY, - MUT_CLONE_COPY, - MUT_CLONE_FIXED, - MUT_OVERWRITE_COPY, - MUT_OVERWRITE_COPY, - MUT_OVERWRITE_COPY, - MUT_OVERWRITE_FIXED, - MUT_BYTEADD, - MUT_BYTESUB, - MUT_FLIP8, - MUT_SWITCH, - MUT_SWITCH, - MUT_DEL, - MUT_DEL, - MUT_DEL, - MUT_DEL, - MUT_DEL, - MUT_DEL, - MUT_DEL, - MUT_DEL, - MUT_EXTRA_OVERWRITE, - MUT_EXTRA_OVERWRITE, - MUT_EXTRA_INSERT, - MUT_EXTRA_INSERT, - MUT_AUTO_EXTRA_OVERWRITE, - MUT_AUTO_EXTRA_OVERWRITE, - MUT_AUTO_EXTRA_INSERT, - MUT_AUTO_EXTRA_INSERT, - MUT_SPLICE_OVERWRITE, - MUT_SPLICE_OVERWRITE, - MUT_SPLICE_INSERT, - MUT_SPLICE_INSERT}; +u32 normal_splice_array[MUT_NORMAL_ARRAY_SIZE] = {MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + 
MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT}; + #define MUT_SPLICE_ARRAY_SIZE 81 u32 full_splice_array[MUT_SPLICE_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_FLIPBIT, -- cgit 1.4.1 From c2c27349c3d74f79ceb6cd3795862b21d90429ea Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Fri, 23 Jun 2023 17:08:21 +0200 Subject: new mutation weighting --- include/afl-mutations.h | 460 ++++++++++++++++++++++++++++++++++++++++++++++++ src/afl-fuzz-one.c | 30 +--- 2 files changed, 466 insertions(+), 24 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index a3c9fd59..cc4840c8 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -77,6 +77,466 @@ enum { }; + #define MUT_TXT_ARRAY_SIZE 200 +u32 text_array[MUT_TXT_ARRAY_SIZE] = {MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + 
MUT_BYTESUB, + MUT_BYTESUB, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT}; + + #define MUT_BIN_ARRAY_SIZE 256 +u32 binary_array[MUT_BIN_ARRAY_SIZE] = {MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_FLIPBIT, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING8, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING16BE, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_INTERESTING32BE, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8_, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH8, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16BE_, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH16BE, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32BE_, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_ARITH32BE, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + MUT_RAND8, + 
MUT_RAND8, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_COPY, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_CLONE_FIXED, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_COPY, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_OVERWRITE_FIXED, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTEADD, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_BYTESUB, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_FLIP8, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_SWITCH, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_DEL, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_OVERWRITE, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_EXTRA_INSERT, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_OVERWRITE, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_AUTO_EXTRA_INSERT, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_OVERWRITE, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT, + MUT_SPLICE_INSERT}; + #define MUT_NORMAL_ARRAY_SIZE 77 u32 normal_splice_array[MUT_NORMAL_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_FLIPBIT, diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index 32c05182..c6e49653 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2101,27 +2101,17 @@ havoc_stage: */ - rand_max = MUT_STRATEGY_ARRAY_SIZE; - if (unlikely(afl->text_input)) { // is text? if (likely(afl->fuzz_mode == 0)) { // is exploration? - if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) { - - mutation_array = full_splice_array; - rand_max = MUT_SPLICE_ARRAY_SIZE; - - } else { - - mutation_array = normal_splice_array; - rand_max = MUT_NORMAL_ARRAY_SIZE; - - } + mutation_array = (unsigned int *)&text_array; + rand_max = MUT_TXT_ARRAY_SIZE; } else { // is exploitation! 
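/* Editor's note, not part of the committed patch: the strategy tables in this
   commit (text_array with MUT_TXT_ARRAY_SIZE = 200 entries, binary_array with
   MUT_BIN_ARRAY_SIZE = 256) encode mutation weights purely by duplication. A
   mutation listed k times in a table of N entries is chosen with probability
   roughly k/N, because each havoc step draws one slot uniformly. A minimal
   sketch of that selection, reusing only names that appear in the surrounding
   hunk (mutation_array, rand_max, rand_below):

     u32 r = rand_below(afl, rand_max);  // uniform slot in the weight table
     u32 mut = mutation_array[r];        // e.g. MUT_SPLICE_INSERT
     // the switch statement that follows then applies that single mutation

   Re-weighting a strategy therefore only requires adding or removing
   duplicate entries; no separate probability table is involved. */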
mutation_array = (unsigned int *)&mutation_strategy_exploitation_text; + rand_max = MUT_STRATEGY_ARRAY_SIZE; } @@ -2129,21 +2119,13 @@ havoc_stage: if (likely(afl->fuzz_mode == 0)) { // is exploration? - if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) { - - mutation_array = full_splice_array; - rand_max = MUT_SPLICE_ARRAY_SIZE; - - } else { - - mutation_array = normal_splice_array; - rand_max = MUT_NORMAL_ARRAY_SIZE; - - } + mutation_array = (unsigned int *)&binary_array; + rand_max = MUT_BIN_ARRAY_SIZE; } else { // is exploitation! mutation_array = (unsigned int *)&mutation_strategy_exploitation_binary; + rand_max = MUT_STRATEGY_ARRAY_SIZE; } -- cgit 1.4.1 From 3e1d7941077b1457f702988063d6b9fdd9b80740 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Thu, 29 Jun 2023 16:57:20 +0200 Subject: update mutation strategy --- docs/Changelog.md | 4 +++- include/afl-fuzz.h | 59 +++++++++++++++++++++++++------------------------ include/afl-mutations.h | 6 ++--- src/afl-fuzz-one.c | 56 +++++++++++++++++++++++++++------------------- src/afl-fuzz.c | 26 +++++++++++++++++----- 5 files changed, 90 insertions(+), 61 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/docs/Changelog.md b/docs/Changelog.md index e6b90d3d..ad58e99e 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -8,7 +8,8 @@ - new mutation engine: mutations that favor discovery more paths are prefered until no new finds for 10 minutes then switching to mutations that favor triggering crashes. Modes and switch time can be configured - with `-P`. + with `-P`. Also input mode for the target can be defined with `-a` to + be `text` or `binary` (defaults to `generic`) - new custom mutator that has the new afl++ engine (so it can easily incorporated into new custom mutators), and also comes with a standalone command line tool! See custom_mutators/aflpp/standalone/ @@ -23,6 +24,7 @@ Thanks to @amykweon for spotting and fixing! - @toka fixed a bug in laf-intel signed integer comparison splitting, thanks a lot!! + - more LLVM compatability - frida_mode: - support for long form instrumentation on x86_x64 and arm64 diff --git a/include/afl-fuzz.h b/include/afl-fuzz.h index c6c45fbd..9da5cc03 100644 --- a/include/afl-fuzz.h +++ b/include/afl-fuzz.h @@ -505,36 +505,37 @@ typedef struct afl_state { is_main_node, /* if this is the main node */ is_secondary_node, /* if this is a secondary instance */ pizza_is_served, /* pizza mode */ - text_input, /* target wants text inputs */ - fuzz_mode, /* current mode: coverage/exploration or crash/exploitation */ + input_mode, /* target wants text inputs */ + fuzz_mode, /* coverage/exploration or crash/exploitation mode */ schedule, /* Power schedule (default: EXPLORE)*/ - havoc_max_mult, skip_deterministic, /* Skip deterministic stages? */ - use_splicing, /* Recombine input files? */ - non_instrumented_mode, /* Run in non-instrumented mode? */ - score_changed, /* Scoring for favorites changed? */ - resuming_fuzz, /* Resuming an older fuzzing job? */ - timeout_given, /* Specific timeout given? */ - not_on_tty, /* stdout is not a tty */ - term_too_small, /* terminal dimensions too small */ - no_forkserver, /* Disable forkserver? */ - crash_mode, /* Crash mode! Yeah! */ - in_place_resume, /* Attempt in-place resume? */ - autoresume, /* Resume if afl->out_dir exists? */ - auto_changed, /* Auto-generated tokens changed? */ - no_cpu_meter_red, /* Feng shui on the status screen */ - no_arith, /* Skip most arithmetic ops */ - shuffle_queue, /* Shuffle input queue? 
*/ - bitmap_changed, /* Time to update bitmap? */ - unicorn_mode, /* Running in Unicorn mode? */ - use_wine, /* Use WINE with QEMU mode */ - skip_requested, /* Skip request, via SIGUSR1 */ - run_over10m, /* Run time over 10 minutes? */ - persistent_mode, /* Running in persistent mode? */ - deferred_mode, /* Deferred forkserver mode? */ - fixed_seed, /* do not reseed */ - fast_cal, /* Try to calibrate faster? */ - disable_trim, /* Never trim in fuzz_one */ - shmem_testcase_mode, /* If sharedmem testcases are used */ + havoc_max_mult, /* havoc multiplier */ + skip_deterministic, /* Skip deterministic stages? */ + use_splicing, /* Recombine input files? */ + non_instrumented_mode, /* Run in non-instrumented mode? */ + score_changed, /* Scoring for favorites changed? */ + resuming_fuzz, /* Resuming an older fuzzing job? */ + timeout_given, /* Specific timeout given? */ + not_on_tty, /* stdout is not a tty */ + term_too_small, /* terminal dimensions too small */ + no_forkserver, /* Disable forkserver? */ + crash_mode, /* Crash mode! Yeah! */ + in_place_resume, /* Attempt in-place resume? */ + autoresume, /* Resume if afl->out_dir exists? */ + auto_changed, /* Auto-generated tokens changed? */ + no_cpu_meter_red, /* Feng shui on the status screen */ + no_arith, /* Skip most arithmetic ops */ + shuffle_queue, /* Shuffle input queue? */ + bitmap_changed, /* Time to update bitmap? */ + unicorn_mode, /* Running in Unicorn mode? */ + use_wine, /* Use WINE with QEMU mode */ + skip_requested, /* Skip request, via SIGUSR1 */ + run_over10m, /* Run time over 10 minutes? */ + persistent_mode, /* Running in persistent mode? */ + deferred_mode, /* Deferred forkserver mode? */ + fixed_seed, /* do not reseed */ + fast_cal, /* Try to calibrate faster? */ + disable_trim, /* Never trim in fuzz_one */ + shmem_testcase_mode, /* If sharedmem testcases are used */ expand_havoc, /* perform expensive havoc after no find */ cycle_schedules, /* cycle power schedules? */ old_seed_selection, /* use vanilla afl seed selection */ diff --git a/include/afl-mutations.h b/include/afl-mutations.h index cc4840c8..0a9bbbf4 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -14,14 +14,14 @@ Parameters: afl_state_t *afl - the *afl state pointer u8 *buf - the input buffer to mutate which will be mutated into. - NOTE: must be able to contain a size of at least max_len (see below)! + NOTE: must be able to contain a size of at least max_len!! (see below) u32 len - the length of the input u32 steps - how many mutations to perform on the input bool is_text - is the target expecting text inputs bool is_exploration - mutate for exploration mode (instead of exploitation) splice_buf - a buffer from another corpus item to splice with. - If NULL then no splicing - splice_len - the length of the splice buffer. If 0 then no splicing + If NULL then no splicing is done (obviously). + splice_len - the length of the splice buffer. If 0 then no splicing. u32 max_len - the maximum size the mutated buffer may grow to */ diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index c6e49653..0d3c29f2 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2085,47 +2085,57 @@ havoc_stage: u32 *mutation_array; u32 stack_max, rand_max; // stack_max_pow = afl->havoc_stack_pow2; - /* + switch (afl->input_mode) { - if (unlikely(afl->expand_havoc && afl->ready_for_splicing_count > 1)) { + case 1: { // TEXT - mutation_array = full_splice_array; - rand_max = MUT_SPLICE_ARRAY_SIZE; + if (likely(afl->fuzz_mode == 0)) { // is exploration? 
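/* Editor's note, not part of the committed patch: this switch is what
   connects the new command line options to the strategy tables. According to
   the Changelog hunk earlier in this commit, -a text or -a binary sets
   afl->input_mode (with generic behaviour when -a is omitted), and -P selects
   between the exploration and exploitation strategies. A hypothetical
   invocation under those assumptions might look like:

     afl-fuzz -i in -o out -a text -P explore -- ./target @@

   The exact values accepted by -P are not shown in this excerpt, so treat the
   example as illustrative only. */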
+ mutation_array = (unsigned int *)&binary_array; + rand_max = MUT_BIN_ARRAY_SIZE; - } else { + } else { // exploitation mode - mutation_array = normal_splice_array; - rand_max = MUT_NORMAL_ARRAY_SIZE; + mutation_array = (unsigned int *)&mutation_strategy_exploitation_text; + rand_max = MUT_STRATEGY_ARRAY_SIZE; - } + } - */ + break; - if (unlikely(afl->text_input)) { // is text? + } - if (likely(afl->fuzz_mode == 0)) { // is exploration? + case 2: { // BINARY - mutation_array = (unsigned int *)&text_array; - rand_max = MUT_TXT_ARRAY_SIZE; + if (likely(afl->fuzz_mode == 0)) { // is exploration? + mutation_array = (unsigned int *)&mutation_strategy_exploration_binary; + rand_max = MUT_STRATEGY_ARRAY_SIZE; - } else { // is exploitation! + } else { // exploitation mode - mutation_array = (unsigned int *)&mutation_strategy_exploitation_text; - rand_max = MUT_STRATEGY_ARRAY_SIZE; + mutation_array = (unsigned int *)&mutation_strategy_exploitation_binary; + rand_max = MUT_STRATEGY_ARRAY_SIZE; + + } + + break; } - } else { // is binary! + default: { // DEFAULT/GENERIC - if (likely(afl->fuzz_mode == 0)) { // is exploration? + if (likely(afl->fuzz_mode == 0)) { // is exploration? + mutation_array = (unsigned int *)&binary_array; + rand_max = MUT_BIN_ARRAY_SIZE; - mutation_array = (unsigned int *)&binary_array; - rand_max = MUT_BIN_ARRAY_SIZE; + } else { // exploitation mode - } else { // is exploitation! + // this will need to be changed I guess + mutation_array = (unsigned int *)&mutation_strategy_exploration_text; + rand_max = MUT_STRATEGY_ARRAY_SIZE; + + } - mutation_array = (unsigned int *)&mutation_strategy_exploitation_binary; - rand_max = MUT_STRATEGY_ARRAY_SIZE; + break; } diff --git a/src/afl-fuzz.c b/src/afl-fuzz.c index 79b05da7..ab7d6534 100644 --- a/src/afl-fuzz.c +++ b/src/afl-fuzz.c @@ -125,7 +125,8 @@ static void usage(u8 *argv0, int more_help) { "Required parameters:\n" " -i dir - input directory with test cases (or '-' to resume, " - "also see AFL_AUTORESUME)\n" + "also see \n" + " AFL_AUTORESUME)\n" " -o dir - output directory for fuzzer findings\n\n" "Execution control settings:\n" @@ -164,8 +165,8 @@ static void usage(u8 *argv0, int more_help) { "\n" "Mutator settings:\n" - " -a - target expects ascii text input (prefer text " - "mutators)\n" + " -a - target input format, \"text\" or \"binary\" (default: " + "generic)\n" " -g minlength - set min length of generated fuzz input (default: 1)\n" " -G maxlength - set max length of generated fuzz input (default: " "%lu)\n" @@ -506,13 +507,28 @@ int main(int argc, char **argv_orig, char **envp) { // still available: HjJkKqruvwz while ((opt = getopt(argc, argv, - "+aAb:B:c:CdDe:E:f:F:g:G:hi:I:l:L:m:M:nNo:Op:P:QRs:S:t:" + "+a:Ab:B:c:CdDe:E:f:F:g:G:hi:I:l:L:m:M:nNo:Op:P:QRs:S:t:" "T:UV:WXx:YZ")) > 0) { switch (opt) { case 'a': - afl->text_input = 1; + + if (!stricmp(optarg, "text") || !stricmp(optarg, "ascii") || + !stricmp(optarg, "txt") || !stricmp(optarg, "asc")) { + + afl->input_mode = 1; + + } else if (!stricmp(optarg, "bin") || !stricmp(optarg, "binary")) { + + afl->input_mode = 2; + + } else { + + FATAL("-a input mode needs to be \"text\" or \"binary\"."); + + } + break; case 'P': -- cgit 1.4.1 From 168ade3b70077ec6a24df9fc594e3b8c1db89bd6 Mon Sep 17 00:00:00 2001 From: mark0 <59284400+mark0-cn@users.noreply.github.com> Date: Mon, 31 Jul 2023 05:51:24 +0800 Subject: Fix format specifiers (#1818) * Update afl-mutations.h Fix bug: compilation cannot pass when DEBUG macro is enabled * Update afl-fuzz-one.c Fix bug: compilation cannot 
pass when DEBUG macro is enabled --- include/afl-mutations.h | 2 +- src/afl-fuzz-one.c | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 0a9bbbf4..5dde4473 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -2456,7 +2456,7 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } char buf[20]; - snprintf(buf, sizeof(buf), "%ld", val); + snprintf(buf, sizeof(buf), "%lld", val); u32 old_len = off2 - off; u32 new_len = strlen(buf); diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index 8ee50bbf..f4ae7bfd 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2995,7 +2995,7 @@ havoc_stage: // fprintf(stderr, "val: %u-%u = %ld\n", off, off2, val); char buf[20]; - snprintf(buf, sizeof(buf), "%ld", val); + snprintf(buf, sizeof(buf), "%lld", val); // fprintf(stderr, "BEFORE: %s\n", out_buf); -- cgit 1.4.1 From d0782a7f03a23f8323772d189e5b66a4eb086afd Mon Sep 17 00:00:00 2001 From: Dominik Maier Date: Sun, 30 Jul 2023 21:59:57 +0000 Subject: Various fixes for warnings, extends #1818 --- include/afl-mutations.h | 13 +++++++------ src/afl-fuzz-one.c | 2 +- src/afl-fuzz-redqueen.c | 2 +- unicorn_mode/unicornafl | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 5dde4473..1806790e 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -25,13 +25,14 @@ u32 max_len - the maximum size the mutated buffer may grow to */ -#ifndef _ANDROID_ASHMEM_H - #define AFL_MUTATIONS_H +#ifndef AFL_MUTATIONS_H +#define AFL_MUTATIONS_H - #include - #include "afl-fuzz.h" +#include +#include +#include "afl-fuzz.h" - #define MUT_STRATEGY_ARRAY_SIZE 256 +#define MUT_STRATEGY_ARRAY_SIZE 256 enum { @@ -2456,7 +2457,7 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } char buf[20]; - snprintf(buf, sizeof(buf), "%lld", val); + snprintf(buf, sizeof(buf), "%" PRId64, val); u32 old_len = off2 - off; u32 new_len = strlen(buf); diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index f4ae7bfd..2ad4697e 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -2995,7 +2995,7 @@ havoc_stage: // fprintf(stderr, "val: %u-%u = %ld\n", off, off2, val); char buf[20]; - snprintf(buf, sizeof(buf), "%lld", val); + snprintf(buf, sizeof(buf), "%" PRId64, val); // fprintf(stderr, "BEFORE: %s\n", out_buf); diff --git a/src/afl-fuzz-redqueen.c b/src/afl-fuzz-redqueen.c index 8a652a9f..54bf4e32 100644 --- a/src/afl-fuzz-redqueen.c +++ b/src/afl-fuzz-redqueen.c @@ -2665,7 +2665,7 @@ static u8 rtn_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u8 *cbuf, } - rtn_fuzz_next_iter: + // rtn_fuzz_next_iter: afl->stage_cur++; } diff --git a/unicorn_mode/unicornafl b/unicorn_mode/unicornafl index f2cede37..2df75f3e 160000 --- a/unicorn_mode/unicornafl +++ b/unicorn_mode/unicornafl @@ -1 +1 @@ -Subproject commit f2cede37a75bbd4a9b9438f0277727b5d4620572 +Subproject commit 2df75f3e1045367cab95fe3471191b38c1a9f79e -- cgit 1.4.1 From 79640acbf1ffff9677ec9094b61ac4a158b1551c Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Fri, 4 Aug 2023 09:25:19 +0200 Subject: nits --- include/afl-mutations.h | 8 ++++---- include/forkserver.h | 3 ++- src/afl-forkserver.c | 17 ++++++++++++----- src/afl-fuzz-redqueen.c | 2 +- src/afl-fuzz.c | 15 +++++++-------- 5 files changed, 26 insertions(+), 19 deletions(-) (limited to 'include/afl-mutations.h') diff --git 
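/* Editor's note, not part of the committed patches: the two format-specifier
   changes above are chasing one portability problem. Assuming val is a 64-bit
   signed integer (int64_t), "%ld" only matches it on platforms where long is
   64 bits, and "%lld" only where int64_t is defined as long long, so one or
   the other warns under -Wformat depending on the platform. PRId64 always
   expands to the specifier that matches int64_t, e.g.:

     #include <inttypes.h>
     #include <stdint.h>
     #include <stdio.h>

     int64_t val = -1234567890123LL;
     char buf[20];
     snprintf(buf, sizeof(buf), "%" PRId64, val);  // portable on all targets

   which is why the follow-up commit settles on PRId64; the macro is provided
   by <inttypes.h>. */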
a/include/afl-mutations.h b/include/afl-mutations.h index 1806790e..98ba6fcf 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -78,7 +78,7 @@ enum { }; - #define MUT_TXT_ARRAY_SIZE 200 +#define MUT_TXT_ARRAY_SIZE 200 u32 text_array[MUT_TXT_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_FLIPBIT, MUT_FLIPBIT, @@ -280,7 +280,7 @@ u32 text_array[MUT_TXT_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_SPLICE_INSERT, MUT_SPLICE_INSERT}; - #define MUT_BIN_ARRAY_SIZE 256 +#define MUT_BIN_ARRAY_SIZE 256 u32 binary_array[MUT_BIN_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_FLIPBIT, MUT_FLIPBIT, @@ -538,7 +538,7 @@ u32 binary_array[MUT_BIN_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_SPLICE_INSERT, MUT_SPLICE_INSERT}; - #define MUT_NORMAL_ARRAY_SIZE 77 +#define MUT_NORMAL_ARRAY_SIZE 77 u32 normal_splice_array[MUT_NORMAL_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_FLIPBIT, MUT_FLIPBIT, @@ -617,7 +617,7 @@ u32 normal_splice_array[MUT_NORMAL_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_SPLICE_INSERT, MUT_SPLICE_INSERT}; - #define MUT_SPLICE_ARRAY_SIZE 81 +#define MUT_SPLICE_ARRAY_SIZE 81 u32 full_splice_array[MUT_SPLICE_ARRAY_SIZE] = {MUT_FLIPBIT, MUT_FLIPBIT, MUT_FLIPBIT, diff --git a/include/forkserver.h b/include/forkserver.h index c93c6f61..1d41d83d 100644 --- a/include/forkserver.h +++ b/include/forkserver.h @@ -86,7 +86,8 @@ typedef struct { uint32_t size); bool (*nyx_remove_work_dir)(const char *workdir); - bool (*nyx_config_set_aux_buffer_size)(void *config, uint32_t aux_buffer_size); + bool (*nyx_config_set_aux_buffer_size)(void *config, + uint32_t aux_buffer_size); } nyx_plugin_handler_t; diff --git a/src/afl-forkserver.c b/src/afl-forkserver.c index 957cb2b7..e90ea460 100644 --- a/src/afl-forkserver.c +++ b/src/afl-forkserver.c @@ -129,7 +129,8 @@ nyx_plugin_handler_t *afl_load_libnyx_plugin(u8 *libnyx_binary) { plugin->nyx_remove_work_dir = dlsym(handle, "nyx_remove_work_dir"); if (plugin->nyx_remove_work_dir == NULL) { goto fail; } - plugin->nyx_config_set_aux_buffer_size = dlsym(handle, "nyx_config_set_aux_buffer_size"); + plugin->nyx_config_set_aux_buffer_size = + dlsym(handle, "nyx_config_set_aux_buffer_size"); if (plugin->nyx_config_set_aux_buffer_size == NULL) { goto fail; } OKF("libnyx plugin is ready!"); @@ -593,10 +594,16 @@ void afl_fsrv_start(afl_forkserver_t *fsrv, char **argv, } if (getenv("AFL_NYX_AUX_SIZE") != NULL) { - if(fsrv->nyx_handlers->nyx_config_set_aux_buffer_size( - nyx_config, atoi(getenv("AFL_NYX_AUX_SIZE"))) != 1) { - NYX_PRE_FATAL(fsrv, "Invalid AFL_NYX_AUX_SIZE value set (must be a multiple of 4096) ..."); - } + + if (fsrv->nyx_handlers->nyx_config_set_aux_buffer_size( + nyx_config, atoi(getenv("AFL_NYX_AUX_SIZE"))) != 1) { + + NYX_PRE_FATAL(fsrv, + "Invalid AFL_NYX_AUX_SIZE value set (must be a multiple " + "of 4096) ..."); + + } + } if (getenv("NYX_REUSE_SNAPSHOT") != NULL) { diff --git a/src/afl-fuzz-redqueen.c b/src/afl-fuzz-redqueen.c index 54bf4e32..ca5104c0 100644 --- a/src/afl-fuzz-redqueen.c +++ b/src/afl-fuzz-redqueen.c @@ -2665,7 +2665,7 @@ static u8 rtn_fuzz(afl_state_t *afl, u32 key, u8 *orig_buf, u8 *buf, u8 *cbuf, } - // rtn_fuzz_next_iter: + // rtn_fuzz_next_iter: afl->stage_cur++; } diff --git a/src/afl-fuzz.c b/src/afl-fuzz.c index 9504d908..29659013 100644 --- a/src/afl-fuzz.c +++ b/src/afl-fuzz.c @@ -311,8 +311,8 @@ static void usage(u8 *argv0, int more_help) { PERSISTENT_MSG - "AFL_POST_PROCESS_KEEP_ORIGINAL: save the file as it was prior post-processing to the queue,\n" - " but execute the post-processed one\n" + "AFL_POST_PROCESS_KEEP_ORIGINAL: save the file as it was prior post-processing to\n" + " 
the queue, but execute the post-processed one\n" "AFL_PRELOAD: LD_PRELOAD / DYLD_INSERT_LIBRARIES settings for target\n" "AFL_TARGET_ENV: pass extra environment variables to target\n" "AFL_SHUFFLE_QUEUE: reorder the input queue randomly on startup\n" @@ -323,18 +323,17 @@ static void usage(u8 *argv0, int more_help) { "AFL_STATSD_HOST: change default statsd host (default 127.0.0.1)\n" "AFL_STATSD_PORT: change default statsd port (default: 8125)\n" "AFL_STATSD_TAGS_FLAVOR: set statsd tags format (default: disable tags)\n" - " Supported formats are: 'dogstatsd', 'librato',\n" - " 'signalfx' and 'influxdb'\n" + " suported formats: dogstatsd, librato, signalfx, influxdb\n" "AFL_SYNC_TIME: sync time between fuzzing instances (in minutes)\n" "AFL_NO_CRASH_README: do not create a README in the crashes directory\n" "AFL_TESTCACHE_SIZE: use a cache for testcases, improves performance (in MB)\n" "AFL_TMPDIR: directory to use for input file generation (ramdisk recommended)\n" "AFL_EARLY_FORKSERVER: force an early forkserver in an afl-clang-fast/\n" " afl-clang-lto/afl-gcc-fast target\n" - "AFL_PERSISTENT: enforce persistent mode (if __AFL_LOOP is in a shared lib\n" - "AFL_DEFER_FORKSRV: enforced deferred forkserver (__AFL_INIT is in a .so)\n" - "AFL_FUZZER_STATS_UPDATE_INTERVAL: interval to update fuzzer_stats file in seconds, " - "(default: 60, minimum: 1)\n" + "AFL_PERSISTENT: enforce persistent mode (if __AFL_LOOP is in a shared lib)\n" + "AFL_DEFER_FORKSRV: enforced deferred forkserver (__AFL_INIT is in a shared lib)\n" + "AFL_FUZZER_STATS_UPDATE_INTERVAL: interval to update fuzzer_stats file in\n" + " seconds (default: 60, minimum: 1)\n" "\n" ); -- cgit 1.4.1 From f3d2127fd815bed2ec9dfab981123898d11cea65 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Mon, 6 Nov 2023 10:13:59 +0100 Subject: clang-format 16->17 --- .custom-format.py | 2 +- frida_mode/src/main.c | 8 +++--- include/afl-mutations.h | 2 +- include/xxhash.h | 30 +++++++++++------------ instrumentation/cmplog-instructions-pass.cc | 2 +- instrumentation/cmplog-routines-pass.cc | 2 +- instrumentation/cmplog-switches-pass.cc | 2 +- instrumentation/split-switches-pass.so.cc | 2 +- qemu_mode/libqasan/dlmalloc.c | 38 ++++++++++++++--------------- qemu_mode/libqasan/malloc.c | 4 +-- src/afl-fuzz-one.c | 30 +++++++++++------------ src/afl-fuzz-redqueen.c | 2 +- utils/libtokencap/libtokencap.so.c | 6 ++--- 13 files changed, 65 insertions(+), 65 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/.custom-format.py b/.custom-format.py index 3521c05d..c8075ace 100755 --- a/.custom-format.py +++ b/.custom-format.py @@ -24,7 +24,7 @@ import importlib.metadata # string_re = re.compile('(\\"(\\\\.|[^"\\\\])*\\")') # TODO: for future use -CURRENT_LLVM = os.getenv('LLVM_VERSION', 16) +CURRENT_LLVM = os.getenv('LLVM_VERSION', 17) CLANG_FORMAT_BIN = os.getenv("CLANG_FORMAT_BIN", "") diff --git a/frida_mode/src/main.c b/frida_mode/src/main.c index f11c4b25..bd7b1351 100644 --- a/frida_mode/src/main.c +++ b/frida_mode/src/main.c @@ -49,10 +49,10 @@ extern void __libc_init(void *raw_args, void (*onexit)(void) __unused, int (*slingshot)(int, char **, char **), structors_array_t const *const structors); #else -extern int __libc_start_main(int (*main)(int, char **, char **), int argc, - char **ubp_av, void (*init)(void), - void (*fini)(void), void (*rtld_fini)(void), - void(*stack_end)); +extern int __libc_start_main(int (*main)(int, char **, char **), int argc, + char **ubp_av, void (*init)(void), + void (*fini)(void), void (*rtld_fini)(void), + 
void(*stack_end)); #endif typedef int (*main_fn_t)(int argc, char **argv, char **envp); diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 98ba6fcf..d709b90d 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -1854,7 +1854,7 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, for (u32 step = 0; step < steps; ++step) { - retry_havoc_step : { + retry_havoc_step: { u32 r = rand_below(afl, MUT_STRATEGY_ARRAY_SIZE), item; diff --git a/include/xxhash.h b/include/xxhash.h index 7bc0a14e..a8bd6f27 100644 --- a/include/xxhash.h +++ b/include/xxhash.h @@ -365,7 +365,7 @@ typedef uint32_t XXH32_hash_t; (defined(__cplusplus) || \ (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */)) #include -typedef uint32_t XXH32_hash_t; +typedef uint32_t XXH32_hash_t; #else #include @@ -1082,7 +1082,7 @@ struct XXH64_state_s { #include #define XXH_ALIGN(n) alignas(n) #elif defined(__cplusplus) && (__cplusplus >= 201103L) /* >= C++11 */ - /* In C++ alignas() is a keyword */ + /* In C++ alignas() is a keyword */ #define XXH_ALIGN(n) alignas(n) #elif defined(__GNUC__) #define XXH_ALIGN(n) __attribute__((aligned(n))) @@ -3031,8 +3031,8 @@ XXH64_hashFromCanonical(const XXH64_canonical_t *src) { __STDC_VERSION__ >= 199901L /* >= C99 */ #define XXH_RESTRICT restrict #else - /* Note: it might be useful to define __restrict or __restrict__ for - * some C++ compilers */ + /* Note: it might be useful to define __restrict or __restrict__ for + * some C++ compilers */ #define XXH_RESTRICT /* disable */ #endif @@ -3492,8 +3492,8 @@ XXH_FORCE_INLINE xxh_u64x2 XXH_vec_loadu(const void *ptr) { #define XXH_vec_mulo vec_mulo #define XXH_vec_mule vec_mule #elif defined(__clang__) && XXH_HAS_BUILTIN(__builtin_altivec_vmuleuw) - /* Clang has a better way to control this, we can just use the builtin - * which doesn't swap. */ + /* Clang has a better way to control this, we can just use the builtin + * which doesn't swap. */ #define XXH_vec_mulo __builtin_altivec_vmulouw #define XXH_vec_mule __builtin_altivec_vmuleuw #else @@ -3604,15 +3604,15 @@ XXH_FORCE_INLINE xxh_u64 XXH_mult32to64(xxh_u64 x, xxh_u64 y) { #include #define XXH_mult32to64(x, y) __emulu((unsigned)(x), (unsigned)(y)) #else - /* - * Downcast + upcast is usually better than masking on older compilers - * like GCC 4.2 (especially 32-bit ones), all without affecting newer - * compilers. - * - * The other method, (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF), will AND both - * operands and perform a full 64x64 multiply -- entirely redundant on - * 32-bit. - */ + /* + * Downcast + upcast is usually better than masking on older compilers + * like GCC 4.2 (especially 32-bit ones), all without affecting newer + * compilers. + * + * The other method, (x & 0xFFFFFFFF) * (y & 0xFFFFFFFF), will AND both + * operands and perform a full 64x64 multiply -- entirely redundant on + * 32-bit. 
+ */ #define XXH_mult32to64(x, y) \ ((xxh_u64)(xxh_u32)(x) * (xxh_u64)(xxh_u32)(y)) #endif diff --git a/instrumentation/cmplog-instructions-pass.cc b/instrumentation/cmplog-instructions-pass.cc index bca1f927..9cd1dc59 100644 --- a/instrumentation/cmplog-instructions-pass.cc +++ b/instrumentation/cmplog-instructions-pass.cc @@ -90,7 +90,7 @@ class CmpLogInstructions : public ModulePass { #if LLVM_MAJOR >= 11 /* use new pass manager */ PreservedAnalyses run(Module &M, ModuleAnalysisManager &MAM); #else - bool runOnModule(Module &M) override; + bool runOnModule(Module &M) override; #if LLVM_VERSION_MAJOR >= 4 StringRef getPassName() const override { diff --git a/instrumentation/cmplog-routines-pass.cc b/instrumentation/cmplog-routines-pass.cc index c3fbed8d..54e9ddf3 100644 --- a/instrumentation/cmplog-routines-pass.cc +++ b/instrumentation/cmplog-routines-pass.cc @@ -85,7 +85,7 @@ class CmpLogRoutines : public ModulePass { #if LLVM_VERSION_MAJOR >= 11 /* use new pass manager */ PreservedAnalyses run(Module &M, ModuleAnalysisManager &MAM); #else - bool runOnModule(Module &M) override; + bool runOnModule(Module &M) override; #if LLVM_VERSION_MAJOR >= 4 StringRef getPassName() const override { diff --git a/instrumentation/cmplog-switches-pass.cc b/instrumentation/cmplog-switches-pass.cc index 38de669d..01da6da7 100644 --- a/instrumentation/cmplog-switches-pass.cc +++ b/instrumentation/cmplog-switches-pass.cc @@ -85,7 +85,7 @@ class CmplogSwitches : public ModulePass { #if LLVM_VERSION_MAJOR >= 11 /* use new pass manager */ PreservedAnalyses run(Module &M, ModuleAnalysisManager &MAM); #else - bool runOnModule(Module &M) override; + bool runOnModule(Module &M) override; #if LLVM_VERSION_MAJOR < 4 const char *getPassName() const override { diff --git a/instrumentation/split-switches-pass.so.cc b/instrumentation/split-switches-pass.so.cc index dcd89652..e3dfea0d 100644 --- a/instrumentation/split-switches-pass.so.cc +++ b/instrumentation/split-switches-pass.so.cc @@ -84,7 +84,7 @@ class SplitSwitchesTransform : public ModulePass { #if LLVM_VERSION_MAJOR >= 11 /* use new pass manager */ PreservedAnalyses run(Module &M, ModuleAnalysisManager &MAM); #else - bool runOnModule(Module &M) override; + bool runOnModule(Module &M) override; #if LLVM_VERSION_MAJOR >= 4 StringRef getPassName() const override { diff --git a/qemu_mode/libqasan/dlmalloc.c b/qemu_mode/libqasan/dlmalloc.c index b459eb7b..1919ae26 100644 --- a/qemu_mode/libqasan/dlmalloc.c +++ b/qemu_mode/libqasan/dlmalloc.c @@ -771,8 +771,8 @@ MAX_RELEASE_CHECK_RATE default: 4095 unless not HAVE_MMAP #include "/usr/include/malloc.h" #else /* HAVE_USR_INCLUDE_MALLOC_H */ #ifndef STRUCT_MALLINFO_DECLARED - /* HP-UX (and others?) redefines mallinfo unless _STRUCT_MALLINFO is - * defined */ + /* HP-UX (and others?) redefines mallinfo unless _STRUCT_MALLINFO is + * defined */ #define _STRUCT_MALLINFO #define STRUCT_MALLINFO_DECLARED 1 struct mallinfo { @@ -1660,10 +1660,10 @@ extern size_t getpagesize(); #define is_aligned(A) (((size_t)((A)) & (CHUNK_ALIGN_MASK)) == 0) /* the number of bytes to offset an address to align it */ - #define align_offset(A) \ - ((((size_t)(A)&CHUNK_ALIGN_MASK) == 0) \ - ? 0 \ - : ((MALLOC_ALIGNMENT - ((size_t)(A)&CHUNK_ALIGN_MASK)) & \ + #define align_offset(A) \ + ((((size_t)(A) & CHUNK_ALIGN_MASK) == 0) \ + ? 
0 \ + : ((MALLOC_ALIGNMENT - ((size_t)(A) & CHUNK_ALIGN_MASK)) & \ CHUNK_ALIGN_MASK)) /* -------------------------- MMAP preliminaries ------------------------- */ @@ -1715,10 +1715,10 @@ static FORCEINLINE int unixmunmap(void *ptr, size_t size) { #define MUNMAP_DEFAULT(a, s) unixmunmap((a), (s)) #else /* MAP_ANONYMOUS */ - /* - Nearly all versions of mmap support MAP_ANONYMOUS, so the following - is unlikely to be needed, but is supplied just in case. - */ + /* + Nearly all versions of mmap support MAP_ANONYMOUS, so the following + is unlikely to be needed, but is supplied just in case. + */ #define MMAP_FLAGS (MAP_PRIVATE) static int dev_zero_fd = -1; /* Cached file descriptor for /dev/zero. */ #define MMAP_DEFAULT(s) \ @@ -1965,7 +1965,7 @@ static FORCEINLINE void x86_clear_lock(int *sl) { #endif /* ... gcc spins locks ... */ - /* How to yield for a spin lock */ + /* How to yield for a spin lock */ #define SPINS_PER_YIELD 63 #if defined(_MSC_VER) #define SLEEP_EX_DURATION 50 /* delay for yield/sleep */ @@ -2008,11 +2008,11 @@ static MLOCK_T malloc_global_mutex = 0; #define CURRENT_THREAD GetCurrentThreadId() #define EQ_OWNER(X, Y) ((X) == (Y)) #else - /* - Note: the following assume that pthread_t is a type that can be - initialized to (casted) zero. If this is not the case, you will need - to somehow redefine these or not use spin locks. - */ + /* + Note: the following assume that pthread_t is a type that can be + initialized to (casted) zero. If this is not the case, you will need + to somehow redefine these or not use spin locks. + */ #define THREAD_ID_T pthread_t #define CURRENT_THREAD pthread_self() #define EQ_OWNER(X, Y) pthread_equal(X, Y) @@ -2169,7 +2169,7 @@ static int pthread_init_lock(MLOCK_T *lk) { #endif /* ... lock types ... */ - /* Common code for all lock types */ + /* Common code for all lock types */ #define USE_LOCK_BIT (2U) #ifndef ACQUIRE_MALLOC_GLOBAL_LOCK @@ -3077,7 +3077,7 @@ static size_t traverse_and_check(mstate m); /* The size of the smallest chunk held in bin with index i */ #define minsize_for_tree_index(i) \ ((SIZE_T_ONE << (((i) >> 1) + TREEBIN_SHIFT)) | \ - (((size_t)((i)&SIZE_T_ONE)) << (((i) >> 1) + TREEBIN_SHIFT - 1))) + (((size_t)((i) & SIZE_T_ONE)) << (((i) >> 1) + TREEBIN_SHIFT - 1))) /* ------------------------ Operations on bin maps ----------------------- */ @@ -3245,7 +3245,7 @@ static size_t traverse_and_check(mstate m); #else /* FOOTERS */ - /* Set foot of inuse chunk to be xor of mstate and seed */ + /* Set foot of inuse chunk to be xor of mstate and seed */ #define mark_inuse_foot(M, p, s) \ (((mchunkptr)((char *)(p) + (s)))->prev_foot = \ ((size_t)(M) ^ mparams.magic)) diff --git a/qemu_mode/libqasan/malloc.c b/qemu_mode/libqasan/malloc.c index d2db3856..4448f480 100644 --- a/qemu_mode/libqasan/malloc.c +++ b/qemu_mode/libqasan/malloc.c @@ -80,8 +80,8 @@ static unsigned char __tmp_alloc_zone[TMP_ZONE_SIZE]; #else // From dlmalloc.c -void *dlmalloc(size_t); -void dlfree(void *); +void *dlmalloc(size_t); +void dlfree(void *); #define backend_malloc dlmalloc #define backend_free dlfree diff --git a/src/afl-fuzz-one.c b/src/afl-fuzz-one.c index 2003be1f..b2306996 100644 --- a/src/afl-fuzz-one.c +++ b/src/afl-fuzz-one.c @@ -577,13 +577,13 @@ u8 fuzz_one_original(afl_state_t *afl) { * SIMPLE BITFLIP (+dictionary construction) * *********************************************/ -#define FLIP_BIT(_ar, _b) \ - do { \ - \ - u8 *_arf = (u8 *)(_ar); \ - u32 _bf = (_b); \ - _arf[(_bf) >> 3] ^= (128 >> ((_bf)&7)); \ - \ +#define FLIP_BIT(_ar, _b) \ + do { 
\ + \ + u8 *_arf = (u8 *)(_ar); \ + u32 _bf = (_b); \ + _arf[(_bf) >> 3] ^= (128 >> ((_bf) & 7)); \ + \ } while (0) /* Single walking bit. */ @@ -2216,7 +2216,7 @@ havoc_stage: } - retry_havoc_step : { + retry_havoc_step: { u32 r = rand_below(afl, rand_max), item; @@ -3703,13 +3703,13 @@ static u8 mopt_common_fuzzing(afl_state_t *afl, MOpt_globals_t MOpt_globals) { * SIMPLE BITFLIP (+dictionary construction) * *********************************************/ -#define FLIP_BIT(_ar, _b) \ - do { \ - \ - u8 *_arf = (u8 *)(_ar); \ - u32 _bf = (_b); \ - _arf[(_bf) >> 3] ^= (128 >> ((_bf)&7)); \ - \ +#define FLIP_BIT(_ar, _b) \ + do { \ + \ + u8 *_arf = (u8 *)(_ar); \ + u32 _bf = (_b); \ + _arf[(_bf) >> 3] ^= (128 >> ((_bf) & 7)); \ + \ } while (0) /* Single walking bit. */ diff --git a/src/afl-fuzz-redqueen.c b/src/afl-fuzz-redqueen.c index 43b5c8bd..86e7f1cf 100644 --- a/src/afl-fuzz-redqueen.c +++ b/src/afl-fuzz-redqueen.c @@ -1828,7 +1828,7 @@ static void try_to_add_to_dictN(afl_state_t *afl, u128 v, u8 size) { for (k = 0; k < size; ++k) { #else - u32 off = 16 - size; + u32 off = 16 - size; for (k = 16 - size; k < 16; ++k) { #endif diff --git a/utils/libtokencap/libtokencap.so.c b/utils/libtokencap/libtokencap.so.c index b21f3068..f4024799 100644 --- a/utils/libtokencap/libtokencap.so.c +++ b/utils/libtokencap/libtokencap.so.c @@ -55,7 +55,7 @@ #elif defined __HAIKU__ #include #elif defined __sun - /* For map addresses the old struct is enough */ +/* For map addresses the old struct is enough */ #include #include #endif @@ -168,7 +168,7 @@ static void __tokencap_load_mappings(void) { #elif defined __FreeBSD__ || defined __OpenBSD__ || defined __NetBSD__ #if defined __FreeBSD__ - int mib[] = {CTL_KERN, KERN_PROC, KERN_PROC_VMMAP, __tokencap_pid}; + int mib[] = {CTL_KERN, KERN_PROC, KERN_PROC_VMMAP, __tokencap_pid}; #elif defined __OpenBSD__ int mib[] = {CTL_KERN, KERN_PROC_VMMAP, __tokencap_pid}; #elif defined __NetBSD__ @@ -209,7 +209,7 @@ static void __tokencap_load_mappings(void) { #if defined __FreeBSD__ || defined __NetBSD__ #if defined __FreeBSD__ - size_t size = region->kve_structsize; + size_t size = region->kve_structsize; if (size == 0) break; #elif defined __NetBSD__ -- cgit 1.4.1 From 37505928bcec63a08fe50cdebdbf7b9b28b952d0 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Fri, 15 Dec 2023 09:23:30 +0100 Subject: fix 2 mutation bugs --- docs/Changelog.md | 3 +++ include/afl-mutations.h | 16 ++++++++-------- 2 files changed, 11 insertions(+), 8 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/docs/Changelog.md b/docs/Changelog.md index 7faa0ab3..0d75782d 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -5,6 +5,7 @@ ### Version ++4.09a (dev) - afl-fuzz: + - fixed the new mutation implementation for two bugs - added `AFL_FINAL_SYNC` which forces a final fuzzer sync (also for `-F`) before terminating. - added AFL_IGNORE_SEED_PROBLEMS to skip over seeds that time out instead @@ -23,6 +24,8 @@ - option -n will not use color in the output - instrumentation: - fix for a few string compare transform functions for LAF + - we are instrumenting __cxx internal functions again. this might break + a few targets, please report if so. 
- frida_mode: - fixes support for large map offsets - support for AFL_FUZZER_LOOPCOUNT for afl.rs and LLVMFuzzerTestOneInput diff --git a/include/afl-mutations.h b/include/afl-mutations.h index d709b90d..6338c93c 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -2456,14 +2456,14 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } - char buf[20]; - snprintf(buf, sizeof(buf), "%" PRId64, val); + char numbuf[32]; + snprintf(numbuf, sizeof(buf), "%" PRId64, val); u32 old_len = off2 - off; - u32 new_len = strlen(buf); + u32 new_len = strlen(numbuf); if (old_len == new_len) { - memcpy(buf + off, buf, new_len); + memcpy(buf + off, numbuf, new_len); } else { @@ -2473,7 +2473,7 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, /* Inserted part */ - memcpy(tmp_buf + off, buf, new_len); + memcpy(tmp_buf + off, numbuf, new_len); /* Tail */ memcpy(tmp_buf + off + new_len, buf + off2, len - off2); @@ -2509,9 +2509,9 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, } u64 val = rand_next(afl); - char buf[20]; - snprintf(buf, sizeof(buf), "%llu", val); - memcpy(buf + pos, buf, len); + char numbuf[32]; + snprintf(numbuf, sizeof(numbuf), "%llu", val); + memcpy(buf + pos, numbuf, len); break; -- cgit 1.4.1 From 7fabe5052bd41deec72fad43acd5219b5f506ac0 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 19 Dec 2023 09:26:11 +0100 Subject: fix MUT_INSERTASCIINUM --- include/afl-mutations.h | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 6338c93c..24c6b8ff 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -2490,12 +2490,13 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, case MUT_INSERTASCIINUM: { - u32 len = 1 + rand_below(afl, 8); + u32 ins_len = 1 + rand_below(afl, 8); u32 pos = rand_below(afl, len); /* Insert ascii number. 
*/ - if (unlikely(len < pos + len)) { + if (unlikely(len < pos + ins_len)) { + // no retry if we have a small input if (unlikely(len < 8)) { break; @@ -2511,7 +2512,20 @@ inline u32 afl_mutate(afl_state_t *afl, u8 *buf, u32 len, u32 steps, u64 val = rand_next(afl); char numbuf[32]; snprintf(numbuf, sizeof(numbuf), "%llu", val); - memcpy(buf + pos, numbuf, len); + size_t val_len = strlen(numbuf), off; + + if (ins_len > val_len) { + + ins_len = val_len; + off = 0; + + } else { + + off = val_len - ins_len; + + } + + memcpy(buf + pos, numbuf + off, ins_len); break; -- cgit 1.4.1 From f822cdeb747fb7aad8be7a9d9472331e36f3dd83 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 19 Dec 2023 09:29:12 +0100 Subject: fix MUT_STRATEGY_ARRAY_SIZE --- include/afl-mutations.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'include/afl-mutations.h') diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 24c6b8ff..dcc62d0b 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -32,7 +32,7 @@ #include #include "afl-fuzz.h" -#define MUT_STRATEGY_ARRAY_SIZE 256 +#define MUT_STRATEGY_ARRAY_SIZE 255 enum { -- cgit 1.4.1 From 806a76afaeb1e1c99847df95af4181b3d1b48a91 Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Tue, 19 Dec 2023 11:15:33 +0100 Subject: fix bad fix for MUT_STRATEGY_ARRAY_SIZE --- docs/Changelog.md | 7 +++++-- include/afl-mutations.h | 3 ++- 2 files changed, 7 insertions(+), 3 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/docs/Changelog.md b/docs/Changelog.md index 150ce6c7..133e460b 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -4,8 +4,11 @@ release of the tool. See README.md for the general instruction manual. ### Version ++4.10a (dev) - - default power schedule is now EXPLORE, due a fix in fast schedules - explore is slightly better now. + - afl-fuzz: + - default power schedule is now EXPLORE, due a fix in fast schedules + explore is slightly better now. + - fixed minor issues in the mutation engine, thanks to @futhewo for + reporting! 
### Version ++4.09c (release) diff --git a/include/afl-mutations.h b/include/afl-mutations.h index dcc62d0b..75e66484 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -32,7 +32,7 @@ #include #include "afl-fuzz.h" -#define MUT_STRATEGY_ARRAY_SIZE 255 +#define MUT_STRATEGY_ARRAY_SIZE 256 enum { @@ -1082,6 +1082,7 @@ u32 mutation_strategy_exploration_binary[MUT_STRATEGY_ARRAY_SIZE] = { MUT_CLONE_COPY, MUT_CLONE_COPY, MUT_CLONE_COPY, + MUT_CLONE_COPY, MUT_CLONE_FIXED, MUT_CLONE_FIXED, MUT_CLONE_FIXED, -- cgit 1.4.1 From f7ea0f569fa57e22548c1dc8eaba2903213e496e Mon Sep 17 00:00:00 2001 From: vanhauser-thc Date: Fri, 5 Apr 2024 14:52:53 +0200 Subject: fix aflpp custom mutator + standalone tool --- custom_mutators/aflpp/aflpp.c | 1 + custom_mutators/aflpp/standalone/aflpp-standalone.c | 7 ++----- docs/Changelog.md | 1 + include/afl-mutations.h | 5 ++++- src/afl-fuzz-state.c | 4 ---- 5 files changed, 8 insertions(+), 10 deletions(-) (limited to 'include/afl-mutations.h') diff --git a/custom_mutators/aflpp/aflpp.c b/custom_mutators/aflpp/aflpp.c index e15d0391..0b236f76 100644 --- a/custom_mutators/aflpp/aflpp.c +++ b/custom_mutators/aflpp/aflpp.c @@ -1,3 +1,4 @@ +#include "afl-fuzz.h" #include "afl-mutations.h" typedef struct my_mutator { diff --git a/custom_mutators/aflpp/standalone/aflpp-standalone.c b/custom_mutators/aflpp/standalone/aflpp-standalone.c index 361feaba..3a2cbc2f 100644 --- a/custom_mutators/aflpp/standalone/aflpp-standalone.c +++ b/custom_mutators/aflpp/standalone/aflpp-standalone.c @@ -1,9 +1,6 @@ +#include "afl-fuzz.h" #include "afl-mutations.h" -s8 interesting_8[] = {INTERESTING_8}; -s16 interesting_16[] = {INTERESTING_8, INTERESTING_16}; -s32 interesting_32[] = {INTERESTING_8, INTERESTING_16, INTERESTING_32}; - typedef struct my_mutator { afl_state_t *afl; @@ -155,7 +152,7 @@ int main(int argc, char *argv[]) { return -1; } - if (verbose) fprintf(stderr, "Mutation output length: %zu\n", outlen); + if (verbose) fprintf(stderr, "Mutation output length: %u\n", outlen); if (fwrite(outbuf, 1, outlen, out) != outlen) { fprintf(stderr, "Warning: incomplete write.\n"); diff --git a/docs/Changelog.md b/docs/Changelog.md index 94ea5fca..70f4e375 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -33,6 +33,7 @@ - afl-whatsup: - now also displays current average speed - small bugfixes + - Fixes for aflpp custom mutator and standalone tool - Minor edits to afl-persistent-config - Prevent temporary files being left behind on aborted afl-whatsup - More CPU benchmarks added to benchmark/ diff --git a/include/afl-mutations.h b/include/afl-mutations.h index 75e66484..79cf7c6a 100644 --- a/include/afl-mutations.h +++ b/include/afl-mutations.h @@ -30,10 +30,13 @@ #include #include -#include "afl-fuzz.h" #define MUT_STRATEGY_ARRAY_SIZE 256 +s8 interesting_8[] = {INTERESTING_8}; +s16 interesting_16[] = {INTERESTING_8, INTERESTING_16}; +s32 interesting_32[] = {INTERESTING_8, INTERESTING_16, INTERESTING_32}; + enum { /* 00 */ MUT_FLIPBIT, diff --git a/src/afl-fuzz-state.c b/src/afl-fuzz-state.c index ae327117..c61f00bd 100644 --- a/src/afl-fuzz-state.c +++ b/src/afl-fuzz-state.c @@ -28,10 +28,6 @@ #include "afl-fuzz.h" #include "envs.h" -s8 interesting_8[] = {INTERESTING_8}; -s16 interesting_16[] = {INTERESTING_8, INTERESTING_16}; -s32 interesting_32[] = {INTERESTING_8, INTERESTING_16, INTERESTING_32}; - char *power_names[POWER_SCHEDULES_NUM] = {"explore", "mmopt", "exploit", "fast", "coe", "lin", "quad", "rare", "seek"}; -- cgit 1.4.1
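
The MUT_INSERTASCIINUM fix above boils down to one idea: generate the decimal string first, then trim the copy length to the slot being overwritten, taking the tail digits when the string is longer than the slot, so the memcpy can never run past the end of the number buffer or the input. The following is an illustrative, standalone sketch of that corrected logic only -- it is not part of the patches above and does not reproduce AFL++ internals. The function name `overwrite_ascii_num`, the fixed test value, and the use of plain libc instead of AFL++'s `rand_below()`/`rand_next()` state are all assumptions made for this example.

```c
/* Sketch of the corrected "insert ASCII number" logic, under the
   assumptions stated above.  Not AFL++ code; names are made up. */

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Overwrite up to `ins_len` bytes at `pos` with ASCII digits of `val`.
   Returns the number of bytes actually written. */
static size_t overwrite_ascii_num(uint8_t *buf, size_t len, size_t pos,
                                  size_t ins_len, uint64_t val) {

  if (pos >= len) { return 0; }
  if (ins_len > len - pos) { ins_len = len - pos; }  /* stay inside buf */

  char numbuf[32];                                   /* own scratch buffer,
                                                        not the input buf */
  snprintf(numbuf, sizeof(numbuf), "%" PRIu64, val);

  size_t val_len = strlen(numbuf), off;

  if (ins_len > val_len) {

    ins_len = val_len;        /* short number: write all of its digits */
    off = 0;

  } else {

    off = val_len - ins_len;  /* long number: keep only the tail digits */

  }

  memcpy(buf + pos, numbuf + off, ins_len);
  return ins_len;

}

int main(void) {

  uint8_t data[] = "AAAAAAAAAAAAAAAA";

  /* 20-digit value, 6-byte slot -> only the last 6 digits are copied. */
  overwrite_ascii_num(data, sizeof(data) - 1, 4, 6, 18446744073709551615ULL);
  printf("%s\n", data);  /* prints AAAA551615AAAAAA */
  return 0;

}
```

Usage note: the earlier, buggy variant copied `len` (the full input length) bytes out of a 20-byte stack buffer; clamping to the trimmed `ins_len`, as in this sketch and in the committed fix, is what removes the overread.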