| author    | vanhauser-thc <vh@thc.org> | 2023-01-18 14:21:44 +0100 |
|-----------|----------------------------|---------------------------|
| committer | vanhauser-thc <vh@thc.org> | 2023-01-18 14:21:44 +0100 |
| commit    | 0db662db7b433a08b01de7f5a989843450919b88 (patch) | |
| tree      | d90cbfed51fd7c62c9da281dfeaa4d5f08a766be | |
| parent    | 70f4b456faf8e361f6e0a34246708380c94cb36e (diff) | |
| download  | afl++-0db662db7b433a08b01de7f5a989843450919b88.tar.gz | |
fix
-rw-r--r-- | custom_mutators/autotokens/autotokens.cpp | 78 |
1 file changed, 41 insertions, 37 deletions
```diff
diff --git a/custom_mutators/autotokens/autotokens.cpp b/custom_mutators/autotokens/autotokens.cpp
index 57c35846..94f86413 100644
--- a/custom_mutators/autotokens/autotokens.cpp
+++ b/custom_mutators/autotokens/autotokens.cpp
@@ -851,43 +851,47 @@ extern "C" my_mutator_t *afl_custom_init(afl_state *afl, unsigned int seed) {
   // set common whitespace tokens
   // we deliberately do not put uncommon ones here to these will count as
   // identifier tokens.
-  token_to_id[" "] = current_id;
-  id_to_token[current_id] = " ";
-  ++current_id;
-  token_to_id["\t"] = current_id;
-  id_to_token[current_id] = "\t";
-  ++current_id;
-  token_to_id["\n"] = current_id;
-  id_to_token[current_id] = "\n";
-  ++current_id;
-  token_to_id["\r\n"] = current_id;
-  id_to_token[current_id] = "\r\n";
-  ++current_id;
-  token_to_id[" \n"] = current_id;
-  id_to_token[current_id] = " \n";
-  ++current_id;
-  token_to_id["  "] = current_id;
-  id_to_token[current_id] = "  ";
-  ++current_id;
-  token_to_id["\t\t"] = current_id;
-  id_to_token[current_id] = "\t\t";
-  ++current_id;
-  token_to_id["\n\n"] = current_id;
-  id_to_token[current_id] = "\n\n";
-  ++current_id;
-  token_to_id["\r\n\r\n"] = current_id;
-  id_to_token[current_id] = "\r\n\r\n";
-  ++current_id;
-  token_to_id["    "] = current_id;
-  id_to_token[current_id] = "    ";
-  ++current_id;
-  token_to_id["\t\t\t\t"] = current_id;
-  id_to_token[current_id] = "\t\t\t\t";
-  ++current_id;
-  token_to_id["\n\n\n\n"] = current_id;
-  id_to_token[current_id] = "\n\n\n\n";
-  ++current_id;
-  whitespace_ids = current_id;
+  if (!alternative_tokenize) {
+
+    token_to_id[" "] = current_id;
+    id_to_token[current_id] = " ";
+    ++current_id;
+    token_to_id["\t"] = current_id;
+    id_to_token[current_id] = "\t";
+    ++current_id;
+    token_to_id["\n"] = current_id;
+    id_to_token[current_id] = "\n";
+    ++current_id;
+    token_to_id["\r\n"] = current_id;
+    id_to_token[current_id] = "\r\n";
+    ++current_id;
+    token_to_id[" \n"] = current_id;
+    id_to_token[current_id] = " \n";
+    ++current_id;
+    token_to_id["  "] = current_id;
+    id_to_token[current_id] = "  ";
+    ++current_id;
+    token_to_id["\t\t"] = current_id;
+    id_to_token[current_id] = "\t\t";
+    ++current_id;
+    token_to_id["\n\n"] = current_id;
+    id_to_token[current_id] = "\n\n";
+    ++current_id;
+    token_to_id["\r\n\r\n"] = current_id;
+    id_to_token[current_id] = "\r\n\r\n";
+    ++current_id;
+    token_to_id["    "] = current_id;
+    id_to_token[current_id] = "    ";
+    ++current_id;
+    token_to_id["\t\t\t\t"] = current_id;
+    id_to_token[current_id] = "\t\t\t\t";
+    ++current_id;
+    token_to_id["\n\n\n\n"] = current_id;
+    id_to_token[current_id] = "\n\n\n\n";
+    ++current_id;
+    whitespace_ids = current_id;
+
+  }
 
   return data;
```
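For context: the commit wraps the hard-coded whitespace-token seeding in `if (!alternative_tokenize)`, so when the alternative tokenizer is selected no whitespace tokens are registered and `whitespace_ids` stays 0, i.e. whitespace runs fall through to identifier handling. Below is a minimal standalone sketch of that logic with the repetitive inserts collapsed into a loop; the declarations, types, and the `seed_whitespace_tokens()` wrapper are assumptions for illustration, not the committed code.

```cpp
#include <cstdint>
#include <cstdio>
#include <string>
#include <unordered_map>

// Assumed stand-ins for the globals used in autotokens.cpp.
static std::unordered_map<std::string, uint32_t> token_to_id;
static std::unordered_map<uint32_t, std::string> id_to_token;
static uint32_t current_id = 0;
static uint32_t whitespace_ids = 0;
static bool     alternative_tokenize = false;  // assumed flag; set by the mutator's config

static void seed_whitespace_tokens() {

  // Only the default tokenizer gets dedicated whitespace tokens; with the
  // alternative tokenizer nothing is registered and whitespace_ids stays 0.
  if (!alternative_tokenize) {

    for (const char *ws : {" ", "\t", "\n", "\r\n", " \n", "  ", "\t\t",
                           "\n\n", "\r\n\r\n", "    ", "\t\t\t\t",
                           "\n\n\n\n"}) {

      token_to_id[ws] = current_id;   // const char* converts to the std::string key
      id_to_token[current_id] = ws;
      ++current_id;

    }

    whitespace_ids = current_id;      // IDs below this value are whitespace tokens

  }

}

int main() {

  seed_whitespace_tokens();
  // Prints 12 with the default tokenizer, 0 when alternative_tokenize is set.
  printf("whitespace_ids = %u\n", whitespace_ids);
  return 0;

}
```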