diff options
Diffstat (limited to 'src/afl-performance.c')
-rw-r--r-- | src/afl-performance.c | 27 |
1 file changed, 25 insertions, 2 deletions
diff --git a/src/afl-performance.c b/src/afl-performance.c index 07069108..6631f148 100644 --- a/src/afl-performance.c +++ b/src/afl-performance.c @@ -33,6 +33,17 @@ static inline uint64_t rotl(const uint64_t x, int k) { } +void rand_set_seed(afl_state_t *afl, s64 init_seed) { + + afl->init_seed = init_seed; + afl->rand_seed[0] = + hash64((u8 *)&afl->init_seed, sizeof(afl->init_seed), HASH_CONST); + afl->rand_seed[1] = afl->rand_seed[0] ^ 0x1234567890abcdef; + afl->rand_seed[2] = afl->rand_seed[0] & 0x0123456789abcdef; + afl->rand_seed[3] = afl->rand_seed[0] | 0x01abcde43f567908; + +} + uint32_t rand_next(afl_state_t *afl) { const uint32_t result = @@ -132,13 +143,25 @@ void long_jump(afl_state_t *afl) { /* we switch from afl's murmur implementation to xxh3 as it is 30% faster - and get 64 bit hashes instead of just 32 bit. Less collisions! :-) */ -u32 inline hash32(const void *key, u32 len, u32 seed) { +#ifdef _DEBUG +u32 hash32(u8 *key, u32 len, u32 seed) { + +#else +u32 inline hash32(u8 *key, u32 len, u32 seed) { + +#endif return (u32)XXH64(key, len, seed); } -u64 inline hash64(const void *key, u32 len, u64 seed) { +#ifdef _DEBUG +u64 hash64(u8 *key, u32 len, u64 seed) { + +#else +u64 inline hash64(u8 *key, u32 len, u64 seed) { + +#endif return XXH64(key, len, seed); |