Diffstat (limited to 'src/afl-fuzz-queue.c')
 src/afl-fuzz-queue.c | 21 ++++++++++-----------
 1 file changed, 10 insertions(+), 11 deletions(-)
diff --git a/src/afl-fuzz-queue.c b/src/afl-fuzz-queue.c
index 39087878..da6b1eee 100644
--- a/src/afl-fuzz-queue.c
+++ b/src/afl-fuzz-queue.c
@@ -312,13 +312,12 @@ void update_bitmap_score(afl_state_t *afl, struct queue_entry *q) {
   u64 fav_factor;
   u64 fuzz_p2;
 
-  if (unlikely(afl->schedule >= FAST))
+  if (unlikely(afl->schedule >= FAST && afl->schedule <= RARE))
     fuzz_p2 = next_pow2(q->n_fuzz);
   else
     fuzz_p2 = q->fuzz_level;
 
-  if (unlikely(afl->schedule == MMOPT || afl->schedule == RARE) ||
-      unlikely(afl->fixed_seed)) {
+  if (unlikely(afl->schedule >= RARE) || unlikely(afl->fixed_seed)) {
 
     fav_factor = q->len << 2;
 
@@ -339,13 +338,12 @@ void update_bitmap_score(afl_state_t *afl, struct queue_entry *q) {
         /* Faster-executing or smaller test cases are favored. */
         u64 top_rated_fav_factor;
         u64 top_rated_fuzz_p2;
-        if (unlikely(afl->schedule >= FAST))
+        if (unlikely(afl->schedule >= FAST && afl->schedule <= RARE))
           top_rated_fuzz_p2 = next_pow2(afl->top_rated[i]->n_fuzz);
         else
           top_rated_fuzz_p2 = afl->top_rated[i]->fuzz_level;
 
-        if (unlikely(afl->schedule == MMOPT || afl->schedule == RARE) ||
-            unlikely(afl->fixed_seed)) {
+        if (unlikely(afl->schedule >= RARE) || unlikely(afl->fixed_seed)) {
 
           top_rated_fav_factor = afl->top_rated[i]->len << 2;
 
@@ -366,8 +364,7 @@ void update_bitmap_score(afl_state_t *afl, struct queue_entry *q) {
 
         }
 
-        if (unlikely(afl->schedule == MMOPT || afl->schedule == RARE) ||
-            unlikely(afl->fixed_seed)) {
+        if (unlikely(afl->schedule >= RARE) || unlikely(afl->fixed_seed)) {
 
           if (fav_factor > afl->top_rated[i]->len << 2) { continue; }
 
@@ -512,8 +509,7 @@ u32 calculate_score(afl_state_t *afl, struct queue_entry *q) {
   // Longer execution time means longer work on the input, the deeper in
   // coverage, the better the fuzzing, right? -mh
 
-  if (afl->schedule != MMOPT && afl->schedule != RARE &&
-      likely(!afl->fixed_seed)) {
+  if (afl->schedule >= RARE && likely(!afl->fixed_seed)) {
 
     if (q->exec_us * 0.1 > avg_exec_us) {
 
@@ -625,6 +621,9 @@ u32 calculate_score(afl_state_t *afl, struct queue_entry *q) {
     case EXPLORE:
       break;
 
+    case SEEK:
+      break;
+
     case EXPLOIT:
       factor = MAX_FACTOR;
       break;
@@ -718,7 +717,7 @@ u32 calculate_score(afl_state_t *afl, struct queue_entry *q) {
 
   }
 
-  if (unlikely(afl->schedule >= FAST)) {
+  if (unlikely(afl->schedule >= FAST && afl->schedule <= RARE)) {
 
     if (factor > MAX_FACTOR) { factor = MAX_FACTOR; }
     perf_score *= factor / POWER_BETA;
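
Note: the new range checks in this diff (schedule >= FAST && schedule <= RARE, schedule >= RARE) only work because of how the power-schedule enum is ordered. The sketch below shows the ordering these comparisons assume; the exact values and comments are an assumption based on the enum in include/afl-fuzz.h around the time SEEK was introduced, not part of this commit.

/* Assumed power-schedule enum ordering (sketch, not part of this diff).
   The range checks above rely on FAST..RARE being the contiguous block of
   n_fuzz-based schedules, with the new SEEK appended after RARE. */
enum {

  /* 00 */ EXPLORE, /* AFL default, exploration-based constant schedule */
  /* 01 */ MMOPT,   /* modified MOpt schedule                           */
  /* 02 */ EXPLOIT, /* exploitation-based constant schedule             */
  /* 03 */ FAST,    /* exponential schedule                             */
  /* 04 */ COE,     /* cut-off exponential schedule                     */
  /* 05 */ LIN,     /* linear schedule                                  */
  /* 06 */ QUAD,    /* quadratic schedule                               */
  /* 07 */ RARE,    /* rare-edges schedule                              */
  /* 08 */ SEEK,    /* like EXPLORE, but ignores timings                */

};

/* With this ordering, "schedule >= FAST && schedule <= RARE" selects only
   the n_fuzz-based schedules and no longer matches SEEK, while
   "schedule >= RARE" covers RARE and SEEK. */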