author     hexcoder- <heiko@hexco.de>   2019-08-10 19:58:18 +0200
committer  hexcoder- <heiko@hexco.de>   2019-08-10 19:58:18 +0200
commit     642cf8b5ff662e125bd96d6dd0377c0fcaf9e29d (patch)
tree       05ffa0d44b0c999c871b111664d3c6c24caff779
parent     0612aa2b65a6995c8cd372385a0d096efef7ba93 (diff)
download   afl++-642cf8b5ff662e125bd96d6dd0377c0fcaf9e29d.tar.gz
performance tuning: prefer preincrement over postincrement
-rw-r--r--  afl-fuzz.c  562
1 file changed, 281 insertions(+), 281 deletions(-)
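The change is mechanical: every post-increment whose result is discarded becomes a pre-increment. For the plain integer counters used here, the two forms compile to identical code under any optimizing compiler, so the benefit is largely stylistic; pre-increment never has to preserve a copy of the old value, which does matter for C++ iterators and other non-trivial types. A minimal sketch (not part of the commit) of the only observable difference between the two operators:

    #include <stdio.h>

    int main(void) {

      int i = 0;

      int a = i++;   /* post-increment: a gets the old value (0), then i becomes 1 */
      int b = ++i;   /* pre-increment:  i becomes 2 first, then b gets the new value (2) */

      printf("a=%d b=%d i=%d\n", a, b, i);   /* prints: a=0 b=2 i=2 */

      /* When the expression's value is discarded -- as in every loop header and
         counter bump in this patch -- the two forms behave identically. */
      int n = 0;
      for (int j = 0; j < 4; ++j) ++n;
      printf("n=%d\n", n);                   /* prints: n=4 */

      return 0;
    }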
diff --git a/afl-fuzz.c b/afl-fuzz.c
index a1388a55..be44c69e 100644
--- a/afl-fuzz.c
+++ b/afl-fuzz.c
@@ -650,7 +650,7 @@ int select_algorithm(void) {
 
   double sele = ((double)(random()%10000)*0.0001);
   j_puppet = 0;
-  for (i_puppet = 0; i_puppet < operator_num; i_puppet++) {
+  for (i_puppet = 0; i_puppet < operator_num; ++i_puppet) {
       if (unlikely(i_puppet == 0)) {
           if (sele < probability_now[swarm_now][i_puppet])
             break;
@@ -718,7 +718,7 @@ static void shuffle_ptrs(void** ptrs, u32 cnt) {
 
   u32 i;
 
-  for (i = 0; i < cnt - 2; i++) {
+  for (i = 0; i < cnt - 2; ++i) {
 
     u32 j = i + UR(cnt - i);
     void *s = ptrs[i];
@@ -817,7 +817,7 @@ static void bind_to_free_cpu(void) {
 
   closedir(d);
 
-  for (i = 0; i < cpu_core_count; i++) if (!cpu_used[i]) break;
+  for (i = 0; i < cpu_core_count; ++i) if (!cpu_used[i]) break;
 
   if (i == cpu_core_count) {
 
@@ -857,7 +857,7 @@ static void locate_diffs(u8* ptr1, u8* ptr2, u32 len, s32* first, s32* last) {
   s32 l_loc = -1;
   u32 pos;
 
-  for (pos = 0; pos < len; pos++) {
+  for (pos = 0; pos < len; ++pos) {
 
     if (*(ptr1++) != *(ptr2++)) {
 
@@ -1133,8 +1133,8 @@ static void add_to_queue(u8* fname, u32 len, u8 passed_det) {
 
   } else q_prev100 = queue = queue_top = q;
 
-  queued_paths++;
-  pending_not_fuzzed++;
+  ++queued_paths;
+  ++pending_not_fuzzed;
 
   cycles_wo_finds = 0;
 
@@ -1275,8 +1275,8 @@ static inline u8 has_new_bits(u8* virgin_map) {
 
     }
 
-    current++;
-    virgin++;
+    ++current;
+    ++virgin;
 
   }
 
@@ -1336,10 +1336,10 @@ static u32 count_bytes(u8* mem) {
     u32 v = *(ptr++);
 
     if (!v) continue;
-    if (v & FF(0)) ret++;
-    if (v & FF(1)) ret++;
-    if (v & FF(2)) ret++;
-    if (v & FF(3)) ret++;
+    if (v & FF(0)) ++ret;
+    if (v & FF(1)) ++ret;
+    if (v & FF(2)) ++ret;
+    if (v & FF(3)) ++ret;
 
   }
 
@@ -1365,10 +1365,10 @@ static u32 count_non_255_bytes(u8* mem) {
        case. */
 
     if (v == 0xffffffff) continue;
-    if ((v & FF(0)) != FF(0)) ret++;
-    if ((v & FF(1)) != FF(1)) ret++;
-    if ((v & FF(2)) != FF(2)) ret++;
-    if ((v & FF(3)) != FF(3)) ret++;
+    if ((v & FF(0)) != FF(0)) ++ret;
+    if ((v & FF(1)) != FF(1)) ++ret;
+    if ((v & FF(2)) != FF(2)) ++ret;
+    if ((v & FF(3)) != FF(3)) ++ret;
 
   }
 
@@ -1414,7 +1414,7 @@ static void simplify_trace(u64* mem) {
 
     } else *mem = 0x0101010101010101ULL;
 
-    mem++;
+    ++mem;
 
   }
 
@@ -1441,7 +1441,7 @@ static void simplify_trace(u32* mem) {
 
     } else *mem = 0x01010101;
 
-    mem++;
+    ++mem;
   }
 
 }
@@ -1504,7 +1504,7 @@ static inline void classify_counts(u64* mem) {
 
     }
 
-    mem++;
+    ++mem;
 
   }
 
@@ -1529,7 +1529,7 @@ static inline void classify_counts(u32* mem) {
 
     }
 
-    mem++;
+    ++mem;
 
   }
 
@@ -1549,7 +1549,7 @@ static void minimize_bits(u8* dst, u8* src) {
   while (i < MAP_SIZE) {
 
     if (*(src++)) dst[i >> 3] |= 1 << (i & 7);
-    i++;
+    ++i;
 
   }
 
@@ -1589,7 +1589,7 @@ static void update_bitmap_score(struct queue_entry* q) {
   /* For every byte set in trace_bits[], see if there is a previous winner,
      and how it compares to us. */
 
-  for (i = 0; i < MAP_SIZE; i++)
+  for (i = 0; i < MAP_SIZE; ++i)
 
     if (trace_bits[i]) {
 
@@ -1621,7 +1621,7 @@ static void update_bitmap_score(struct queue_entry* q) {
        /* Insert ourselves as the new winner. */
 
        top_rated[i] = q;
-       q->tc_ref++;
+       ++q->tc_ref;
 
        if (!q->trace_mini) {
          q->trace_mini = ck_alloc(MAP_SIZE >> 3);
@@ -1666,7 +1666,7 @@ static void cull_queue(void) {
   /* Let's see if anything in the bitmap isn't captured in temp_v.
      If yes, and if it has a top_rated[] contender, let's use it. */
 
-  for (i = 0; i < MAP_SIZE; i++)
+  for (i = 0; i < MAP_SIZE; ++i)
     if (top_rated[i] && (temp_v[i >> 3] & (1 << (i & 7)))) {
 
       u32 j = MAP_SIZE >> 3;
@@ -1678,9 +1678,9 @@ static void cull_queue(void) {
           temp_v[j] &= ~top_rated[i]->trace_mini[j];
 
       top_rated[i]->favored = 1;
-      queued_favored++;
+      ++queued_favored;
 
-      if (top_rated[i]->fuzz_level == 0 || !top_rated[i]->was_fuzzed) pending_favored++;
+      if (top_rated[i]->fuzz_level == 0 || !top_rated[i]->was_fuzzed) ++pending_favored;
 
     }
 
@@ -1785,7 +1785,7 @@ static void read_testcases(void) {
 
   }
 
-  for (i = 0; i < nl_cnt; i++) {
+  for (i = 0; i < nl_cnt; ++i) {
 
     struct stat st;
 
@@ -1881,15 +1881,15 @@ static void load_extras_file(u8* fname, u32* min_len, u32* max_len,
     u8 *rptr, *wptr;
     u32 klen = 0;
 
-    cur_line++;
+    ++cur_line;
 
     /* Trim on left and right. */
 
-    while (isspace(*lptr)) lptr++;
+    while (isspace(*lptr)) ++lptr;
 
     rptr = lptr + strlen(lptr) - 1;
-    while (rptr >= lptr && isspace(*rptr)) rptr--;
-    rptr++;
+    while (rptr >= lptr && isspace(*rptr)) --rptr;
+    ++rptr;
     *rptr = 0;
 
     /* Skip empty lines and comments. */
@@ -1907,28 +1907,28 @@ static void load_extras_file(u8* fname, u32* min_len, u32* max_len,
 
     /* Skip alphanumerics and dashes (label). */
 
-    while (isalnum(*lptr) || *lptr == '_') lptr++;
+    while (isalnum(*lptr) || *lptr == '_') ++lptr;
 
     /* If @number follows, parse that. */
 
     if (*lptr == '@') {
 
-      lptr++;
+      ++lptr;
       if (atoi(lptr) > dict_level) continue;
-      while (isdigit(*lptr)) lptr++;
+      while (isdigit(*lptr)) ++lptr;
 
     }
 
     /* Skip whitespace and = signs. */
 
-    while (isspace(*lptr) || *lptr == '=') lptr++;
+    while (isspace(*lptr) || *lptr == '=') ++lptr;
 
     /* Consume opening '"'. */
 
     if (*lptr != '"')
       FATAL("Malformed name=\"keyword\" pair in line %u.", cur_line);
 
-    lptr++;
+    ++lptr;
 
     if (!*lptr) FATAL("Empty keyword in line %u.", cur_line);
 
@@ -1952,7 +1952,7 @@ static void load_extras_file(u8* fname, u32* min_len, u32* max_len,
 
         case '\\':
 
-          lptr++;
+          ++lptr;
 
           if (*lptr == '\\' || *lptr == '"') {
             *(wptr++) = *(lptr++);
@@ -1968,14 +1968,14 @@ static void load_extras_file(u8* fname, u32* min_len, u32* max_len,
             (strchr(hexdigits, tolower(lptr[2])) - hexdigits);
 
           lptr += 3;
-          klen++;
+          ++klen;
 
           break;
 
         default:
 
           *(wptr++) = *(lptr++);
-          klen++;
+          ++klen;
 
       }
 
@@ -1990,7 +1990,7 @@ static void load_extras_file(u8* fname, u32* min_len, u32* max_len,
     if (*min_len > klen) *min_len = klen;
     if (*max_len < klen) *max_len = klen;
 
-    extras_cnt++;
+    ++extras_cnt;
 
   }
 
@@ -2073,7 +2073,7 @@ static void load_extras(u8* dir) {
     close(fd);
     ck_free(fn);
 
-    extras_cnt++;
+    ++extras_cnt;
 
   }
 
@@ -2123,7 +2123,7 @@ static void maybe_add_auto(u8* mem, u32 len) {
 
   /* Skip runs of identical bytes. */
 
-  for (i = 1; i < len; i++)
+  for (i = 1; i < len; ++i)
     if (mem[0] ^ mem[i]) break;
 
   if (i == len) return;
@@ -2154,10 +2154,10 @@ static void maybe_add_auto(u8* mem, u32 len) {
      match. We optimize by exploiting the fact that extras[] are sorted
      by size. */
 
-  for (i = 0; i < extras_cnt; i++)
+  for (i = 0; i < extras_cnt; ++i)
     if (extras[i].len >= len) break;
 
-  for (; i < extras_cnt && extras[i].len == len; i++)
+  for (; i < extras_cnt && extras[i].len == len; ++i)
     if (!memcmp_nocase(extras[i].data, mem, len)) return;
 
   /* Last but not least, check a_extras[] for matches. There are no
@@ -2165,7 +2165,7 @@ static void maybe_add_auto(u8* mem, u32 len) {
 
   auto_changed = 1;
 
-  for (i = 0; i < a_extras_cnt; i++) {
+  for (i = 0; i < a_extras_cnt; ++i) {
 
     if (a_extras[i].len == len && !memcmp_nocase(a_extras[i].data, mem, len)) {
 
@@ -2187,7 +2187,7 @@ static void maybe_add_auto(u8* mem, u32 len) {
 
     a_extras[a_extras_cnt].data = ck_memdup(mem, len);
     a_extras[a_extras_cnt].len  = len;
-    a_extras_cnt++;
+    ++a_extras_cnt;
 
   } else {
 
@@ -2226,7 +2226,7 @@ static void save_auto(void) {
   if (!auto_changed) return;
   auto_changed = 0;
 
-  for (i = 0; i < MIN(USE_AUTO_EXTRAS, a_extras_cnt); i++) {
+  for (i = 0; i < MIN(USE_AUTO_EXTRAS, a_extras_cnt); ++i) {
 
     u8* fn = alloc_printf("%s/queue/.state/auto_extras/auto_%06u", out_dir, i);
     s32 fd;
@@ -2251,7 +2251,7 @@ static void load_auto(void) {
 
   u32 i;
 
-  for (i = 0; i < USE_AUTO_EXTRAS; i++) {
+  for (i = 0; i < USE_AUTO_EXTRAS; ++i) {
 
     u8  tmp[MAX_AUTO_EXTRA + 1];
     u8* fn = alloc_printf("%s/.state/auto_extras/auto_%06u", in_dir, i);
@@ -2294,12 +2294,12 @@ static void destroy_extras(void) {
 
   u32 i;
 
-  for (i = 0; i < extras_cnt; i++) 
+  for (i = 0; i < extras_cnt; ++i) 
     ck_free(extras[i].data);
 
   ck_free(extras);
 
-  for (i = 0; i < a_extras_cnt; i++) 
+  for (i = 0; i < a_extras_cnt; ++i) 
     ck_free(a_extras[i].data);
 
   ck_free(a_extras);
@@ -2763,7 +2763,7 @@ static u8 run_target(char** argv, u32 timeout) {
 
   setitimer(ITIMER_REAL, &it, NULL);
 
-  total_execs++;
+  ++total_execs;
 
   /* Any subsequent operations on trace_bits must not be moved by the
      compiler below this point. Past this location, trace_bits[] behave
@@ -2904,7 +2904,7 @@ static u8 calibrate_case(char** argv, struct queue_entry* q, u8* use_mem,
     use_tmout = MAX(exec_tmout + CAL_TMOUT_ADD,
                     exec_tmout * CAL_TMOUT_PERC / 100);
 
-  q->cal_failed++;
+  ++q->cal_failed;
 
   stage_name = "calibration";
   stage_max  = fast_cal ? 3 : CAL_CYCLES;
@@ -2919,7 +2919,7 @@ static u8 calibrate_case(char** argv, struct queue_entry* q, u8* use_mem,
 
   start_us = get_cur_time_us();
 
-  for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+  for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
     u32 cksum;
 
@@ -2950,7 +2950,7 @@ static u8 calibrate_case(char** argv, struct queue_entry* q, u8* use_mem,
 
         u32 i;
 
-        for (i = 0; i < MAP_SIZE; i++) {
+        for (i = 0; i < MAP_SIZE; ++i) {
 
           if (!var_bytes[i] && first_trace[i] != trace_bits[i]) {
 
@@ -2988,7 +2988,7 @@ static u8 calibrate_case(char** argv, struct queue_entry* q, u8* use_mem,
   q->cal_failed  = 0;
 
   total_bitmap_size += q->bitmap_size;
-  total_bitmap_entries++;
+  ++total_bitmap_entries;
 
   update_bitmap_score(q);
 
@@ -3002,7 +3002,7 @@ abort_calibration:
 
   if (new_bits == 2 && !q->has_new_cov) {
     q->has_new_cov = 1;
-    queued_with_cov++;
+    ++queued_with_cov;
   }
 
   /* Mark variable paths. */
@@ -3013,7 +3013,7 @@ abort_calibration:
 
     if (!q->var_behavior) {
       mark_as_variable(q);
-      queued_variable++;
+      ++queued_variable;
     }
 
   }
@@ -3037,7 +3037,7 @@ static void check_map_coverage(void) {
 
   if (count_bytes(trace_bits) < 100) return;
 
-  for (i = (1 << (MAP_SIZE_POW2 - 1)); i < MAP_SIZE; i++)
+  for (i = (1 << (MAP_SIZE_POW2 - 1)); i < MAP_SIZE; ++i)
     if (trace_bits[i]) return;
 
   WARNF("Recompile binary with newer version of afl to improve coverage!");
@@ -3104,7 +3104,7 @@ static void perform_dry_run(char** argv) {
           if (timeout_given > 1) {
             WARNF("Test case results in a timeout (skipping)");
             q->cal_failed = CAL_CHANCES;
-            cal_failures++;
+            ++cal_failures;
             break;
           }
 
@@ -3139,7 +3139,7 @@ static void perform_dry_run(char** argv) {
         if (skip_crashes) {
           WARNF("Test case results in a crash (skipping)");
           q->cal_failed = CAL_CHANCES;
-          cal_failures++;
+          ++cal_failures;
           break;
         }
 
@@ -3215,7 +3215,7 @@ static void perform_dry_run(char** argv) {
 
       case FAULT_NOBITS: 
 
-        useless_at_start++;
+        ++useless_at_start;
 
         if (!in_bitmap && !shuffle_queue)
           WARNF("No new instrumentation output, test case may be useless.");
@@ -3297,7 +3297,7 @@ static void pivot_inputs(void) {
     u8  *nfn, *rsl = strrchr(q->fname, '/');
     u32 orig_id;
 
-    if (!rsl) rsl = q->fname; else rsl++;
+    if (!rsl) rsl = q->fname; else ++rsl;
 
     /* If the original file name conforms to the syntax and the recorded
        ID matches the one we'd assign, just use the original file name.
@@ -3364,7 +3364,7 @@ static void pivot_inputs(void) {
     if (q->passed_det) mark_as_det_done(q);
 
     q = q->next;
-    id++;
+    ++id;
 
   }
 
@@ -3494,7 +3494,7 @@ static u8 save_if_interesting(char** argv, void* mem, u32 len, u8 fault) {
        future fuzzing, etc. */
 
     if (!(hnb = has_new_bits(virgin_bits))) {
-      if (crash_mode) total_crashes++;
+      if (crash_mode) ++total_crashes;
       return 0;
     }    
 
@@ -3513,7 +3513,7 @@ static u8 save_if_interesting(char** argv, void* mem, u32 len, u8 fault) {
 
     if (hnb == 2) {
       queue_top->has_new_cov = 1;
-      queued_with_cov++;
+      ++queued_with_cov;
     }
 
     queue_top->exec_cksum = cksum;
@@ -3544,7 +3544,7 @@ static u8 save_if_interesting(char** argv, void* mem, u32 len, u8 fault) {
          hang-specific bitmap as a signal of uniqueness. In "dumb" mode, we
          just keep everything. */
 
-      total_tmouts++;
+      ++total_tmouts;
 
       if (unique_hangs >= KEEP_UNIQUE_HANG) return keeping;
 
@@ -3560,7 +3560,7 @@ static u8 save_if_interesting(char** argv, void* mem, u32 len, u8 fault) {
 
       }
 
-      unique_tmouts++;
+      ++unique_tmouts;
 
       /* Before saving, we make sure that it's a genuine hang by re-running
          the target with a more generous timeout (unless the default timeout
@@ -3594,7 +3594,7 @@ static u8 save_if_interesting(char** argv, void* mem, u32 len, u8 fault) {
 
 #endif /* ^!SIMPLE_FILES */
 
-      unique_hangs++;
+      ++unique_hangs;
 
       last_hang_time = get_cur_time();
 
@@ -3608,7 +3608,7 @@ keep_as_crash:
          except for slightly different limits and no need to re-run test
          cases. */
 
-      total_crashes++;
+      ++total_crashes;
 
       if (unique_crashes >= KEEP_UNIQUE_CRASH) return keeping;
 
@@ -3638,7 +3638,7 @@ keep_as_crash:
 
 #endif /* ^!SIMPLE_FILES */
 
-      unique_crashes++;
+      ++unique_crashes;
 
       last_crash_time = get_cur_time();
       last_crash_execs = total_execs;
@@ -4868,7 +4868,7 @@ static u8 trim_case_python(char** argv, struct queue_entry* q, u8* in_buf) {
     write_to_testcase(retbuf, retlen);
 
     fault = run_target(argv, exec_tmout);
-    trim_execs++;
+    ++trim_execs;
 
     if (stop_soon || fault == FAULT_ERROR) goto abort_trimming;
 
@@ -4994,7 +4994,7 @@ static u8 trim_case(char** argv, struct queue_entry* q, u8* in_buf) {
       write_with_gap(in_buf, q->len, remove_pos, trim_avail);
 
       fault = run_target(argv, exec_tmout);
-      trim_execs++;
+      ++trim_execs;
 
       if (stop_soon || fault == FAULT_ERROR) goto abort_trimming;
 
@@ -5032,7 +5032,7 @@ static u8 trim_case(char** argv, struct queue_entry* q, u8* in_buf) {
       /* Since this can be slow, update the screen every now and then. */
 
       if (!(trim_exec++ % stats_update_freq)) show_stats();
-      stage_cur++;
+      ++stage_cur;
 
     }
 
@@ -5093,7 +5093,7 @@ EXP_ST u8 common_fuzz_stuff(char** argv, u8* out_buf, u32 len) {
   if (fault == FAULT_TMOUT) {
 
     if (subseq_tmouts++ > TMOUT_LIMIT) {
-      cur_skipped_paths++;
+      ++cur_skipped_paths;
       return 1;
     }
 
@@ -5105,7 +5105,7 @@ EXP_ST u8 common_fuzz_stuff(char** argv, u8* out_buf, u32 len) {
   if (skip_requested) {
 
      skip_requested = 0;
-     cur_skipped_paths++;
+     ++cur_skipped_paths;
      return 1;
 
   }
@@ -5315,7 +5315,7 @@ static u8 could_be_bitflip(u32 xor_val) {
 
   /* Shift left until first bit set. */
 
-  while (!(xor_val & 1)) { sh++; xor_val >>= 1; }
+  while (!(xor_val & 1)) { ++sh; xor_val >>= 1; }
 
   /* 1-, 2-, and 4-bit patterns are OK anywhere. */
 
@@ -5345,12 +5345,12 @@ static u8 could_be_arith(u32 old_val, u32 new_val, u8 blen) {
 
   /* See if one-byte adjustments to any byte could produce this result. */
 
-  for (i = 0; i < blen; i++) {
+  for (i = 0; i < blen; ++i) {
 
     u8 a = old_val >> (8 * i),
        b = new_val >> (8 * i);
 
-    if (a != b) { diffs++; ov = a; nv = b; }
+    if (a != b) { ++diffs; ov = a; nv = b; }
 
   }
 
@@ -5369,12 +5369,12 @@ static u8 could_be_arith(u32 old_val, u32 new_val, u8 blen) {
 
   diffs = 0;
 
-  for (i = 0; i < blen / 2; i++) {
+  for (i = 0; i < blen / 2; ++i) {
 
     u16 a = old_val >> (16 * i),
         b = new_val >> (16 * i);
 
-    if (a != b) { diffs++; ov = a; nv = b; }
+    if (a != b) { ++diffs; ov = a; nv = b; }
 
   }
 
@@ -5427,9 +5427,9 @@ static u8 could_be_interest(u32 old_val, u32 new_val, u8 blen, u8 check_le) {
   /* See if one-byte insertions from interesting_8 over old_val could
      produce new_val. */
 
-  for (i = 0; i < blen; i++) {
+  for (i = 0; i < blen; ++i) {
 
-    for (j = 0; j < sizeof(interesting_8); j++) {
+    for (j = 0; j < sizeof(interesting_8); ++j) {
 
       u32 tval = (old_val & ~(0xff << (i * 8))) |
                  (((u8)interesting_8[j]) << (i * 8));
@@ -5447,9 +5447,9 @@ static u8 could_be_interest(u32 old_val, u32 new_val, u8 blen, u8 check_le) {
 
   /* See if two-byte insertions over old_val could give us new_val. */
 
-  for (i = 0; i < blen - 1; i++) {
+  for (i = 0; i < blen - 1; ++i) {
 
-    for (j = 0; j < sizeof(interesting_16) / 2; j++) {
+    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {
 
       u32 tval = (old_val & ~(0xffff << (i * 8))) |
                  (((u16)interesting_16[j]) << (i * 8));
@@ -5476,7 +5476,7 @@ static u8 could_be_interest(u32 old_val, u32 new_val, u8 blen, u8 check_le) {
     /* See if four-byte insertions could produce the same result
        (LE only). */
 
-    for (j = 0; j < sizeof(interesting_32) / 4; j++)
+    for (j = 0; j < sizeof(interesting_32) / 4; ++j)
       if (new_val == (u32)interesting_32[j]) return 1;
 
   }
@@ -5588,7 +5588,7 @@ static u8 fuzz_one_original(char** argv) {
     }
 
     if (stop_soon || res != crash_mode) {
-      cur_skipped_paths++;
+      ++cur_skipped_paths;
       goto abandon_entry;
     }
 
@@ -5606,7 +5606,7 @@ static u8 fuzz_one_original(char** argv) {
       FATAL("Unable to execute target application");
 
     if (stop_soon) {
-      cur_skipped_paths++;
+      ++cur_skipped_paths;
       goto abandon_entry;
     }
 
@@ -5639,7 +5639,7 @@ static u8 fuzz_one_original(char** argv) {
 
     orig_hit_cnt = queued_paths + unique_crashes;
 
-    for (stage_cur = 0 ; stage_cur < stage_max ; stage_cur++) {
+    for (stage_cur = 0 ; stage_cur < stage_max ; ++stage_cur) {
       size_t orig_size = (size_t) len;
       size_t mutated_size = custom_mutator(out_buf, orig_size, mutated_buf, max_seed_size, UR(UINT32_MAX));
       if (mutated_size > 0) {
@@ -5712,7 +5712,7 @@ static u8 fuzz_one_original(char** argv) {
 
   prev_cksum = queue_cur->exec_cksum;
 
-  for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+  for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
     stage_cur_byte = stage_cur >> 3;
 
@@ -5759,7 +5759,7 @@ static u8 fuzz_one_original(char** argv) {
            final character and force output. */
 
         if (a_len < MAX_AUTO_EXTRA) a_collect[a_len] = out_buf[stage_cur >> 3];
-        a_len++;
+        ++a_len;
 
         if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA)
           maybe_add_auto(a_collect, a_len);
@@ -5783,7 +5783,7 @@ static u8 fuzz_one_original(char** argv) {
       if (cksum != queue_cur->exec_cksum) {
 
         if (a_len < MAX_AUTO_EXTRA) a_collect[a_len] = out_buf[stage_cur >> 3];        
-        a_len++;
+        ++a_len;
 
       }
 
@@ -5804,7 +5804,7 @@ static u8 fuzz_one_original(char** argv) {
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+  for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
     stage_cur_byte = stage_cur >> 3;
 
@@ -5831,7 +5831,7 @@ static u8 fuzz_one_original(char** argv) {
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+  for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
     stage_cur_byte = stage_cur >> 3;
 
@@ -5875,7 +5875,7 @@ static u8 fuzz_one_original(char** argv) {
 
   if (EFF_APOS(len - 1) != 0) {
     eff_map[EFF_APOS(len - 1)] = 1;
-    eff_cnt++;
+    ++eff_cnt;
   }
 
   /* Walking byte. */
@@ -5886,7 +5886,7 @@ static u8 fuzz_one_original(char** argv) {
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+  for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
     stage_cur_byte = stage_cur;
 
@@ -5913,7 +5913,7 @@ static u8 fuzz_one_original(char** argv) {
 
       if (cksum != queue_cur->exec_cksum) {
         eff_map[EFF_APOS(stage_cur)] = 1;
-        eff_cnt++;
+        ++eff_cnt;
       }
 
     }
@@ -5957,7 +5957,7 @@ static u8 fuzz_one_original(char** argv) {
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len - 1; i++) {
+  for (i = 0; i < len - 1; ++i) {
 
     /* Let's consult the effector map... */
 
@@ -5971,7 +5971,7 @@ static u8 fuzz_one_original(char** argv) {
     *(u16*)(out_buf + i) ^= 0xFFFF;
 
     if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-    stage_cur++;
+    ++stage_cur;
 
     *(u16*)(out_buf + i) ^= 0xFFFF;
 
@@ -5994,7 +5994,7 @@ static u8 fuzz_one_original(char** argv) {
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len - 3; i++) {
+  for (i = 0; i < len - 3; ++i) {
 
     /* Let's consult the effector map... */
     if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
@@ -6008,7 +6008,7 @@ static u8 fuzz_one_original(char** argv) {
     *(u32*)(out_buf + i) ^= 0xFFFFFFFF;
 
     if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-    stage_cur++;
+    ++stage_cur;
 
     *(u32*)(out_buf + i) ^= 0xFFFFFFFF;
 
@@ -6038,7 +6038,7 @@ skip_bitflip:
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len; i++) {
+  for (i = 0; i < len; ++i) {
 
     u8 orig = out_buf[i];
 
@@ -6051,7 +6051,7 @@ skip_bitflip:
 
     stage_cur_byte = i;
 
-    for (j = 1; j <= ARITH_MAX; j++) {
+    for (j = 1; j <= ARITH_MAX; ++j) {
 
       u8 r = orig ^ (orig + j);
 
@@ -6064,7 +6064,7 @@ skip_bitflip:
         out_buf[i] = orig + j;
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6076,7 +6076,7 @@ skip_bitflip:
         out_buf[i] = orig - j;
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6102,7 +6102,7 @@ skip_bitflip:
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len - 1; i++) {
+  for (i = 0; i < len - 1; ++i) {
 
     u16 orig = *(u16*)(out_buf + i);
 
@@ -6115,7 +6115,7 @@ skip_bitflip:
 
     stage_cur_byte = i;
 
-    for (j = 1; j <= ARITH_MAX; j++) {
+    for (j = 1; j <= ARITH_MAX; ++j) {
 
       u16 r1 = orig ^ (orig + j),
           r2 = orig ^ (orig - j),
@@ -6135,9 +6135,9 @@ skip_bitflip:
         *(u16*)(out_buf + i) = orig + j;
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
  
-      } else stage_max--;
+      } else --stage_max;
 
       if ((orig & 0xff) < j && !could_be_bitflip(r2)) {
 
@@ -6145,7 +6145,7 @@ skip_bitflip:
         *(u16*)(out_buf + i) = orig - j;
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6160,7 +6160,7 @@ skip_bitflip:
         *(u16*)(out_buf + i) = SWAP16(SWAP16(orig) + j);
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6170,7 +6170,7 @@ skip_bitflip:
         *(u16*)(out_buf + i) = SWAP16(SWAP16(orig) - j);
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6196,7 +6196,7 @@ skip_bitflip:
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len - 3; i++) {
+  for (i = 0; i < len - 3; ++i) {
 
     u32 orig = *(u32*)(out_buf + i);
 
@@ -6210,7 +6210,7 @@ skip_bitflip:
 
     stage_cur_byte = i;
 
-    for (j = 1; j <= ARITH_MAX; j++) {
+    for (j = 1; j <= ARITH_MAX; ++j) {
 
       u32 r1 = orig ^ (orig + j),
           r2 = orig ^ (orig - j),
@@ -6228,7 +6228,7 @@ skip_bitflip:
         *(u32*)(out_buf + i) = orig + j;
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6238,7 +6238,7 @@ skip_bitflip:
         *(u32*)(out_buf + i) = orig - j;
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6252,7 +6252,7 @@ skip_bitflip:
         *(u32*)(out_buf + i) = SWAP32(SWAP32(orig) + j);
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6262,7 +6262,7 @@ skip_bitflip:
         *(u32*)(out_buf + i) = SWAP32(SWAP32(orig) - j);
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6294,7 +6294,7 @@ skip_arith:
 
   /* Setting 8-bit integers. */
 
-  for (i = 0; i < len; i++) {
+  for (i = 0; i < len; ++i) {
 
     u8 orig = out_buf[i];
 
@@ -6307,7 +6307,7 @@ skip_arith:
 
     stage_cur_byte = i;
 
-    for (j = 0; j < sizeof(interesting_8); j++) {
+    for (j = 0; j < sizeof(interesting_8); ++j) {
 
       /* Skip if the value could be a product of bitflips or arithmetics. */
 
@@ -6323,7 +6323,7 @@ skip_arith:
       if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
       out_buf[i] = orig;
-      stage_cur++;
+      ++stage_cur;
 
     }
 
@@ -6345,7 +6345,7 @@ skip_arith:
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len - 1; i++) {
+  for (i = 0; i < len - 1; ++i) {
 
     u16 orig = *(u16*)(out_buf + i);
 
@@ -6358,7 +6358,7 @@ skip_arith:
 
     stage_cur_byte = i;
 
-    for (j = 0; j < sizeof(interesting_16) / 2; j++) {
+    for (j = 0; j < sizeof(interesting_16) / 2; ++j) {
 
       stage_cur_val = interesting_16[j];
 
@@ -6374,7 +6374,7 @@ skip_arith:
         *(u16*)(out_buf + i) = interesting_16[j];
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6387,7 +6387,7 @@ skip_arith:
 
         *(u16*)(out_buf + i) = SWAP16(interesting_16[j]);
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6427,7 +6427,7 @@ skip_arith:
 
     stage_cur_byte = i;
 
-    for (j = 0; j < sizeof(interesting_32) / 4; j++) {
+    for (j = 0; j < sizeof(interesting_32) / 4; ++j) {
 
       stage_cur_val = interesting_32[j];
 
@@ -6443,7 +6443,7 @@ skip_arith:
         *(u32*)(out_buf + i) = interesting_32[j];
 
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6456,7 +6456,7 @@ skip_arith:
 
         *(u32*)(out_buf + i) = SWAP32(interesting_32[j]);
         if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-        stage_cur++;
+        ++stage_cur;
 
       } else stage_max--;
 
@@ -6490,7 +6490,7 @@ skip_interest:
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len; i++) {
+  for (i = 0; i < len; ++i) {
 
     u32 last_len = 0;
 
@@ -6501,7 +6501,7 @@ skip_interest:
        between writes at a particular offset determined by the outer
        loop. */
 
-    for (j = 0; j < extras_cnt; j++) {
+    for (j = 0; j < extras_cnt; ++j) {
 
       /* Skip extras probabilistically if extras_cnt > MAX_DET_EXTRAS. Also
          skip them if there's no room to insert the payload, if the token
@@ -6523,7 +6523,7 @@ skip_interest:
 
       if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
-      stage_cur++;
+      ++stage_cur;
 
     }
 
@@ -6548,11 +6548,11 @@ skip_interest:
 
   ex_tmp = ck_alloc(len + MAX_DICT_FILE);
 
-  for (i = 0; i <= len; i++) {
+  for (i = 0; i <= len; ++i) {
 
     stage_cur_byte = i;
 
-    for (j = 0; j < extras_cnt; j++) {
+    for (j = 0; j < extras_cnt; ++j) {
 
       if (len + extras[j].len > MAX_FILE) {
         stage_max--; 
@@ -6570,7 +6570,7 @@ skip_interest:
         goto abandon_entry;
       }
 
-      stage_cur++;
+      ++stage_cur;
 
     }
 
@@ -6599,13 +6599,13 @@ skip_user_extras:
 
   orig_hit_cnt = new_hit_cnt;
 
-  for (i = 0; i < len; i++) {
+  for (i = 0; i < len; ++i) {
 
     u32 last_len = 0;
 
     stage_cur_byte = i;
 
-    for (j = 0; j < MIN(a_extras_cnt, USE_AUTO_EXTRAS); j++) {
+    for (j = 0; j < MIN(a_extras_cnt, USE_AUTO_EXTRAS); ++j) {
 
       /* See the comment in the earlier code; extras are sorted by size. */
 
@@ -6623,7 +6623,7 @@ skip_user_extras:
 
       if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
-      stage_cur++;
+      ++stage_cur;
 
     }
 
@@ -6664,7 +6664,7 @@ python_stage:
   char* retbuf = NULL;
   size_t retlen = 0;
 
-  for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+  for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
     struct queue_entry* target;
     u32 tid;
     u8* new_buf;
@@ -6682,7 +6682,7 @@ retry_external_pick:
 
     while (target && (target->len < 2 || target == queue_cur) && queued_paths > 1) {
       target = target->next;
-      splicing_with++;
+      ++splicing_with;
     }
 
     if (!target) goto retry_external_pick;
@@ -6780,13 +6780,13 @@ havoc_stage:
   /* We essentially just do several thousand runs (depending on perf_score)
      where we take the input file and make random stacked tweaks. */
 
-  for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+  for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
     u32 use_stacking = 1 << (1 + UR(HAVOC_STACK_POW2));
 
     stage_cur_val = use_stacking;
  
-    for (i = 0; i < use_stacking; i++) {
+    for (i = 0; i < use_stacking; ++i) {
 
       switch (UR(15 + ((extras_cnt + a_extras_cnt) ? 2 : 0))) {
 
@@ -7240,7 +7240,7 @@ retry_splicing:
 
     while (target && (target->len < 2 || target == queue_cur)) {
       target = target->next;
-      splicing_with++;
+      ++splicing_with;
     }
 
     if (!target) goto retry_splicing;
@@ -7307,7 +7307,7 @@ abandon_entry:
     if (queue_cur->favored) pending_favored--;
   }
 
-  queue_cur->fuzz_level++;
+  ++queue_cur->fuzz_level;
 
   munmap(orig_in, queue_cur->len);
 
@@ -7422,7 +7422,7 @@ static u8 pilot_fuzzing(char** argv) {
 		}
 
 		if (stop_soon || res != crash_mode) {
-			cur_skipped_paths++;
+			++cur_skipped_paths;
 			goto abandon_entry;
 		}
 
@@ -7440,7 +7440,7 @@ static u8 pilot_fuzzing(char** argv) {
 			FATAL("Unable to execute target application");
 
 		if (stop_soon) {
-			cur_skipped_paths++;
+			++cur_skipped_paths;
 			goto abandon_entry;
 		}
 
@@ -7509,7 +7509,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		prev_cksum = queue_cur->exec_cksum;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur >> 3;
 
@@ -7556,7 +7556,7 @@ static u8 pilot_fuzzing(char** argv) {
 					   final character and force output. */
 
 					if (a_len < MAX_AUTO_EXTRA) a_collect[a_len] = out_buf[stage_cur >> 3];
-					a_len++;
+					++a_len;
 
 					if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA)
 						maybe_add_auto(a_collect, a_len);
@@ -7581,7 +7581,7 @@ static u8 pilot_fuzzing(char** argv) {
 				if (cksum != queue_cur->exec_cksum) {
 
 					if (a_len < MAX_AUTO_EXTRA) a_collect[a_len] = out_buf[stage_cur >> 3];
-					a_len++;
+					++a_len;
 
 				}
 
@@ -7602,7 +7602,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur >> 3;
 
@@ -7635,7 +7635,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur >> 3;
 
@@ -7682,7 +7682,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		if (EFF_APOS(len - 1) != 0) {
 			eff_map[EFF_APOS(len - 1)] = 1;
-			eff_cnt++;
+			++eff_cnt;
 		}
 
 		/* Walking byte. */
@@ -7695,7 +7695,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur;
 
@@ -7722,7 +7722,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 				if (cksum != queue_cur->exec_cksum) {
 					eff_map[EFF_APOS(stage_cur)] = 1;
-					eff_cnt++;
+					++eff_cnt;
 				}
 
 			}
@@ -7773,7 +7773,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 1; i++) {
+		for (i = 0; i < len - 1; ++i) {
 
 			/* Let's consult the effector map... */
 
@@ -7787,7 +7787,7 @@ static u8 pilot_fuzzing(char** argv) {
 			*(u16*)(out_buf + i) ^= 0xFFFF;
 
 			if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-			stage_cur++;
+			++stage_cur;
 
 			*(u16*)(out_buf + i) ^= 0xFFFF;
 
@@ -7815,7 +7815,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 3; i++) {
+		for (i = 0; i < len - 3; ++i) {
 
 			/* Let's consult the effector map... */
 			if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
@@ -7829,7 +7829,7 @@ static u8 pilot_fuzzing(char** argv) {
 			*(u32*)(out_buf + i) ^= 0xFFFFFFFF;
 
 			if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-			stage_cur++;
+			++stage_cur;
 
 			*(u32*)(out_buf + i) ^= 0xFFFFFFFF;
 
@@ -7867,7 +7867,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u8 orig = out_buf[i];
 
@@ -7880,7 +7880,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 1; j <= ARITH_MAX; j++) {
+			for (j = 1; j <= ARITH_MAX; ++j) {
 
 				u8 r = orig ^ (orig + j);
 
@@ -7893,7 +7893,7 @@ static u8 pilot_fuzzing(char** argv) {
 					out_buf[i] = orig + j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -7905,7 +7905,7 @@ static u8 pilot_fuzzing(char** argv) {
 					out_buf[i] = orig - j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -7938,7 +7938,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 1; i++) {
+		for (i = 0; i < len - 1; ++i) {
 
 			u16 orig = *(u16*)(out_buf + i);
 
@@ -7951,7 +7951,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 1; j <= ARITH_MAX; j++) {
+			for (j = 1; j <= ARITH_MAX; ++j) {
 
 				u16 r1 = orig ^ (orig + j),
 					r2 = orig ^ (orig - j),
@@ -7971,7 +7971,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = orig + j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -7981,7 +7981,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = orig - j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -7996,7 +7996,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = SWAP16(SWAP16(orig) + j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8006,7 +8006,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = SWAP16(SWAP16(orig) - j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8037,7 +8037,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 3; i++) {
+		for (i = 0; i < len - 3; ++i) {
 
 			u32 orig = *(u32*)(out_buf + i);
 
@@ -8051,7 +8051,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 1; j <= ARITH_MAX; j++) {
+			for (j = 1; j <= ARITH_MAX; ++j) {
 
 				u32 r1 = orig ^ (orig + j),
 					r2 = orig ^ (orig - j),
@@ -8069,7 +8069,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = orig + j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8093,7 +8093,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = SWAP32(SWAP32(orig) + j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8103,7 +8103,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = SWAP32(SWAP32(orig) - j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8140,7 +8140,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		/* Setting 8-bit integers. */
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u8 orig = out_buf[i];
 
@@ -8153,7 +8153,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < sizeof(interesting_8); j++) {
+			for (j = 0; j < sizeof(interesting_8); ++j) {
 
 				/* Skip if the value could be a product of bitflips or arithmetics. */
 
@@ -8169,7 +8169,7 @@ static u8 pilot_fuzzing(char** argv) {
 				if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
 				out_buf[i] = orig;
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -8196,7 +8196,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 1; i++) {
+		for (i = 0; i < len - 1; ++i) {
 
 			u16 orig = *(u16*)(out_buf + i);
 
@@ -8209,7 +8209,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < sizeof(interesting_16) / 2; j++) {
+			for (j = 0; j < sizeof(interesting_16) / 2; ++j) {
 
 				stage_cur_val = interesting_16[j];
 
@@ -8225,7 +8225,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = interesting_16[j];
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8238,7 +8238,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 					*(u16*)(out_buf + i) = SWAP16(interesting_16[j]);
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8269,7 +8269,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 3; i++) {
+		for (i = 0; i < len - 3; ++i) {
 
 			u32 orig = *(u32*)(out_buf + i);
 
@@ -8283,7 +8283,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < sizeof(interesting_32) / 4; j++) {
+			for (j = 0; j < sizeof(interesting_32) / 4; ++j) {
 
 				stage_cur_val = interesting_32[j];
 
@@ -8299,7 +8299,7 @@ static u8 pilot_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = interesting_32[j];
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8312,7 +8312,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 					*(u32*)(out_buf + i) = SWAP32(interesting_32[j]);
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -8353,7 +8353,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u32 last_len = 0;
 
@@ -8364,7 +8364,7 @@ static u8 pilot_fuzzing(char** argv) {
 			   between writes at a particular offset determined by the outer
 			   loop. */
 
-			for (j = 0; j < extras_cnt; j++) {
+			for (j = 0; j < extras_cnt; ++j) {
 
 				/* Skip extras probabilistically if extras_cnt > MAX_DET_EXTRAS. Also
 				   skip them if there's no room to insert the payload, if the token
@@ -8386,7 +8386,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 				if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -8414,11 +8414,11 @@ static u8 pilot_fuzzing(char** argv) {
 
 		ex_tmp = ck_alloc(len + MAX_DICT_FILE);
 
-		for (i = 0; i <= len; i++) {
+		for (i = 0; i <= len; ++i) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < extras_cnt; j++) {
+			for (j = 0; j < extras_cnt; ++j) {
 
 				if (len + extras[j].len > MAX_FILE) {
 					stage_max--;
@@ -8436,7 +8436,7 @@ static u8 pilot_fuzzing(char** argv) {
 					goto abandon_entry;
 				}
 
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -8466,13 +8466,13 @@ static u8 pilot_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u32 last_len = 0;
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < MIN(a_extras_cnt, USE_AUTO_EXTRAS); j++) {
+			for (j = 0; j < MIN(a_extras_cnt, USE_AUTO_EXTRAS); ++j) {
 
 				/* See the comment in the earlier code; extras are sorted by size. */
 
@@ -8490,7 +8490,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 				if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -8600,20 +8600,20 @@ static u8 pilot_fuzzing(char** argv) {
 
 
 
-				for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+				for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 					u32 use_stacking = 1 << (1 + UR(HAVOC_STACK_POW2));
 
 					stage_cur_val = use_stacking;
 
 
-					for (i = 0; i < operator_num; i++)
+					for (i = 0; i < operator_num; ++i)
 					{
 						stage_cycles_puppet_v3[swarm_now][i] = stage_cycles_puppet_v2[swarm_now][i];
 					}
 
 
-					for (i = 0; i < use_stacking; i++) {
+					for (i = 0; i < use_stacking; ++i) {
 
 						switch (select_algorithm()) {
 
@@ -8921,7 +8921,7 @@ static u8 pilot_fuzzing(char** argv) {
 					{
 						u64 temp_temp_puppet = queued_paths + unique_crashes - temp_total_found;
 						total_puppet_find = total_puppet_find + temp_temp_puppet;
-						for (i = 0; i < 16; i++)
+						for (i = 0; i < 16; ++i)
 						{
 							if (stage_cycles_puppet_v2[swarm_now][i] > stage_cycles_puppet_v3[swarm_now][i])
 								stage_finds_puppet_v2[swarm_now][i] += temp_temp_puppet;
@@ -8979,7 +8979,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 					while (target && (target->len < 2 || target == queue_cur)) {
 						target = target->next;
-						splicing_with++;
+						++splicing_with;
 					}
 
 					if (!target) goto retry_splicing_puppet;
@@ -9071,7 +9071,7 @@ static u8 pilot_fuzzing(char** argv) {
 					temp_puppet_find = total_puppet_find;
 
 					u64 temp_stage_finds_puppet = 0;
-					for (i = 0; i < operator_num; i++) {
+					for (i = 0; i < operator_num; ++i) {
 						double temp_eff = 0.0;
 
 						if (stage_cycles_puppet_v2[swarm_now][i] > stage_cycles_puppet[swarm_now][i])
@@ -9091,7 +9091,7 @@ static u8 pilot_fuzzing(char** argv) {
 					swarm_now = swarm_now + 1;
 						if (swarm_now == swarm_num) {
 							key_module = 1;
-							for (i = 0; i < operator_num; i++) {
+							for (i = 0; i < operator_num; ++i) {
 								core_operator_cycles_puppet_v2[i] = core_operator_cycles_puppet[i];
 								core_operator_cycles_puppet_v3[i] = core_operator_cycles_puppet[i];
 								core_operator_finds_puppet_v2[i] = core_operator_finds_puppet[i];
@@ -9099,7 +9099,7 @@ static u8 pilot_fuzzing(char** argv) {
 
 							double swarm_eff = 0.0;
 							swarm_now = 0;
-							for (i = 0; i < swarm_num; i++)	{
+							for (i = 0; i < swarm_num; ++i)	{
 								if (swarm_fitness[i] > swarm_eff) {
 									swarm_eff = swarm_fitness[i];
 									swarm_now = i;
@@ -9225,7 +9225,7 @@ static u8 core_fuzzing(char** argv) {
 			}
 
 			if (stop_soon || res != crash_mode) {
-				cur_skipped_paths++;
+				++cur_skipped_paths;
 				goto abandon_entry;
 			}
 
@@ -9243,7 +9243,7 @@ static u8 core_fuzzing(char** argv) {
 				FATAL("Unable to execute target application");
 
 			if (stop_soon) {
-				cur_skipped_paths++;
+				++cur_skipped_paths;
 				goto abandon_entry;
 			}
 
@@ -9309,7 +9309,7 @@ static u8 core_fuzzing(char** argv) {
 
 		prev_cksum = queue_cur->exec_cksum;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur >> 3;
 
@@ -9356,7 +9356,7 @@ static u8 core_fuzzing(char** argv) {
 					   final character and force output. */
 
 					if (a_len < MAX_AUTO_EXTRA) a_collect[a_len] = out_buf[stage_cur >> 3];
-					a_len++;
+					++a_len;
 
 					if (a_len >= MIN_AUTO_EXTRA && a_len <= MAX_AUTO_EXTRA)
 						maybe_add_auto(a_collect, a_len);
@@ -9381,7 +9381,7 @@ static u8 core_fuzzing(char** argv) {
 				if (cksum != queue_cur->exec_cksum) {
 
 					if (a_len < MAX_AUTO_EXTRA) a_collect[a_len] = out_buf[stage_cur >> 3];
-					a_len++;
+					++a_len;
 
 				}
 
@@ -9404,7 +9404,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur >> 3;
 
@@ -9433,7 +9433,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur >> 3;
 
@@ -9478,7 +9478,7 @@ static u8 core_fuzzing(char** argv) {
 
 		if (EFF_APOS(len - 1) != 0) {
 			eff_map[EFF_APOS(len - 1)] = 1;
-			eff_cnt++;
+			++eff_cnt;
 		}
 
 		/* Walking byte. */
@@ -9490,7 +9490,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+		for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 			stage_cur_byte = stage_cur;
 
@@ -9517,7 +9517,7 @@ static u8 core_fuzzing(char** argv) {
 
 				if (cksum != queue_cur->exec_cksum) {
 					eff_map[EFF_APOS(stage_cur)] = 1;
-					eff_cnt++;
+					++eff_cnt;
 				}
 
 			}
@@ -9565,7 +9565,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 1; i++) {
+		for (i = 0; i < len - 1; ++i) {
 
 			/* Let's consult the effector map... */
 
@@ -9579,7 +9579,7 @@ static u8 core_fuzzing(char** argv) {
 			*(u16*)(out_buf + i) ^= 0xFFFF;
 
 			if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-			stage_cur++;
+			++stage_cur;
 
 			*(u16*)(out_buf + i) ^= 0xFFFF;
 
@@ -9605,7 +9605,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 3; i++) {
+		for (i = 0; i < len - 3; ++i) {
 
 			/* Let's consult the effector map... */
 			if (!eff_map[EFF_APOS(i)] && !eff_map[EFF_APOS(i + 1)] &&
@@ -9619,7 +9619,7 @@ static u8 core_fuzzing(char** argv) {
 			*(u32*)(out_buf + i) ^= 0xFFFFFFFF;
 
 			if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-			stage_cur++;
+			++stage_cur;
 
 			*(u32*)(out_buf + i) ^= 0xFFFFFFFF;
 
@@ -9653,7 +9653,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u8 orig = out_buf[i];
 
@@ -9666,7 +9666,7 @@ static u8 core_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 1; j <= ARITH_MAX; j++) {
+			for (j = 1; j <= ARITH_MAX; ++j) {
 
 				u8 r = orig ^ (orig + j);
 
@@ -9679,7 +9679,7 @@ static u8 core_fuzzing(char** argv) {
 					out_buf[i] = orig + j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9691,7 +9691,7 @@ static u8 core_fuzzing(char** argv) {
 					out_buf[i] = orig - j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9721,7 +9721,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 1; i++) {
+		for (i = 0; i < len - 1; ++i) {
 
 			u16 orig = *(u16*)(out_buf + i);
 
@@ -9734,7 +9734,7 @@ static u8 core_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 1; j <= ARITH_MAX; j++) {
+			for (j = 1; j <= ARITH_MAX; ++j) {
 
 				u16 r1 = orig ^ (orig + j),
 					r2 = orig ^ (orig - j),
@@ -9754,7 +9754,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = orig + j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9764,7 +9764,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = orig - j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9779,7 +9779,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = SWAP16(SWAP16(orig) + j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9789,7 +9789,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = SWAP16(SWAP16(orig) - j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9817,7 +9817,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 3; i++) {
+		for (i = 0; i < len - 3; ++i) {
 
 			u32 orig = *(u32*)(out_buf + i);
 
@@ -9831,7 +9831,7 @@ static u8 core_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 1; j <= ARITH_MAX; j++) {
+			for (j = 1; j <= ARITH_MAX; ++j) {
 
 				u32 r1 = orig ^ (orig + j),
 					r2 = orig ^ (orig - j),
@@ -9849,7 +9849,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = orig + j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9859,7 +9859,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = orig - j;
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9873,7 +9873,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = SWAP32(SWAP32(orig) + j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9883,7 +9883,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = SWAP32(SWAP32(orig) - j);
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -9919,7 +9919,7 @@ static u8 core_fuzzing(char** argv) {
 
 		/* Setting 8-bit integers. */
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u8 orig = out_buf[i];
 
@@ -9932,7 +9932,7 @@ static u8 core_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < sizeof(interesting_8); j++) {
+			for (j = 0; j < sizeof(interesting_8); ++j) {
 
 				/* Skip if the value could be a product of bitflips or arithmetics. */
 
@@ -9948,7 +9948,7 @@ static u8 core_fuzzing(char** argv) {
 				if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
 				out_buf[i] = orig;
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -9973,7 +9973,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 1; i++) {
+		for (i = 0; i < len - 1; ++i) {
 
 			u16 orig = *(u16*)(out_buf + i);
 
@@ -9986,7 +9986,7 @@ static u8 core_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < sizeof(interesting_16) / 2; j++) {
+			for (j = 0; j < sizeof(interesting_16) / 2; ++j) {
 
 				stage_cur_val = interesting_16[j];
 
@@ -10002,7 +10002,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u16*)(out_buf + i) = interesting_16[j];
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -10015,7 +10015,7 @@ static u8 core_fuzzing(char** argv) {
 
 					*(u16*)(out_buf + i) = SWAP16(interesting_16[j]);
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -10045,7 +10045,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len - 3; i++) {
+		for (i = 0; i < len - 3; ++i) {
 
 			u32 orig = *(u32*)(out_buf + i);
 
@@ -10059,7 +10059,7 @@ static u8 core_fuzzing(char** argv) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < sizeof(interesting_32) / 4; j++) {
+			for (j = 0; j < sizeof(interesting_32) / 4; ++j) {
 
 				stage_cur_val = interesting_32[j];
 
@@ -10075,7 +10075,7 @@ static u8 core_fuzzing(char** argv) {
 					*(u32*)(out_buf + i) = interesting_32[j];
 
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -10088,7 +10088,7 @@ static u8 core_fuzzing(char** argv) {
 
 					*(u32*)(out_buf + i) = SWAP32(interesting_32[j]);
 					if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
-					stage_cur++;
+					++stage_cur;
 
 				} else stage_max--;
 
@@ -10125,7 +10125,7 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u32 last_len = 0;
 
@@ -10136,7 +10136,7 @@ static u8 core_fuzzing(char** argv) {
 			   between writes at a particular offset determined by the outer
 			   loop. */
 
-			for (j = 0; j < extras_cnt; j++) {
+			for (j = 0; j < extras_cnt; ++j) {
 
 				/* Skip extras probabilistically if extras_cnt > MAX_DET_EXTRAS. Also
 				   skip them if there's no room to insert the payload, if the token
@@ -10158,7 +10158,7 @@ static u8 core_fuzzing(char** argv) {
 
 				if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -10186,11 +10186,11 @@ static u8 core_fuzzing(char** argv) {
 
 		ex_tmp = ck_alloc(len + MAX_DICT_FILE);
 
-		for (i = 0; i <= len; i++) {
+		for (i = 0; i <= len; ++i) {
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < extras_cnt; j++) {
+			for (j = 0; j < extras_cnt; ++j) {
 
 				if (len + extras[j].len > MAX_FILE) {
 					stage_max--;
@@ -10208,7 +10208,7 @@ static u8 core_fuzzing(char** argv) {
 					goto abandon_entry;
 				}
 
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -10238,13 +10238,13 @@ static u8 core_fuzzing(char** argv) {
 
 		orig_hit_cnt = new_hit_cnt;
 
-		for (i = 0; i < len; i++) {
+		for (i = 0; i < len; ++i) {
 
 			u32 last_len = 0;
 
 			stage_cur_byte = i;
 
-			for (j = 0; j < MIN(a_extras_cnt, USE_AUTO_EXTRAS); j++) {
+			for (j = 0; j < MIN(a_extras_cnt, USE_AUTO_EXTRAS); ++j) {
 
 				/* See the comment in the earlier code; extras are sorted by size. */
 
@@ -10262,7 +10262,7 @@ static u8 core_fuzzing(char** argv) {
 
 				if (common_fuzz_stuff(argv, out_buf, len)) goto abandon_entry;
 
-				stage_cur++;
+				++stage_cur;
 
 			}
 
@@ -10320,7 +10320,7 @@ static u8 core_fuzzing(char** argv) {
 		s32 temp_len_puppet;
 		cur_ms_lv = get_cur_time();
 
-		//for (; swarm_now < swarm_num; swarm_now++)
+		//for (; swarm_now < swarm_num; ++swarm_now)
 		{
 			if (key_puppet == 1) {
 				if (unlikely(orig_hit_cnt_puppet == 0)) {
@@ -10356,16 +10356,16 @@ static u8 core_fuzzing(char** argv) {
 				orig_hit_cnt = queued_paths + unique_crashes;
 				havoc_queued = queued_paths;
 
-				for (stage_cur = 0; stage_cur < stage_max; stage_cur++) {
+				for (stage_cur = 0; stage_cur < stage_max; ++stage_cur) {
 
 					u32 use_stacking = 1 << (1 + UR(HAVOC_STACK_POW2));
 					stage_cur_val = use_stacking;
 
-					for (i = 0; i < operator_num; i++) {
+					for (i = 0; i < operator_num; ++i) {
 						core_operator_cycles_puppet_v3[i] = core_operator_cycles_puppet_v2[i];
 					}
 
-					for (i = 0; i < use_stacking; i++) {
+					for (i = 0; i < use_stacking; ++i) {
 
 						switch (select_algorithm()) {
 
@@ -10657,7 +10657,7 @@ static u8 core_fuzzing(char** argv) {
 					{
 						u64 temp_temp_puppet = queued_paths + unique_crashes - temp_total_found;
 						total_puppet_find = total_puppet_find + temp_temp_puppet;
-						for (i = 0; i < 16; i++)
+						for (i = 0; i < 16; ++i)
 						{
 							if (core_operator_cycles_puppet_v2[i] > core_operator_cycles_puppet_v3[i])
 								core_operator_finds_puppet_v2[i] += temp_temp_puppet;
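
Editorial note: the MOpt havoc loop in these hunks stacks a random power-of-two number of mutations per execution, each drawn by select_algorithm() from a per-swarm cumulative probability table; the v2/v3 counters afterwards credit the operators that were actually applied whenever new paths or crashes show up. A compact sketch of the stacking-plus-weighted-pick idea follows, with a toy three-operator table and simplified mutators; all constants and names here are illustrative.

#include <stdint.h>
#include <stddef.h>
#include <stdlib.h>

#define HAVOC_STACK_POW2 7
#define OPERATOR_NUM     3

/* Toy cumulative probability table; MOpt recomputes this per swarm via PSO. */
static const double prob_now[OPERATOR_NUM] = { 0.4, 0.7, 1.0 };

static uint32_t ur(uint32_t limit) { return (uint32_t)rand() % limit; }

/* Stand-in for select_algorithm(): walk the cumulative table. */
static int pick_operator(void) {
  double r = (double)(rand() % 10000) * 0.0001;
  for (int i = 0; i < OPERATOR_NUM; ++i)
    if (r <= prob_now[i]) return i;
  return OPERATOR_NUM - 1;
}

void havoc_once(uint8_t* buf, size_t len) {

  if (!len) return;

  /* Stack 2..128 mutations, as in use_stacking = 1 << (1 + UR(HAVOC_STACK_POW2)). */
  uint32_t use_stacking = 1u << (1 + ur(HAVOC_STACK_POW2));

  for (uint32_t i = 0; i < use_stacking; ++i) {

    switch (pick_operator()) {

      case 0:  buf[ur((uint32_t)len)] ^= (uint8_t)(1u << ur(8)); break; /* flip one bit    */
      case 1:  buf[ur((uint32_t)len)]  = (uint8_t)ur(256);       break; /* random byte     */
      default: buf[ur((uint32_t)len)] += (uint8_t)(1 + ur(35));  break; /* small arith add */

    }

  }

}
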
@@ -10711,7 +10711,7 @@ static u8 core_fuzzing(char** argv) {
 
 					while (target && (target->len < 2 || target == queue_cur)) {
 						target = target->next;
-						splicing_with++;
+						++splicing_with;
 					}
 
 					if (!target) goto retry_splicing_puppet;
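
Editorial note: the splicing path picks another queue entry that is at least two bytes long and is not the current one, then combines the two inputs at a random point between their first and last differing bytes. A condensed sketch of that splice follows, assuming for simplicity that both inputs have the same length (the real code clips to the shorter of the two); locate_diffs() mirrors the helper defined earlier in this file.

#include <stdint.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>

/* Find the first and last byte positions where two equal-length buffers differ. */
static void locate_diffs(const uint8_t* a, const uint8_t* b, size_t len,
                         long* first, long* last) {
  *first = -1;
  *last  = -1;
  for (size_t pos = 0; pos < len; ++pos)
    if (a[pos] != b[pos]) {
      if (*first == -1) *first = (long)pos;
      *last = (long)pos;
    }
}

/* Return a malloc'd splice of the two inputs, or NULL if they are too similar
   to yield a useful split point. */
uint8_t* splice_bufs(const uint8_t* cur, const uint8_t* other, size_t len) {

  long f_diff, l_diff;

  locate_diffs(cur, other, len, &f_diff, &l_diff);
  if (f_diff < 0 || l_diff < 2 || f_diff == l_diff) return NULL;

  long split_at = f_diff + rand() % (l_diff - f_diff);   /* split between the two loci */

  uint8_t* out = malloc(len);
  if (!out) return NULL;

  memcpy(out, cur, (size_t)split_at);                               /* head from the current input */
  memcpy(out + split_at, other + split_at, len - (size_t)split_at); /* tail from the splice target */
  return out;

}
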
@@ -10797,7 +10797,7 @@ static u8 core_fuzzing(char** argv) {
 					new_hit_cnt = queued_paths + unique_crashes;
 
 					u64 temp_stage_finds_puppet = 0;
-					for (i = 0; i < operator_num; i++)
+					for (i = 0; i < operator_num; ++i)
 					{
 
 						core_operator_finds_puppet[i] = core_operator_finds_puppet_v2[i];
@@ -10826,27 +10826,27 @@ void pso_updating(void) {
 	w_now = (w_init - w_end)*(g_max - g_now) / (g_max)+w_end;
 	int tmp_swarm, i, j;
 	u64 temp_operator_finds_puppet = 0;
-	for (i = 0; i < operator_num; i++)
+	for (i = 0; i < operator_num; ++i)
 	{
 		operator_finds_puppet[i] = core_operator_finds_puppet[i];
 
-		for (j = 0; j < swarm_num; j++)
+		for (j = 0; j < swarm_num; ++j)
 		{
 			operator_finds_puppet[i] = operator_finds_puppet[i] + stage_finds_puppet[j][i];
 		}
 		temp_operator_finds_puppet = temp_operator_finds_puppet + operator_finds_puppet[i];
 	}
 
-	for (i = 0; i < operator_num; i++)
+	for (i = 0; i < operator_num; ++i)
 	{
 		if (operator_finds_puppet[i])
 			G_best[i] = (double)((double)(operator_finds_puppet[i]) / (double)(temp_operator_finds_puppet));
 	}
 
-	for (tmp_swarm = 0; tmp_swarm < swarm_num; tmp_swarm++)
+	for (tmp_swarm = 0; tmp_swarm < swarm_num; ++tmp_swarm)
 	{
 		double x_temp = 0.0;
-		for (i = 0; i < operator_num; i++)
+		for (i = 0; i < operator_num; ++i)
 		{
 			probability_now[tmp_swarm][i] = 0.0;
 			v_now[tmp_swarm][i] = w_now * v_now[tmp_swarm][i] + RAND_C * (L_best[tmp_swarm][i] - x_now[tmp_swarm][i]) + RAND_C * (G_best[i] - x_now[tmp_swarm][i]);
@@ -10858,7 +10858,7 @@ void pso_updating(void) {
 			x_temp += x_now[tmp_swarm][i];
 		}
 
-		for (i = 0; i < operator_num; i++)
+		for (i = 0; i < operator_num; ++i)
 		{
 			x_now[tmp_swarm][i] = x_now[tmp_swarm][i] / x_temp;
 			if (likely(i != 0))
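
Editorial note: pso_updating() is a textbook particle-swarm step applied to the mutation-operator weights: each velocity is pulled toward the swarm's local best and the global best, the positions are updated and re-normalized to sum to one, and the result is folded into the cumulative probability_now table that select_algorithm() samples from. A self-contained sketch of one such step follows; RAND_C, the clamp bounds, and OPERATOR_NUM are illustrative values, not necessarily MOpt's constants.

#include <stdlib.h>

#define OPERATOR_NUM 16
#define RAND_C (0.001 * (rand() % 1000))   /* stand-in for the random learning factor */

void pso_step(double w_now,
              double x_now[OPERATOR_NUM], double v_now[OPERATOR_NUM],
              const double l_best[OPERATOR_NUM],
              const double g_best[OPERATOR_NUM],
              double prob_now[OPERATOR_NUM]) {

  double x_sum = 0.0;

  for (int i = 0; i < OPERATOR_NUM; ++i) {

    /* velocity: inertia plus attraction toward the local and global bests */
    v_now[i] = w_now * v_now[i]
             + RAND_C * (l_best[i] - x_now[i])
             + RAND_C * (g_best[i] - x_now[i]);

    x_now[i] += v_now[i];
    if (x_now[i] > 0.999) x_now[i] = 0.999;   /* illustrative clamp */
    if (x_now[i] < 0.001) x_now[i] = 0.001;

    x_sum += x_now[i];

  }

  /* normalize and convert to the cumulative distribution used by select_algorithm() */
  for (int i = 0; i < OPERATOR_NUM; ++i) {
    x_now[i] /= x_sum;
    prob_now[i] = (i == 0) ? x_now[i] : prob_now[i - 1] + x_now[i];
  }

}
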
@@ -11102,7 +11102,7 @@ EXP_ST void check_binary(u8* fname) {
 
         cur_elem = ck_alloc(delim - env_path + 1);
         memcpy(cur_elem, env_path, delim - env_path);
-        delim++;
+        ++delim;
 
       } else cur_elem = ck_strdup(env_path);
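
Editorial note: this part of check_binary() walks the colon-separated PATH, copying out one element at a time and stepping past the delimiter (the ++delim line being tuned here) before probing each directory for the target binary. A trimmed-down sketch of just the PATH iteration follows, using plain calloc/strdup in place of ck_alloc/ck_strdup and printing the elements instead of stat()-ing candidate paths.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void) {

  const char* env_path = getenv("PATH");
  if (!env_path) return 0;

  while (env_path && *env_path) {

    const char* delim = strchr(env_path, ':');
    char* cur_elem;

    if (delim) {

      cur_elem = calloc(1, (size_t)(delim - env_path) + 1);
      if (!cur_elem) return 1;
      memcpy(cur_elem, env_path, (size_t)(delim - env_path));
      ++delim;                       /* skip the ':' for the next round */

    } else {

      cur_elem = strdup(env_path);
      if (!cur_elem) return 1;

    }

    /* an empty element conventionally means the current directory */
    printf("PATH element: %s\n", *cur_elem ? cur_elem : ".");
    free(cur_elem);

    env_path = delim;                /* NULL after the last element */

  }

  return 0;

}
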
 
@@ -11504,7 +11504,7 @@ static void setup_cmdline_file(char** argv) {
 
   while (argv[i]) {
     fprintf(cmdline_file, "%s\n", argv[i]);
-    i++;
+    ++i;
   }
 
   fclose(cmdline_file);
@@ -11708,7 +11708,7 @@ static void get_core_count(void) {
   if (!f) return;
 
   while (fgets(tmp, sizeof(tmp), f))
-    if (!strncmp(tmp, "cpu", 3) && isdigit(tmp[3])) cpu_core_count++;
+    if (!strncmp(tmp, "cpu", 3) && isdigit(tmp[3])) ++cpu_core_count;
 
   fclose(f);
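
Editorial note: on Linux, get_core_count() simply counts the per-core "cpuN" lines in /proc/stat; the aggregate "cpu " line fails the isdigit test and is skipped. A standalone rendering of that check:

#include <ctype.h>
#include <stdio.h>
#include <string.h>

int main(void) {

  char tmp[128];
  int cpu_core_count = 0;

  FILE* f = fopen("/proc/stat", "r");
  if (!f) return 1;

  /* count "cpu0", "cpu1", ... but not the aggregate "cpu " line */
  while (fgets(tmp, sizeof(tmp), f))
    if (!strncmp(tmp, "cpu", 3) && isdigit((unsigned char)tmp[3]))
      ++cpu_core_count;

  fclose(f);

  printf("detected %d cores\n", cpu_core_count);
  return 0;

}
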
 
@@ -11724,7 +11724,7 @@ static void get_core_count(void) {
 
     /* Add ourselves, since the 1-minute average doesn't include that yet. */
 
-    cur_runnable++;
+    ++cur_runnable;
 
 #endif /* __APPLE__ || __FreeBSD__ || __OpenBSD__ */
 
@@ -11779,7 +11779,7 @@ static void fix_up_sync(void) {
     if (!isalnum(*x) && *x != '_' && *x != '-')
       FATAL("Non-alphanumeric fuzzer ID specified via -S or -M");
 
-    x++;
+    ++x;
 
   }
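
Editorial note: fix_up_sync() rejects fuzzer IDs passed with -S or -M unless every character is alphanumeric, '_', or '-', since the ID ends up as a directory name under the sync dir. The same check, extracted into a small predicate for illustration:

#include <ctype.h>
#include <stdio.h>

static int valid_sync_id(const char* x) {
  for (; *x; ++x)
    if (!isalnum((unsigned char)*x) && *x != '_' && *x != '-')
      return 0;                      /* would trigger the FATAL above */
  return 1;
}

int main(void) {
  printf("%d %d\n", valid_sync_id("fuzzer-01"), valid_sync_id("bad id"));
  return 0;
}
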
 
@@ -11956,12 +11956,12 @@ static void save_cmdline(u32 argc, char** argv) {
   u32 len = 1, i;
   u8* buf;
 
-  for (i = 0; i < argc; i++)
+  for (i = 0; i < argc; ++i)
     len += strlen(argv[i]) + 1;
   
   buf = orig_cmdline = ck_alloc(len);
 
-  for (i = 0; i < argc; i++) {
+  for (i = 0; i < argc; ++i) {
 
     u32 l = strlen(argv[i]);
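
Editorial note: save_cmdline() makes two passes over argv, one to size a single buffer and one to copy each argument with a space between consecutive arguments and a terminating NUL. A minimal equivalent using plain malloc instead of ck_alloc:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

char* flatten_argv(int argc, char** argv) {

  size_t len = 1;
  for (int i = 0; i < argc; ++i)
    len += strlen(argv[i]) + 1;          /* argument plus separator/terminator */

  char* buf = malloc(len);
  if (!buf) return NULL;

  char* p = buf;
  for (int i = 0; i < argc; ++i) {
    size_t l = strlen(argv[i]);
    memcpy(p, argv[i], l);
    p += l;
    if (i != argc - 1) *(p++) = ' ';     /* space between args, not after the last */
  }
  *p = 0;

  return buf;

}

int main(int argc, char** argv) {
  char* cmdline = flatten_argv(argc, argv);
  if (cmdline) { puts(cmdline); free(cmdline); }
  return 0;
}
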
 
@@ -11978,7 +11978,7 @@ static void save_cmdline(u32 argc, char** argv) {
 
 int stricmp(char const *a, char const *b) {
   int d;
-  for (;; a++, b++) {
+  for (;; ++a, ++b) {
     d = tolower(*a) - tolower(*b);
     if (d != 0 || !*a)
       return d;
@@ -12260,11 +12260,11 @@ int main(int argc, char** argv) {
 			if (g_now > g_max) g_now = 0;
 			w_now = (w_init - w_end)*(g_max - g_now) / (g_max)+w_end;
 
-			for (tmp_swarm = 0; tmp_swarm < swarm_num; tmp_swarm++) {
+			for (tmp_swarm = 0; tmp_swarm < swarm_num; ++tmp_swarm) {
 				double total_puppet_temp = 0.0;
 				swarm_fitness[tmp_swarm] = 0.0;
 
-				for (i = 0; i < operator_num; i++) {
+				for (i = 0; i < operator_num; ++i) {
 					stage_finds_puppet[tmp_swarm][i] = 0;
 					probability_now[tmp_swarm][i] = 0.0;
 					x_now[tmp_swarm][i] = ((double)(random() % 7000)*0.0001 + 0.1);
@@ -12276,7 +12276,7 @@ int main(int argc, char** argv) {
 
 				}
 
-				for (i = 0; i < operator_num; i++) {
+				for (i = 0; i < operator_num; ++i) {
 					stage_cycles_puppet_v2[tmp_swarm][i] = stage_cycles_puppet[tmp_swarm][i];
 					stage_finds_puppet_v2[tmp_swarm][i] = stage_finds_puppet[tmp_swarm][i];
 					x_now[tmp_swarm][i] = x_now[tmp_swarm][i] / total_puppet_temp;
@@ -12284,7 +12284,7 @@ int main(int argc, char** argv) {
 
 				double x_temp = 0.0;
 
-				for (i = 0; i < operator_num; i++) {
+				for (i = 0; i < operator_num; ++i) {
 					probability_now[tmp_swarm][i] = 0.0;
 					v_now[tmp_swarm][i] = w_now * v_now[tmp_swarm][i] + RAND_C * (L_best[tmp_swarm][i] - x_now[tmp_swarm][i]) + RAND_C * (G_best[i] - x_now[tmp_swarm][i]);
 
@@ -12298,7 +12298,7 @@ int main(int argc, char** argv) {
 					x_temp += x_now[tmp_swarm][i];
 				}
 
-				for (i = 0; i < operator_num; i++) {
+				for (i = 0; i < operator_num; ++i) {
 					x_now[tmp_swarm][i] = x_now[tmp_swarm][i] / x_temp;
 					if (likely(i != 0))
 						probability_now[tmp_swarm][i] = probability_now[tmp_swarm][i - 1] + x_now[tmp_swarm][i];
@@ -12309,7 +12309,7 @@ int main(int argc, char** argv) {
                                     FATAL("ERROR probability");
 			}
 
-			for (i = 0; i < operator_num; i++) {
+			for (i = 0; i < operator_num; ++i) {
 				core_operator_finds_puppet[i] = 0;
 				core_operator_finds_puppet_v2[i] = 0;
 				core_operator_cycles_puppet[i] = 0;
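
Editorial note: the initialization hunks in main() give each swarm random per-operator weights, normalize them to sum to one, apply one PSO velocity step, and rebuild the cumulative probability table, finishing with the "ERROR probability" guard that the last cumulative entry is approximately 1. A stripped-down sketch of the weight init and that sanity check; the random range mirrors the expression visible above, but the function as a whole is illustrative.

#include <assert.h>
#include <stdlib.h>

#define OPERATOR_NUM 16

void init_swarm(double x_now[OPERATOR_NUM], double prob_now[OPERATOR_NUM]) {

  double total = 0.0;

  for (int i = 0; i < OPERATOR_NUM; ++i) {
    x_now[i] = (double)(rand() % 7000) * 0.0001 + 0.1;   /* 0.1 .. 0.7999, as above */
    total += x_now[i];
  }

  double cum = 0.0;
  for (int i = 0; i < OPERATOR_NUM; ++i) {
    x_now[i] /= total;                                   /* weights now sum to 1    */
    cum += x_now[i];
    prob_now[i] = cum;                                   /* cumulative table        */
  }

  /* the "ERROR probability" guard: the last cumulative entry must be ~1.0 */
  assert(prob_now[OPERATOR_NUM - 1] > 0.99 &&
         prob_now[OPERATOR_NUM - 1] < 1.01);

}
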
@@ -12482,7 +12482,7 @@ int main(int argc, char** argv) {
 	break;
       }
 
-      i++;
+      ++i;
 
     }
   }
@@ -12530,13 +12530,13 @@ int main(int argc, char** argv) {
 
     if (!queue_cur) {
 
-      queue_cycle++;
+      ++queue_cycle;
       current_entry     = 0;
       cur_skipped_paths = 0;
       queue_cur         = queue;
 
       while (seek_to) {
-        current_entry++;
+        ++current_entry;
         seek_to--;
         queue_cur = queue_cur->next;
       }
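
Editorial note: at the top of the main fuzzing loop, reaching the end of the queue starts a new queue_cycle at the head, and a non-zero seek_to (set when resuming an earlier session) fast-forwards current_entry to where the previous run stopped. A reduced sketch of that bookkeeping with a minimal queue_entry struct:

/* Minimal queue entry; the real struct carries much more state. */
struct queue_entry {
  int id;
  struct queue_entry* next;
};

/* Begin a new pass over the queue, optionally skipping ahead seek_to entries. */
struct queue_entry* start_new_cycle(struct queue_entry* queue,
                                    unsigned* queue_cycle,
                                    unsigned* current_entry,
                                    unsigned seek_to) {

  ++*queue_cycle;                    /* one more full pass over the queue      */
  *current_entry = 0;

  struct queue_entry* cur = queue;
  while (seek_to && cur) {           /* fast-forward to the resume point       */
    ++*current_entry;
    --seek_to;
    cur = cur->next;
  }

  return cur;

}
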
@@ -12553,7 +12553,7 @@ int main(int argc, char** argv) {
 
       if (queued_paths == prev_queued) {
 
-        if (use_splicing) cycles_wo_finds++; else use_splicing = 1;
+        if (use_splicing) ++cycles_wo_finds; else use_splicing = 1;
 
       } else cycles_wo_finds = 0;
 
@@ -12578,7 +12578,7 @@ int main(int argc, char** argv) {
     if (stop_soon) break;
 
     queue_cur = queue_cur->next;
-    current_entry++;
+    ++current_entry;
 
     if (most_time_key == 1) {
       u64 cur_ms_lv = get_cur_time();