Diffstat (limited to 'utils/autodict_ql/autodict-ql.py')
 utils/autodict_ql/autodict-ql.py | 91
 1 file changed, 18 insertions(+), 73 deletions(-)
diff --git a/utils/autodict_ql/autodict-ql.py b/utils/autodict_ql/autodict-ql.py
index 69d11f48..ddc95435 100644
--- a/utils/autodict_ql/autodict-ql.py
+++ b/utils/autodict_ql/autodict-ql.py
@@ -1,4 +1,14 @@
 #!/usr/bin/env python3
+# AutoDict-QL - Optimal Token Generation for Fuzzing
+# Part of AFL++ Project
+# Developed and Maintained by Arash Ale Ebrahim (@Microsvuln)
+# Usage : python3 autodict-ql.py [CURRENT_DIR] [CODEQL_DATABASE_PATH] [TOKEN_PATH]
+# CURRENT_DIR = Full path to your current directory
+# CODEQL_DATABASE_PATH = Full path to your CodeQL database
+# TOKEN_PATH = Folder name of the newly generated tokens
+# Example : python3 autodict-ql.py /home/user/libxml/automate /home/user/libxml/libxml-db tokens
+# Just pass the tokens folder to the -x flag of your fuzzer
+
 import os
 import string
 import binascii 
@@ -42,47 +52,25 @@ def static_analysis(file,file2,cur,db) :
         f.close()
 
 def copy_tokens(cur, tokenpath) :
-    subprocess.call(["cp " + cur  + "/" + "arrays-lits/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
-    subprocess.call(["cp " + cur  + "/" + "strstr-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
     subprocess.call(["cp " + cur  + "/" + "strcmp-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
     subprocess.call(["cp " + cur  + "/" + "strncmp-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
-    subprocess.call(["cp " + cur  + "/" + "local-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
     subprocess.call(["cp " + cur  + "/" + "memcmp-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
-    subprocess.call(["cp " + cur  + "/" + "global-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
     subprocess.call(["cp " + cur  + "/" + "lits/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
-    subprocess.call(["cp " + cur  + "/" + "arrays-lits/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
-    subprocess.call(["cp " + cur  + "/" + "arrays-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
     subprocess.call(["cp " + cur  + "/" + "strtool-strs/*" + " " + cur + "/" + tokenpath + "/."] ,shell=True)
-    #strtool-strs
+
 
 
 def codeql_analysis(cur, db) :
     static_analysis("litout.out","litool.ql", cur, db)
     static_analysis("strcmp-strings.out","strcmp-str.ql", cur, db)
     static_analysis("strncmp-strings.out","strncmp-str.ql", cur, db)
-    static_analysis("strstr-strings.out","strstr-str.ql", cur, db)
     static_analysis("memcmp-strings.out","memcmp-str.ql", cur, db)
-    static_analysis("global-values-strings.out","globals-values.ql", cur, db)
-    static_analysis("local-strings.out","locals-strs.ql", cur, db)
     static_analysis("strtool-strings.out","strtool.ql", cur, db)
-    static_analysis("arrays.out","array-literals.ql", cur, db)
-    start_aflql(0,cur)
-    #command1 = [
-    #       'codeql','query', 'run',
-    #       cur + '/litool.ql',
-    #       '-d',
-    #       db, '>','fff.txt'
-    #    ]
-    #with open("litool2.log", "w") as f:
-    #    stream = os.popen("codeql query run litool.ql -d " + db )
-    #    output = stream.read()
-    #    f.write(output)
-    #    f.close()
-    #worker1 = subprocess.Popen(command1)
-    #print(worker1.communicate())
-
-
-def start_aflql(tokenpath, cur):
+    start_autodict(0,cur)
+
+
+
+def start_autodict(tokenpath, cur):
     command = [
            'python3',
            cur + '/litan.py',
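Note on the copy step kept above: copy_tokens still shells out to cp with shell=True once per token folder. A shell-free equivalent using only the Python standard library could look like the following sketch; the five folder names come from this diff, while the rest of the structure is illustrative and not the project's code.

import glob
import os
import shutil

def copy_tokens(cur, tokenpath):
    # The same five token folders that the simplified copy_tokens copies.
    folders = ["strcmp-strs", "strncmp-strs", "memcmp-strs", "lits", "strtool-strs"]
    dest = os.path.join(cur, tokenpath)
    for folder in folders:
        for src in glob.glob(os.path.join(cur, folder, "*")):
            shutil.copy(src, dest)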
@@ -110,23 +98,6 @@ def start_aflql(tokenpath, cur):
     worker3 = subprocess.Popen(command2)
     print(worker3.communicate())
 
-    command3 = [
-           'python3',
-           cur + '/array-lits.py',
-           cur + '/arrays-lits/',
-           cur + '/arrays.out'
-        ]
-    worker4 = subprocess.Popen(command3)
-    print(worker4.communicate())
-
-    command4 = [
-           'python3',
-           cur + '/array-strings.py',
-           cur + '/arrays-strs/',
-           cur + '/arrays.out'
-        ]
-    worker5 = subprocess.Popen(command4)
-    print(worker5.communicate())
 
 
     command5 = [
@@ -138,27 +109,8 @@ def start_aflql(tokenpath, cur):
     worker6 = subprocess.Popen(command5)
     print(worker6.communicate())
 
-    command6 = [
-           'python3',
-           cur + '/globals-strings.py',
-           cur + '/global-strs/',
-           cur + '/global-values-strings.out'
-        ]
-    worker7 = subprocess.Popen(command6)
-    print(worker7.communicate())
-
-    command7 = [
-           'python3',
-           cur + '/strstr-strings.py',
-           cur + '/strstr-strs/',
-           cur + '/strstr-strings.out'
-        ]
-    worker8 = subprocess.Popen(command7)
-    print(worker8.communicate())
 
 
-    #strtool-strings.out
-
     command8 = [
            'python3',
            cur + '/stan-strings.py',
@@ -168,14 +120,7 @@ def start_aflql(tokenpath, cur):
     worker9 = subprocess.Popen(command8)
     print(worker9.communicate())
 
-    command9 = [
-           'python3',
-           cur + '/local-strings.py',
-           cur + '/local-strs/',
-           cur + '/local-strings.out'
-        ]
-    worker10 = subprocess.Popen(command9)
-    print(worker10.communicate())
+
 
 def main():
     args = parse_args()    
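Note on the worker launches trimmed above: every surviving block in start_autodict repeats the same pattern (run a helper script with a token folder and a CodeQL result file, then wait on it and print the result). A data-driven loop is one way to express that; in the sketch below only the script names litan.py and stan-strings.py are taken from this diff, and the folder/output pairings are assumptions rather than the helpers' exact arguments.

import subprocess

def start_autodict(tokenpath, cur):
    # (helper script, token folder, CodeQL result file) triples; the pairings
    # are inferred from file names elsewhere in this diff and may not match
    # the real call sites exactly.
    jobs = [
        ("litan.py", "lits/", "litout.out"),
        ("stan-strings.py", "strtool-strs/", "strtool-strings.out"),
    ]
    for script, folder, outfile in jobs:
        worker = subprocess.Popen(
            ["python3", cur + "/" + script, cur + "/" + folder, cur + "/" + outfile]
        )
        print(worker.communicate())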
@@ -183,6 +128,6 @@ def main():
     #copy_tokens(args.cur, args.tokenpath)
     codeql_analysis(args.cur, args.db)
     copy_tokens(args.cur, args.tokenpath)
-    #start_aflql(args.tokenpath, args.cur)
+    #start_autodict(args.tokenpath, args.cur)
 if __name__ == '__main__':
     main()
\ No newline at end of file
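parse_args() is not shown in this diff; judging from main() (args.cur, args.db, args.tokenpath) and the usage line in the new header, it takes three positional arguments. A minimal argparse sketch under that assumption:

import argparse

def parse_args():
    parser = argparse.ArgumentParser(
        description="AutoDict-QL - optimal token generation for fuzzing"
    )
    parser.add_argument("cur", help="full path to the current working directory")
    parser.add_argument("db", help="full path to the CodeQL database")
    parser.add_argument("tokenpath", help="folder name for the generated tokens")
    return parser.parse_args()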