author    Frank Busse <bb0xfb@gmail.com>  2020-08-14 15:36:40 +0100
committer Cristian Cadar <c.cadar@imperial.ac.uk>  2020-09-30 21:28:12 +0100
commit    f83eac1f001e11c3464fcf088eb4e433abad04a9 (patch)
tree      248a6e8ba71deeb5f7f29d27c93008a77a2e2928
parent    2bd684d2bfb7eaeff14f331453f92a1fe05a7bda (diff)
download  klee-f83eac1f001e11c3464fcf088eb4e433abad04a9.tar.gz
klee-stats: fix behaviour for broken/empty DBs
* fill missing columns in rows with None
* fill previous rows with None if a new column is encountered
* error out for --to-csv when more than one input directory is given
-rwxr-xr-x  tools/klee-stats/klee-stats  47
1 file changed, 20 insertions(+), 27 deletions(-)
diff --git a/tools/klee-stats/klee-stats b/tools/klee-stats/klee-stats
index 49b101b3..82b50279 100755
--- a/tools/klee-stats/klee-stats
+++ b/tools/klee-stats/klee-stats
@@ -84,7 +84,7 @@ class LazyEvalList:
             cursor = self.conn().execute("SELECT * FROM stats ORDER BY rowid DESC LIMIT 1")
             column_names = [description[0] for description in cursor.description]
             return dict(zip(column_names, cursor.fetchone()))
-        except sqlite3.OperationalError as e:
+        except (sqlite3.OperationalError, TypeError) as e:
             return None
 
 
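The added TypeError is what an empty but well-formed database produces: cursor.description is still populated with the column names, but fetchone() returns None, so the dict construction fails instead of raising sqlite3.OperationalError. A minimal sketch of that failure mode, using an in-memory database for illustration (not part of the patch):

    import sqlite3

    # In-memory database standing in for a stats database that exists but holds no rows.
    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE stats (Instructions INTEGER, WallTime REAL)")

    cursor = conn.execute("SELECT * FROM stats ORDER BY rowid DESC LIMIT 1")
    column_names = [description[0] for description in cursor.description]
    try:
        record = dict(zip(column_names, cursor.fetchone()))  # fetchone() is None -> TypeError
    except (sqlite3.OperationalError, TypeError):
        record = None  # treated like a broken database: caller falls back to an empty row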
@@ -133,12 +133,8 @@ def select_columns(record, pr):
         s_column = ['Path', 'Instructions', 'WallTime', 'ICov',
                   'BCov', 'ICount', 'RelSolverTime']
 
-    filtered_record = dict()
-    for column in s_column:
-        if column in record.keys():
-            filtered_record[column] = record[column]
-
-    return filtered_record
+    # filter record
+    return { column:record[column] for column in s_column if column in record }
 
 
 def add_artificial_columns(record):
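For illustration, the comprehension drops requested columns that a damaged row does not carry instead of raising KeyError. A small sketch with a hypothetical partial record ('klee-out-0' and the values are only examples, not from the patch):

    s_column = ['Path', 'Instructions', 'WallTime', 'ICov',
                'BCov', 'ICount', 'RelSolverTime']
    record = {'Path': 'klee-out-0', 'Instructions': 1234}  # row missing most columns
    filtered = {column: record[column] for column in s_column if column in record}
    # filtered == {'Path': 'klee-out-0', 'Instructions': 1234}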
@@ -312,8 +308,7 @@ def write_table(args, data, dirs, pr):
         # Get raw row
         single_row = records.getLastRecord()
         if single_row is None:
-            # print("Error reading SQLite file (probably corrupt)")
-            # continue
+            # empty or corrupt SQLite database
             single_row = {}
         single_row['Path'] = path
         single_row.update(stats)
@@ -322,25 +317,18 @@ def write_table(args, data, dirs, pr):
         single_row = add_artificial_columns(single_row)
         single_row = select_columns(single_row, pr)
 
-        for key in single_row:
-            # Get an existing column or add an empty one
-            column = table.setdefault(key, [])
+        for key in set.union(set(single_row.keys()), set(table.keys())):
+            # Not all columns in row: add "None"
+            if key not in single_row:
+                table[key].append(None)
+                continue
 
-            # Resize the column if needed
-            missing_entries = i - len(column) - 1
-            if missing_entries > 0:
-                # Append empty entries for this column
-                column.extend([None]* missing_entries)
+            # New column: extend if necessary for previous rows
+            if key not in table:
+                table[key] = [None] * i
 
             # add the value
-            column.append(single_row[key])
-
-    # Add missing entries if needed
-    max_len = len(data)
-    for c_name in table:
-        c_length = len(table[c_name])
-        if  c_length < max_len:
-            table[c_name].extend([None] * (max_len - c_length))
+            table[key].append(single_row[key])
 
     # Rename columns
     name_mapping = dict()
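A self-contained sketch of the new filling logic (not part of the patch), assuming i counts the rows already added to the table, as in the enclosing loop: iterating over the union of the row's keys and the table's keys pads the current row with None for columns it lacks, and back-fills None for previous rows when a new column appears, so every column keeps the same length.

    # Hypothetical column-oriented table with one row already stored.
    table = {'Path': ['run1'], 'Instructions': [100]}
    single_row = {'Path': 'run2', 'ICov': 42.0}  # this row lacks 'Instructions' but adds 'ICov'
    i = 1  # number of rows already in the table

    for key in set(single_row.keys()) | set(table.keys()):
        if key not in single_row:
            table[key].append(None)        # column exists, row lacks it: pad with None
            continue
        if key not in table:
            table[key] = [None] * i        # new column: back-fill previous rows with None
        table[key].append(single_row[key])

    # table == {'Path': ['run1', 'run2'], 'Instructions': [100, None], 'ICov': [None, 42.0]}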
@@ -349,6 +337,7 @@ def write_table(args, data, dirs, pr):
     table = rename_columns(table, name_mapping)
 
     # Add a summary row
+    max_len = len(data)
     if max_len > 1:
         # calculate the total
         for k in table:
@@ -477,10 +466,10 @@ def main():
     dirs = getKleeOutDirs(args.dir)
     if len(dirs) == 0:
         print('No KLEE output directory found', file=sys.stderr)
-        exit(1)
+        sys.exit(1)
 
     if args.grafana:
-      return grafana(dirs, args.grafana_host, args.grafana_port)
+        return grafana(dirs, args.grafana_host, args.grafana_port)
 
     # Filter non-existing files, useful for star operations
     valid_log_files = [getLogFile(f) for f in dirs if os.path.isfile(getLogFile(f))]
@@ -489,6 +478,10 @@ def main():
     data = [LazyEvalList(d) for d in valid_log_files]
 
     if args.toCsv:
+        if len(valid_log_files) > 1:
+            print('Error: --to-csv only supports a single input directory ', file=sys.stderr)
+            sys.exit(1)
+
         write_csv(data)
         return