author     Timotej Kapus <tk1713@ic.ac.uk>  2018-10-28 12:49:52 +0000
committer  MartinNowack <martin.nowack@gmail.com>  2019-04-04 20:37:41 +0100
commit     572d644e8de439fe59f5598fc902d71b60cf8a85 (patch)
tree       2b6a06d02c2854efc7f916121ef950c75438e325 /tools/klee-stats
parent     b1f34f8d0ff890511e16fc322abf3ca08000b950 (diff)
download   klee-572d644e8de439fe59f5598fc902d71b60cf8a85.tar.gz
Clean klee-stats, StatsTracker and cmake
Diffstat (limited to 'tools/klee-stats')
-rwxr-xr-x  tools/klee-stats/klee-stats  185
1 file changed, 78 insertions(+), 107 deletions(-)
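klee-stats reads its data from the SQLite database KLEE writes as run.stats; this commit cleans up that access path. As a minimal sketch of what the script consumes (the klee-out-0 path is an assumption; the stats table and Instructions column are visible in the diff below), the database can be inspected directly:

    import sqlite3

    # run.stats is a SQLite database inside the KLEE output directory
    conn = sqlite3.connect('klee-out-0/run.stats')
    # the record with the most executed instructions is the latest one,
    # mirroring the query LazyEvalList issues below
    last = conn.execute(
        'SELECT * FROM stats ORDER BY Instructions DESC LIMIT 1').fetchone()
    print(last)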
diff --git a/tools/klee-stats/klee-stats b/tools/klee-stats/klee-stats
index 901cab55..38d8f750 100755
--- a/tools/klee-stats/klee-stats
+++ b/tools/klee-stats/klee-stats
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- encoding: utf-8 -*-
 
 # ===-- klee-stats --------------------------------------------------------===##
@@ -12,24 +12,18 @@
 
 """Output statistics logged by Klee."""
 
-# use '/' to mean true division and '//' to mean floor division
-from __future__ import division
-from __future__ import print_function
-
 import os
-import re
 import sys
 import argparse
 import sqlite3
 
-from operator import itemgetter
 try:
-    from tabulate import TableFormat, Line, DataRow, tabulate
+    from tabulate import TableFormat, Line, DataRow, tabulate, _table_formats
 except:
     print('Error: Package "tabulate" required for table formatting. '
-          'Please install it using "pip" or your package manager.',
+          'Please install it using "pip" or your package manager. '
+          'You can still use -grafana and -to-csv without tabulate.',
           file=sys.stderr)
-    exit(1)
 
 Legend = [
     ('Instrs', 'number of executed instructions'),
@@ -63,35 +57,27 @@ def getLogFile(path):
     """Return the path to run.stats."""
     return os.path.join(path, 'run.stats')
 
-
 class LazyEvalList:
     """Store all the lines in run.stats and eval() when needed."""
     def __init__(self, fileName):
         # The first line in the records contains headers.
-      #  self.lines = lines[1:]
-      self.conn = sqlite3.connect(fileName);
+      self.conn = sqlite3.connect(fileName)
       self.c = self.conn.cursor()
       self.c.execute("SELECT * FROM stats ORDER BY Instructions DESC LIMIT 1")
-      self.lines = self.c.fetchone()
+      self.line = self.c.fetchone()
 
     def aggregateRecords(self):
-        memC = self.conn.cursor()
-        memC.execute("SELECT max(MallocUsage) / 1024 / 1024, avg(MallocUsage) / 1024 / 1024 from stats")
-        maxMem, avgMem = memC.fetchone()
-
-        stateC = self.conn.cursor()
-        stateC.execute("SELECT max(NumStates), avg(NumStates) from stats")
-        maxStates, avgStates = stateC.fetchone()
-        return (maxMem, avgMem, maxStates, avgStates)
-
-
-    def __getitem__(self, index):
-        return self.lines
-
-    def __len__(self):
-        return len(self.lines)
+      memC = self.conn.cursor()
+      memC.execute("SELECT max(MallocUsage) / 1024 / 1024, avg(MallocUsage) / 1024 / 1024 from stats")
+      maxMem, avgMem = memC.fetchone()
 
+      stateC = self.conn.cursor()
+      stateC.execute("SELECT max(NumStates), avg(NumStates) from stats")
+      maxStates, avgStates = stateC.fetchone()
+      return (maxMem, avgMem, maxStates, avgStates)
 
+    def getLastRecord(self):
+      return self.line
 
 def stripCommonPathPrefix(paths):
     paths = map(os.path.normpath, paths)
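After the cleanup, LazyEvalList exposes just the two access patterns main() needs: whole-run aggregates and the latest record. A minimal usage sketch (the output directory name is hypothetical):

    records = LazyEvalList(getLogFile('klee-out-0'))
    maxMem, avgMem, maxStates, avgStates = records.aggregateRecords()
    last = records.getLastRecord()  # one tuple of columns from the stats table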
@@ -103,7 +89,6 @@ def stripCommonPathPrefix(paths):
             break
     return ['/'.join(p[i:]) for p in paths]
 
-
 def getKleeOutDirs(dirs):
     kleeOutDirs = []
     for dir in dirs:
@@ -117,7 +102,6 @@ def getKleeOutDirs(dirs):
                         kleeOutDirs.append(path)
     return kleeOutDirs
 
-
 def getLabels(pr):
     if pr == 'all':
         labels = ('Path', 'Instrs', 'Time(s)', 'ICov(%)', 'BCov(%)', 'ICount',
@@ -149,6 +133,7 @@ def getRow(record, stats, pr):
     if BTot == 0:
         BFull = BTot = 1
 
+    Ts, Tcex, Tf, Tr = [e / 1000000 for e in [Ts, Tcex, Tf, Tr]]  # convert from microseconds to seconds
     Mem = Mem / 1024 / 1024
     AvgQC = int(QCon / max(1, QTot))
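The new conversion line reflects that the timer columns are stored in microseconds, while MallocUsage is in bytes; for instance:

    Ts = 2500000 / 1000000         # 2,500,000 us  -> 2.5 s
    Mem = 268435456 / 1024 / 1024  # 268,435,456 B -> 256.0 MiB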
 
@@ -177,69 +162,60 @@ def getRow(record, stats, pr):
 
 
 def grafana(dirs):
-  dr = getLogFile(dirs[0])
-  from flask import Flask, jsonify, request
-  from json import dumps
-  import datetime
-  import time
-  app = Flask(__name__)
-
-  def toepoch(date_text):
-    dt = datetime.datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.%fZ")
-    epoch = (dt - datetime.datetime(1970, 1, 1)).total_seconds()
-    return epoch
-
-  @app.route('/')
-  def status():
-      return 'OK'
-
-  @app.route('/search', methods=['GET', 'POST'])
-  def search():
-      jsn = request.get_json()
-      conn = sqlite3.connect(dr);
-      cursor = conn.execute('SELECT * FROM stats')
-      names = [description[0] for description in cursor.description]
-      return jsonify(names)
-
-  @app.route('/query', methods=['POST'])
-  def query():
-      jsn = request.get_json()
-#      print(dumps(jsn))
-      interval = jsn["intervalMs"]
-      limit = jsn["maxDataPoints"]
-      frm = toepoch(jsn["range"]["from"])
-      to = toepoch(jsn["range"]["to"])
-      targets = [str(t["target"]) for t in jsn["targets"]]
-      startTime = os.path.getmtime(dr)
-      fromTime = frm - startTime if frm - startTime > 0 else 0
-      toTime =  to - startTime if to - startTime > fromTime else fromTime + 100
-#      print(startTime)
-      conn = sqlite3.connect(dr);
-      s = "SELECT WallTime + " + str(startTime) + ","  + ",".join(targets) + " FROM stats GROUP BY WallTime/3 LIMIT ?"
-      s = "SELECT WallTime + {startTime} , {fields} " \
-          + " FROM stats" \
-          + " WHERE WallTime >= {fromTime} AND WallTime <= {toTime}" \
-          + " GROUP BY WallTime/{intervalSeconds} LIMIT {limit}"
-      s = s.format(startTime=startTime,
-                   fields=",".join(["AVG( {0} )".format(t) for t in targets]),
-                   intervalSeconds=interval/1000,
-                   fromTime=fromTime,
-                   toTime=toTime,
-                   limit=limit)
-      cursor = conn.execute(s)
-      result = [ {"target": t, "datapoints": []} for t in targets ]
-      for line in cursor:
-          unixtimestamp = int(line[0] * 1000) #miliseconds
-          for field, datastream in zip(line[1:], result):
-              datastream["datapoints"].append([field, unixtimestamp])
-
-#      print(len(result[0]["datapoints"]))
-      ret = jsonify(result)
-#      print(result)
-      return ret
-
-  app.run()
-  return 0
+    dr = getLogFile(dirs[0])
+    from flask import Flask, jsonify, request
+    import datetime
+    app = Flask(__name__)
+
+    def toEpoch(date_text):
+        dt = datetime.datetime.strptime(date_text, "%Y-%m-%dT%H:%M:%S.%fZ")
+        epoch = (dt - datetime.datetime(1970, 1, 1)).total_seconds()
+        return epoch
+
+    @app.route('/')
+    def status():
+        return 'OK'
+
+    @app.route('/search', methods=['GET', 'POST'])
+    def search():
+        conn = sqlite3.connect(dr)
+        cursor = conn.execute('SELECT * FROM stats')
+        names = [description[0] for description in cursor.description]
+        return jsonify(names)
+
+    @app.route('/query', methods=['POST'])
+    def query():
+        jsn = request.get_json()
+        interval = jsn["intervalMs"]
+        limit = jsn["maxDataPoints"]
+        frm = toEpoch(jsn["range"]["from"])
+        to = toEpoch(jsn["range"]["to"])
+        targets = [str(t["target"]) for t in jsn["targets"]]
+        startTime = os.path.getmtime(dr)
+        fromTime = frm - startTime if frm - startTime > 0 else 0
+        toTime = to - startTime if to - startTime > fromTime else fromTime + 100
+        sqlTarget = ",".join(["AVG( {0} )".format(t) for t in targets if t.isalnum()])
+
+        conn = sqlite3.connect(dr)
+
+        s = "SELECT WallTime + ? , {fields} " \
+            + " FROM stats" \
+            + " WHERE WallTime >= ? AND WallTime <= ?" \
+            + " GROUP BY WallTime/? LIMIT ?"
+        s = s.format(fields=sqlTarget)  # can't use prepared statements for the field list
+
+        cursor = conn.execute(s, ( startTime, fromTime, toTime, interval/1000, limit))
+        result = [ {"target": t, "datapoints": []} for t in targets ]
+        for line in cursor:
+            unixtimestamp = int(line[0]) * 1000  # milliseconds
+            for field, datastream in zip(line[1:], result):
+                datastream["datapoints"].append([field, unixtimestamp])
+
+        ret = jsonify(result)
+        return ret
+
+    app.run()
+    return 0
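The rewritten grafana() keeps the same three endpoints, which follow the shape Grafana's SimpleJSON-style datasources expect. A client-side sketch, assuming the server runs on Flask's default 127.0.0.1:5000 and that the requests package is available:

    import requests

    base = 'http://127.0.0.1:5000'
    # /search returns the column names of the stats table
    columns = requests.post(base + '/search', json={}).json()

    # /query takes the time-series request handled above; the range
    # values here are placeholders
    payload = {
        'intervalMs': 5000,
        'maxDataPoints': 100,
        'range': {'from': '2019-01-01T00:00:00.000000Z',
                  'to': '2019-01-01T01:00:00.000000Z'},
        'targets': [{'target': 'NumStates'}],
    }
    series = requests.post(base + '/query', json=payload).json()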
 
 def main():
     parser = argparse.ArgumentParser(
@@ -250,16 +226,15 @@ def main():
     parser.add_argument('dir', nargs='+', help='klee output directory')
 
     parser.add_argument('--table-format',
-                        choices=['plain', 'simple', 'grid', 'pipe', 'orgtbl',
-                                 'rst', 'mediawiki', 'latex', 'klee'],
-                        dest='tableFormat', default='klee',
-                        help='Table format for the summary.')
+                          choices=['klee'] + list(_table_formats.keys()),
+                          dest='tableFormat', default='klee',
+                          help='Table format for the summary.')
     parser.add_argument('-to-csv',
                           action='store_true', dest='toCsv',
-                          help='Dump run.stats to STDOUT in CSV format')
+                          help='Output stats as comma-separated values (CSV)')
     parser.add_argument('-grafana',
                           action='store_true', dest='grafana',
-                          help='Start a graphan web server')
+                          help='Start a grafana web server')
 
     # argument group for controlling output verboseness
     pControl = parser.add_mutually_exclusive_group(required=False)
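The 'klee' choice is the script's custom format assembled from the tabulate primitives imported at the top; every other choice now comes from tabulate's own _table_formats registry rather than a hard-coded list. A rough sketch of how such a custom format can be built (the field values are illustrative, not the script's actual definition):

    from tabulate import TableFormat, Line, DataRow, tabulate

    KleeTable = TableFormat(lineabove=Line('-', '-', '-', '-'),
                            linebelowheader=Line('-', '-', '-', '-'),
                            linebetweenrows=None,
                            linebelow=Line('-', '-', '-', '-'),
                            headerrow=DataRow('|', '|', '|'),
                            datarow=DataRow('|', '|', '|'),
                            padding=0,
                            with_header_hide=None)
    print(tabulate([[42, 3.14]], headers=['Instrs', 'Time(s)'],
                   tablefmt=KleeTable))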
@@ -314,18 +289,14 @@ def main():
         # write data
         for result in sql3_cursor:
           csv_out.writerow(result)
- 
         return
+
     if len(data) > 1:
         dirs = stripCommonPathPrefix(dirs)
     # attach the stripped path
     data = list(zip(dirs, data))
 
     labels = getLabels(pr)
-    # labels in the same order as in the run.stats file. used by --compare-by.
-    # current impl needs monotonic values, so only keep the ones making sense.
-    rawLabels = ('Instrs', '', '', '', '', '', '', 'Queries',
-                 '', '', 'Time', 'ICov', '', '', '', '', '', '')
 
     # build the main body of the table
     table = []
@@ -335,8 +306,8 @@ def main():
         row = [path]
         stats = records.aggregateRecords()
         totStats.append(stats)
-        row.extend(getRow(records[-1], stats, pr))
-        totRecords.append(records[-1])
+        row.extend(getRow(records.getLastRecord(), stats, pr))
+        totRecords.append(records.getLastRecord())
         table.append(row)
     # calculate the total
     totRecords = [sum(e) for e in zip(*totRecords)]
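The totals row is produced by transposing the per-directory record tuples with zip(*...) and summing each column; a tiny worked example of the idiom:

    rows = [(1, 10), (2, 20), (3, 30)]         # one stats tuple per directory
    totals = [sum(col) for col in zip(*rows)]  # zip(*rows) -> (1, 2, 3), (10, 20, 30)
    assert totals == [6, 60]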