#!/usr/bin/python3

"""Python script to convert output from mutilate to rmit-like output."""

import argparse
import json
import locale
import os
import re
import sys

# mutilate may print numbers with thousands separators (the QPS regex below
# accepts commas), so parse them with locale.atof under the en_US locale.
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')

parser = argparse.ArgumentParser(description='Python script to convert output from mutilate to rmit-like output')
parser.add_argument('--out', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
parser.add_argument('--var', nargs='?', type=str, default='Target QPS')
try:
    options = parser.parse_args()
except SystemExit:
    print('ERROR: invalid arguments', file=sys.stderr)
    parser.print_help(sys.stderr)
    sys.exit(1)

# Process every result file found in the current working directory.
thisdir = os.getcwd()
dirs = os.listdir(thisdir)

# Prefixes of the result files to look for.
names = ['fibre', 'forall', 'vanilla']
names_re = '|'.join(names)


def percentile(line):
    """Extract the average, median, and tail (99th-percentile) latency from a
    mutilate 'read' or 'update' line."""
    fields = line.split()
    try:
        latAvs = fields[1]
        lat50s = fields[6]
        lat99s = fields[9]
    except IndexError:
        raise Warning("Warning: \"{}\"! insufficient fields".format(line))

    try:
        latAv = locale.atof(latAvs)
        lat50 = locale.atof(lat50s)
        lat99 = locale.atof(lat99s)
    except ValueError:
        raise Warning("Warning: \"{}\" \"{}\" \"{}\"! can't convert to float".format(latAvs, lat50s, lat99s))

    return latAv, lat50, lat99


def want0(line):
    """Warn if a counter line (Misses, Skipped TXs) does not report zero."""
    line = line.strip()
    if not line.endswith("= 0 (0.0%)"):
        raise Warning("Warning: \"{}\"! should be 0".format(line))


def extract(filename, out):
    """Parse one mutilate output file and fill 'out' with QPS and latency figures.

    Returns a list of warning strings for anything that could not be parsed.
    """
    with open(filename, "r") as file:
        lines = file.readlines()

    warns = []
    for line in lines:
        try:
            if line.startswith("read"):
                rlatAv, rlat50, rlat99 = percentile(line)
            elif line.startswith("update"):
                ulatAv, ulat50, ulat99 = percentile(line)
            elif line.startswith("Total QPS"):
                match = re.search(r"Total QPS = ([0-9,.]+)", line)
                if match:
                    try:
                        qps = locale.atof(match[1])
                    except ValueError:
                        raise Warning("Warning: \"{}\" can't convert qps to float".format(match[1]))
                else:
                    raise Warning("Warning: \"{}\" line unreadable".format(line))
            if line.startswith("Misses") or line.startswith("Skipped TXs"):
                want0(line)
        except Warning as w:
            warns.append(str(w))

    # The variables below are only bound if the matching line was seen and
    # parsed; a NameError here means the corresponding figure is missing.
    try:
        out['Actual QPS'] = qps
    except NameError:
        warns.append("Warning: No total QPS")

    try:
        out['Average Read Latency'] = rlatAv
        out['Median Read Latency'] = rlat50
        out['Tail Read Latency'] = rlat99
    except NameError:
        warns.append("Warning: no read latencies")

    try:
        out['Average Update Latency'] = ulatAv
        out['Median Update Latency'] = ulat50
        out['Tail Update Latency'] = ulat99
    except NameError:
        warns.append("Warning: no update latencies")

    return warns


data = []
for filename in dirs:
    f = os.path.join(thisdir, filename)
    # Only consider regular files named like "<name>.<number>.<number>.<number>".
    if os.path.isfile(f):
        match = re.search(r"({})\.([0-9]+)\.([0-9]+)\.([0-9]+)".format(names_re), filename)
        print(filename, match)
        if not match:
            continue
        series = "{}-{}%".format(match[1], match[3])
        rate = match[2]
        rep = match[4]
        d = {options.var: int(rate)}
        w = extract(f, d)
        data.append([series, "memcached {}".format(series), d])
        if w:
            print("{} {} {}\n{}\n".format(series, rate, rep, '\n'.join(w)))

# Emit the collected series as a single JSON document.
options.out.write(json.dumps(data))
options.out.flush()
options.out.write("\n")
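
# Example invocation (a sketch; the concrete file name and numbers below are
# illustrative assumptions, not part of this script):
#
#   cd <directory containing mutilate output files, e.g. "fibre.25000.50.1">
#   python3 <this script> --out results.json --var "Target QPS"
#
# For a file named "fibre.25000.50.1", the series label becomes "fibre-50%",
# the second number (25000) is stored under the --var key (default
# "Target QPS"), and the last number (1) is only used, together with the
# series and rate, when printing parse warnings. The result is written to
# --out (stdout by default) as a single JSON array followed by a newline.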