blob: 25721eba8b71bd3cb19484a27f9dfe9968e2df5f [file] [log] [blame]
koder aka kdanilov6c491062015-04-09 22:33:13 +03001import re
2import json
3
4
koder aka kdanilovcff7b2e2015-04-18 20:48:15 +03005from wally.utils import ssize_to_b
6from wally.statistic import med_dev
koder aka kdanilov6c491062015-04-09 22:33:13 +03007
8
def parse_output(out_err):
    """Yield every result dict embedded in a tool's combined stdout/stderr.

    Results appear as text blocks delimited by markers of the form
    "=== RESULTS(format=json) === ... === END OF RESULTS ===" and an
    analogous format=eval variant.  JSON blocks are parsed with
    json.loads; eval blocks with eval().

    Parameters
    ----------
    out_err : str
        Raw captured output containing zero or more result blocks.

    Yields
    ------
    dict
        One parsed result per marker block, json blocks first.
    """
    end_patt = r"(?ims)=+\s+END OF RESULTS\s+=+"

    # json-formatted result blocks
    start_patt = r"(?ims)=+\s+RESULTS\(format=json\)\s+=+"
    for block in re.split(start_patt, out_err)[1:]:
        # maxsplit=1 guards against a stray END marker further down the
        # stream (the original 2-tuple unpacking raised ValueError there)
        data = re.split(end_patt, block, 1)[0]
        yield json.loads(data.strip())

    # eval-formatted result blocks
    start_patt = r"(?ims)=+\s+RESULTS\(format=eval\)\s+=+"
    for block in re.split(start_patt, out_err)[1:]:
        data = re.split(end_patt, block, 1)[0]
        # SECURITY: eval() executes arbitrary code.  Acceptable only
        # because this output comes from our own test tool; switch to
        # ast.literal_eval if the input can ever be untrusted.
        yield eval(data.strip())
23
24
def filter_data(name_prefix, fields_to_select, **filters):
    """Build a generator function that selects fields from matching results.

    Parameters
    ----------
    name_prefix : str or None
        If given, only results whose 'jobname' starts with this prefix
        are kept.
    fields_to_select : sequence of str
        Keys to extract (via dict.get) from each surviving result.
    **filters
        Additional exact-match key/value constraints on each result.

    Returns
    -------
    callable
        A function taking an iterable of result dicts and yielding, for
        each match, a map object of the selected field values.
    """
    def closure(data):
        for entry in data:
            # prefix guard: skip jobs outside the requested namespace
            if name_prefix is not None and \
                    not entry['jobname'].startswith(name_prefix):
                continue

            # every extra filter must match exactly
            if all(entry.get(key) == want for key, want in filters.items()):
                yield map(entry.get, fields_to_select)
    return closure
38
39
def load_data(raw_data):
    """Parse raw benchmark output and yield enriched per-job result dicts.

    Takes the first parsed block from parse_output(), then for every
    entry under its 'res' key adds:

    * 'blocksize_b'  - block size converted to bytes via ssize_to_b
    * '<m>_mediana' / '<m>_stddev' for each metric m in iops, bw, lat,
      computed with med_dev over the raw sample lists.

    Yields the mutated per-job dicts one at a time.
    """
    parsed = list(parse_output(raw_data))[0]

    for job_stats in parsed['res'].values():
        job_stats['blocksize_b'] = ssize_to_b(job_stats['blocksize'])

        for metric in ('iops', 'bw', 'lat'):
            mediana, stddev = med_dev(job_stats[metric])
            job_stats[metric + '_mediana'] = mediana
            job_stats[metric + '_stddev'] = stddev

        yield job_stats
50
51
def load_files(*fnames):
    """Read each named file and yield every result produced by load_data.

    Parameters
    ----------
    *fnames : str
        Paths of files containing raw benchmark output.

    Yields
    ------
    dict
        Enriched per-job result dicts, in file order.
    """
    for fname in fnames:
        # use a context manager so the file handle is always closed
        # (the original open() call leaked the descriptor)
        with open(fname) as fd:
            raw = fd.read()
        for result in load_data(raw):
            yield result