import os
import csv
import bisect
import logging
import itertools
import collections
from io import StringIO
from typing import Dict, Any, Iterator, Tuple, cast

try:
    import numpy
    import scipy
    import matplotlib
    matplotlib.use('svg')
    import matplotlib.pyplot as plt
except ImportError:
    plt = None

import wally
from .utils import ssize2b
from .statistic import round_3_digit
from .storage import Storage
from .stage import Stage, StepOrder
from .test_run_class import TestRun
from .result_classes import TestInfo, FullTestResult, SensorInfo
from .suits.io.fio_task_parser import (get_test_sync_mode,
                                       get_test_summary,
                                       parse_all_in_1,
                                       abbv_name_to_full)


logger = logging.getLogger("wally")


def load_test_results(storage: Storage) -> Iterator[FullTestResult]:
    sensors_data = {}  # type: Dict[Tuple[str, str, str], SensorInfo]

    mstorage = storage.sub_storage("metric")
    for _, node_id in mstorage.list():
        for _, dev_name in mstorage.list(node_id):
            for _, sensor_name in mstorage.list(node_id, dev_name):
                key = (node_id, dev_name, sensor_name)
                si = SensorInfo(*key)
                si.begin_time, si.end_time, si.data = mstorage[node_id, dev_name, sensor_name]  # type: ignore
                sensors_data[key] = si

    rstorage = storage.sub_storage("result")
    for _, run_id in rstorage.list():
        ftr = FullTestResult()
        ftr.test_info = rstorage.load(TestInfo, run_id, "info")
        ftr.performance_data = {}

        p1 = "{}/measurement".format(run_id)
        for _, node_id in rstorage.list(p1):
            for _, measurement_name in rstorage.list(p1, node_id):
                perf_key = (node_id, measurement_name)
                ftr.performance_data[perf_key] = rstorage["{}/{}/{}".format(p1, *perf_key)]  # type: ignore

        yield ftr


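# Usage sketch (hypothetical: assumes the caller already has an opened
# Storage instance; `open_storage` below is a made-up name for the example):
#
#     storage = open_storage("/var/wally/run_1")
#     for ftr in load_test_results(storage):
#         print(ftr.test_info, sorted(ftr.performance_data))
#
# Metrics are laid out as "metric/<node_id>/<dev_name>/<sensor_name>" and
# results as "result/<run_id>/measurement/<node_id>/<measurement_name>".

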
class ConsoleReportStage(Stage):

    priority = StepOrder.REPORT

    def run(self, ctx: TestRun) -> None:
        # TODO(koder): load data from storage
        raise NotImplementedError("...")


class HtmlReportStage(Stage):

    priority = StepOrder.REPORT

    def run(self, ctx: TestRun) -> None:
        # TODO(koder): load data from storage
        raise NotImplementedError("...")


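# ---------------------------------------------------------------------------
# Everything below is the pre-refactoring reporting code, kept commented out
# for reference until it is ported onto the new Storage-based stages above.
# ---------------------------------------------------------------------------
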
# class StoragePerfInfo:
#     def __init__(self, name: str, summary: Any, params, testnodes_count) -> None:
#         self.direct_iops_r_max = 0  # type: int
#         self.direct_iops_w_max = 0  # type: int
#
#         # 64k is used instead of 4k to feed caches faster
#         self.direct_iops_w64_max = 0  # type: int
#
#         self.rws4k_10ms = 0  # type: int
#         self.rws4k_30ms = 0  # type: int
#         self.rws4k_100ms = 0  # type: int
#         self.bw_write_max = 0  # type: int
#         self.bw_read_max = 0  # type: int
#
#         self.bw = None
#         self.iops = None
#         self.lat = None
#         self.lat_50 = None
#         self.lat_95 = None
#
#
# # disk_info = None
# # base = None
# # linearity = None
#
#
# def group_by_name(test_data):
#     name_map = collections.defaultdict(lambda: [])
#
#     for data in test_data:
#         name_map[(data.name, data.summary())].append(data)
#
#     return name_map
#
#
# def report(name, required_fields):
#     def closure(func):
#         report_funcs.append((required_fields.split(","), name, func))
#         return func
#     return closure
#
#
# def get_test_lcheck_params(pinfo):
#     res = [{
#         's': 'sync',
#         'd': 'direct',
#         'a': 'async',
#         'x': 'sync direct'
#     }[pinfo.sync_mode]]
#
#     res.append(pinfo.p.rw)
#
#     return " ".join(res)
#
#
# def get_emb_data_svg(plt):
#     sio = StringIO()
#     plt.savefig(sio, format='svg')
#     img_start = "<!-- Created with matplotlib (http://matplotlib.org/) -->"
#     return sio.getvalue().split(img_start, 1)[1]
#
#
# def get_template(templ_name):
#     very_root_dir = os.path.dirname(os.path.dirname(wally.__file__))
#     templ_dir = os.path.join(very_root_dir, 'report_templates')
#     templ_file = os.path.join(templ_dir, templ_name)
#     return open(templ_file, 'r').read()
#
#
# def group_by(data, func):
#     if len(data) < 2:
#         yield data
#         return
#
#     ndata = [(func(dt), dt) for dt in data]
#     ndata.sort(key=lambda x: x[0])
#     pkey, dt = ndata[0]
#     curr_list = [dt]
#
#     for key, val in ndata[1:]:
#         if pkey != key:
#             yield curr_list
#             curr_list = [val]
#         else:
#             curr_list.append(val)
#         pkey = key
#
#     yield curr_list
#
#
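# For instance (sketch): group_by(["a", "bb", "cc", "ddd"], len) yields
# ["a"], then ["bb", "cc"], then ["ddd"]: consecutive runs of equal keys
# after sorting.
#
#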
# @report('linearity', 'linearity_test')
# def linearity_report(processed_results, lab_info, comment):
#     labels_and_data_mp = collections.defaultdict(lambda: [])
#     vls = {}
#
#     # plot io_time = func(bsize)
#     for res in processed_results.values():
#         if res.name.startswith('linearity_test'):
#             iotimes = [1000. / val for val in res.iops.raw]
#
#             op_summ = get_test_summary(res.params)[:3]
#
#             labels_and_data_mp[op_summ].append(
#                 [res.p.blocksize, res.iops.raw, iotimes])
#
#             cvls = res.params.vals.copy()
#             del cvls['blocksize']
#             del cvls['rw']
#
#             cvls.pop('sync', None)
#             cvls.pop('direct', None)
#             cvls.pop('buffered', None)
#
#             if op_summ not in vls:
#                 vls[op_summ] = cvls
#             else:
#                 assert cvls == vls[op_summ]
#
#     all_labels = None
#     _, ax1 = plt.subplots()
#     for name, labels_and_data in labels_and_data_mp.items():
#         labels_and_data.sort(key=lambda x: ssize2b(x[0]))
#
#         labels, _, iotimes = zip(*labels_and_data)
#
#         if all_labels is None:
#             all_labels = labels
#         else:
#             assert all_labels == labels
#
#         plt.boxplot(iotimes)
#         if len(labels_and_data) > 2 and \
#                 ssize2b(labels_and_data[-2][0]) >= 4096:
#
#             xt = range(1, len(labels) + 1)
#
#             def io_time(sz, bw, initial_lat):
#                 return sz / bw + initial_lat
#
#             x = numpy.array(list(map(ssize2b, labels)))
#             y = numpy.array([sum(dt) / len(dt) for dt in iotimes])
#             popt, _ = scipy.optimize.curve_fit(io_time, x, y, p0=(100., 1.))
#
#             y1 = io_time(x, *popt)
#             plt.plot(xt, y1, linestyle='--',
#                      label=name + ' LS linear approx')
#
#             for idx, (sz, _, _) in enumerate(labels_and_data):
#                 if ssize2b(sz) >= 4096:
#                     break
#
#             bw = (x[-1] - x[idx]) / (y[-1] - y[idx])
#             lat = y[-1] - x[-1] / bw
#             y2 = io_time(x, bw, lat)
#             plt.plot(xt, y2, linestyle='--',
#                      label=abbv_name_to_full(name) +
#                      ' (4k & max) linear approx')
#
#     plt.setp(ax1, xticklabels=labels)
#
#     plt.xlabel("Block size")
#     plt.ylabel("IO time, ms")
#
#     plt.subplots_adjust(top=0.85)
#     plt.legend(bbox_to_anchor=(0.5, 1.15),
#                loc='upper center',
#                prop={'size': 10}, ncol=2)
#     plt.grid()
#     iotime_plot = get_emb_data_svg(plt)
#     plt.clf()
#
#     # plot IOPS = func(bsize)
#     _, ax1 = plt.subplots()
#
#     for name, labels_and_data in labels_and_data_mp.items():
#         labels_and_data.sort(key=lambda x: ssize2b(x[0]))
#         _, data, _ = zip(*labels_and_data)
#         plt.boxplot(data)
#         avg = [float(sum(arr)) / len(arr) for arr in data]
#         xt = range(1, len(data) + 1)
#         plt.plot(xt, avg, linestyle='--',
#                  label=abbv_name_to_full(name) + " avg")
#
#     plt.setp(ax1, xticklabels=labels)
#     plt.xlabel("Block size")
#     plt.ylabel("IOPS")
#     plt.legend(bbox_to_anchor=(0.5, 1.15),
#                loc='upper center',
#                prop={'size': 10}, ncol=2)
#     plt.grid()
#     plt.subplots_adjust(top=0.85)
#
#     iops_plot = get_emb_data_svg(plt)
#
#     res = set(get_test_lcheck_params(res) for res in processed_results.values())
#     ncount = list(set(res.testnodes_count for res in processed_results.values()))
#     conc = list(set(res.concurence for res in processed_results.values()))
#
#     assert len(conc) == 1
#     assert len(ncount) == 1
#
#     descr = {
#         'vm_count': ncount[0],
#         'concurence': conc[0],
#         'oper_descr': ", ".join(res).capitalize()
#     }
#
#     params_map = {'iotime_vs_size': iotime_plot,
#                   'iops_vs_size': iops_plot,
#                   'descr': descr}
#
#     return get_template('report_linearity.html').format(**params_map)
#
#
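# The model fitted above is io_time(sz) = sz / bw + initial_lat (transfer time
# plus a constant setup latency): e.g. with bw = 100 bytes/ms and
# initial_lat = 1 ms, a 400-byte request is predicted to take
# 400 / 100 + 1 = 5 ms (illustrative numbers only).
#
#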
# @report('lat_vs_iops', 'lat_vs_iops')
# def lat_vs_iops(processed_results, lab_info, comment):
#     lat_iops = collections.defaultdict(lambda: [])
#     requested_vs_real = collections.defaultdict(lambda: {})
#
#     for res in processed_results.values():
#         if res.name.startswith('lat_vs_iops'):
#             lat_iops[res.concurence].append((res.lat,
#                                              0,
#                                              res.iops.average,
#                                              res.iops.deviation))
#             # lat_iops[res.concurence].append((res.lat.average / 1000.0,
#             #                                  res.lat.deviation / 1000.0,
#             #                                  res.iops.average,
#             #                                  res.iops.deviation))
#             requested_iops = res.p.rate_iops * res.concurence
#             requested_vs_real[res.concurence][requested_iops] = \
#                 (res.iops.average, res.iops.deviation)
#
#     colors = ['red', 'green', 'blue', 'orange', 'magenta', "teal"]
#     colors_it = iter(colors)
#     for conc, lat_iops in sorted(lat_iops.items()):
#         lat, dev, iops, iops_dev = zip(*lat_iops)
#         plt.errorbar(iops, lat, xerr=iops_dev, yerr=dev, fmt='ro',
#                      label=str(conc) + " threads",
#                      color=next(colors_it))
#
#     plt.xlabel("IOPS")
#     plt.ylabel("Latency, ms")
#     plt.grid()
#     plt.legend(loc=0)
#     plt_iops_vs_lat = get_emb_data_svg(plt)
#     plt.clf()
#
#     colors_it = iter(colors)
#     for conc, req_vs_real in sorted(requested_vs_real.items()):
#         req, real = zip(*sorted(req_vs_real.items()))
#         iops, dev = zip(*real)
#         plt.errorbar(req, iops, yerr=dev, fmt='ro',
#                      label=str(conc) + " threads",
#                      color=next(colors_it))
#     plt.xlabel("Requested IOPS")
#     plt.ylabel("Achieved IOPS")
#     plt.grid()
#     plt.legend(loc=0)
#     plt_iops_vs_requested = get_emb_data_svg(plt)
#
#     res1 = list(processed_results.values())[0]
#     params_map = {'iops_vs_lat': plt_iops_vs_lat,
#                   'iops_vs_requested': plt_iops_vs_requested,
#                   'oper_descr': get_test_lcheck_params(res1).capitalize()}
#
#     return get_template('report_iops_vs_lat.html').format(**params_map)
#
#
# def render_all_html(comment, info, lab_description, images, templ_name):
#     data = info.__dict__.copy()
#     for name, val in data.items():
#         if not name.startswith('__'):
#             if val is None:
#                 if name in ('direct_iops_w64_max', 'direct_iops_w_max'):
#                     data[name] = ('-', '-', '-')
#                 else:
#                     data[name] = '-'
#             elif isinstance(val, (int, float)):
#                 data[name] = round_3_digit(val)
#
#     data['bw_read_max'] = (data['bw_read_max'][0] // 1024,
#                            data['bw_read_max'][1],
#                            data['bw_read_max'][2])
#
#     data['bw_write_max'] = (data['bw_write_max'][0] // 1024,
#                             data['bw_write_max'][1],
#                             data['bw_write_max'][2])
#
#     images.update(data)
#     templ = get_template(templ_name)
#     return templ.format(lab_info=lab_description,
#                         comment=comment,
#                         **images)
#
#
# def io_chart(title, concurence,
#              latv, latv_min, latv_max,
#              iops_or_bw, iops_or_bw_err,
#              legend,
#              log_iops=False,
#              log_lat=False,
#              boxplots=False,
#              latv_50=None,
#              latv_95=None,
#              error2=None):
#
#     matplotlib.rcParams.update({'font.size': 10})
#     points = " MiBps" if legend == 'BW' else ""
#     lc = len(concurence)
#     width = 0.35
#     xt = range(1, lc + 1)
#
#     op_per_vm = [v / (vm * th) for v, (vm, th) in zip(iops_or_bw, concurence)]
#     fig, p1 = plt.subplots()
#     xpos = [i - width / 2 for i in xt]
#
#     p1.bar(xpos, iops_or_bw,
#            width=width,
#            color='y',
#            label=legend)
#
#     err1_leg = None
#     for pos, y, err in zip(xpos, iops_or_bw, iops_or_bw_err):
#         err1_leg = p1.errorbar(pos + width / 2,
#                                y,
#                                err,
#                                color='magenta')
#
#     err2_leg = None
#     if error2 is not None:
#         for pos, y, err in zip(xpos, iops_or_bw, error2):
#             err2_leg = p1.errorbar(pos + width / 2 + 0.08,
#                                    y,
#                                    err,
#                                    lw=2,
#                                    alpha=0.5,
#                                    color='teal')
#
#     p1.grid(True)
#     p1.plot(xt, op_per_vm, '--', label=legend + "/thread", color='black')
#     handles1, labels1 = p1.get_legend_handles_labels()
#
#     handles1 += [err1_leg]
#     labels1 += ["95% conf"]
#
#     if err2_leg is not None:
#         handles1 += [err2_leg]
#         labels1 += ["95% dev"]
#
#     p2 = p1.twinx()
#
#     if latv_50 is None:
#         p2.plot(xt, latv_max, label="lat max")
#         p2.plot(xt, latv, label="lat avg")
#         p2.plot(xt, latv_min, label="lat min")
#     else:
#         p2.plot(xt, latv_50, label="lat med")
#         p2.plot(xt, latv_95, label="lat 95%")
#
#     plt.xlim(0.5, lc + 0.5)
#     plt.xticks(xt, ["{0} * {1}".format(vm, th) for (vm, th) in concurence])
#     p1.set_xlabel("VM Count * Thread per VM")
#     p1.set_ylabel(legend + points)
#     p2.set_ylabel("Latency ms")
#     plt.title(title)
#     handles2, labels2 = p2.get_legend_handles_labels()
#
#     plt.legend(handles1 + handles2, labels1 + labels2,
#                loc='center left', bbox_to_anchor=(1.1, 0.81))
#
#     if log_iops:
#         p1.set_yscale('log')
#
#     if log_lat:
#         p2.set_yscale('log')
#
#     plt.subplots_adjust(right=0.68)
#
#     return get_emb_data_svg(plt)
#
#
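# In io_chart above, `concurence` is a list of (vm_count, threads_per_vm)
# pairs: e.g. [(2, 4), (2, 8)] produces x labels "2 * 4" and "2 * 8", while
# op_per_vm divides each bar by 8 and 16 total threads respectively.
#
#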
# def make_plots(processed_results, plots):
#     """
#     processed_results: [PerfInfo]
#     plots = [(test_name_prefix:str, fname:str, description:str)]
#     """
#     files = {}
#     for name_pref, fname, desc in plots:
#         chart_data = []
#
#         for res in processed_results:
#             summ = res.name + "_" + res.summary
#             if summ.startswith(name_pref):
#                 chart_data.append(res)
#
#         if len(chart_data) == 0:
#             raise ValueError("Can't find any data for " + name_pref)
#
#         use_bw = ssize2b(chart_data[0].p.blocksize) > 16 * 1024
#
#         chart_data.sort(key=lambda x: x.params['vals']['numjobs'])
#
#         lat = None
#         lat_min = None
#         lat_max = None
#
#         lat_50 = [x.lat_50 for x in chart_data]
#         lat_95 = [x.lat_95 for x in chart_data]
#
#         lat_diff_max = max(x.lat_95 / x.lat_50 for x in chart_data)
#         lat_log_scale = (lat_diff_max > 10)
#
#         testnodes_count = chart_data[0].testnodes_count
#         concurence = [(testnodes_count, x.concurence)
#                       for x in chart_data]
#
#         if use_bw:
#             data = [x.bw.average / 1000 for x in chart_data]
#             data_conf = [x.bw.confidence / 1000 for x in chart_data]
#             data_dev = [x.bw.deviation * 2.5 / 1000 for x in chart_data]
#             name = "BW"
#         else:
#             data = [x.iops.average for x in chart_data]
#             data_conf = [x.iops.confidence for x in chart_data]
#             data_dev = [x.iops.deviation * 2 for x in chart_data]
#             name = "IOPS"
#
#         fc = io_chart(title=desc,
#                       concurence=concurence,
#
#                       latv=lat,
#                       latv_min=lat_min,
#                       latv_max=lat_max,
#
#                       iops_or_bw=data,
#                       iops_or_bw_err=data_conf,
#
#                       legend=name,
#                       log_lat=lat_log_scale,
#
#                       latv_50=lat_50,
#                       latv_95=lat_95,
#
#                       error2=data_dev)
#         files[fname] = fc
#
#     return files
#
#
# def find_max_where(processed_results, sync_mode, blocksize, rw, iops=True):
#     result = None
#     attr = 'iops' if iops else 'bw'
#     for measurement in processed_results:
#         ok = measurement.sync_mode == sync_mode
#         ok = ok and (measurement.p.blocksize == blocksize)
#         ok = ok and (measurement.p.rw == rw)
#
#         if ok:
#             field = getattr(measurement, attr)
#
#             if result is None:
#                 result = field
#             elif field.average > result.average:
#                 result = field
#
#     return result
#
#
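# The helper above scans measurements matching (sync_mode, blocksize, rw) and
# returns the one whose selected field (iops by default, bw with iops=False)
# has the highest average, e.g. find_max_where(results, 'd', '4k', 'randwrite')
# picks the best direct 4k random-write result.
#
#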
# def get_disk_info(processed_results):
#     di = DiskInfo()
#     di.direct_iops_w_max = find_max_where(processed_results,
#                                           'd', '4k', 'randwrite')
#     di.direct_iops_r_max = find_max_where(processed_results,
#                                           'd', '4k', 'randread')
#
#     di.direct_iops_w64_max = find_max_where(processed_results,
#                                             'd', '64k', 'randwrite')
#
#     for sz in ('16m', '64m'):
#         di.bw_write_max = find_max_where(processed_results,
#                                          'd', sz, 'randwrite', False)
#         if di.bw_write_max is not None:
#             break
#
#     if di.bw_write_max is None:
#         for sz in ('1m', '2m', '4m', '8m'):
#             di.bw_write_max = find_max_where(processed_results,
#                                              'd', sz, 'write', False)
#             if di.bw_write_max is not None:
#                 break
#
#     for sz in ('16m', '64m'):
#         di.bw_read_max = find_max_where(processed_results,
#                                         'd', sz, 'randread', False)
#         if di.bw_read_max is not None:
#             break
#
#     if di.bw_read_max is None:
#         di.bw_read_max = find_max_where(processed_results,
#                                         'd', '1m', 'read', False)
#
#     rws4k_iops_lat_th = []
#     for res in processed_results:
#         if res.sync_mode in 'xs' and res.p.blocksize == '4k':
#             if res.p.rw != 'randwrite':
#                 continue
#             rws4k_iops_lat_th.append((res.iops.average,
#                                       res.lat,
#                                       # res.lat.average,
#                                       res.concurence))
#
#     rws4k_iops_lat_th.sort(key=lambda x: x[2])
#
#     latv = [lat for _, lat, _ in rws4k_iops_lat_th]
#
#     for tlat in [10, 30, 100]:
#         pos = bisect.bisect_left(latv, tlat)
#         if 0 == pos:
#             setattr(di, 'rws4k_{}ms'.format(tlat), 0)
#         elif pos == len(latv):
#             iops3, _, _ = rws4k_iops_lat_th[-1]
#             iops3 = int(round_3_digit(iops3))
#             setattr(di, 'rws4k_{}ms'.format(tlat), ">=" + str(iops3))
#         else:
#             lat1 = latv[pos - 1]
#             lat2 = latv[pos]
#
#             iops1, _, th1 = rws4k_iops_lat_th[pos - 1]
#             iops2, _, th2 = rws4k_iops_lat_th[pos]
#
#             th_lat_coef = (th2 - th1) / (lat2 - lat1)
#             th3 = th_lat_coef * (tlat - lat1) + th1
#
#             th_iops_coef = (iops2 - iops1) / (th2 - th1)
#             iops3 = th_iops_coef * (th3 - th1) + iops1
#             iops3 = int(round_3_digit(iops3))
#             setattr(di, 'rws4k_{}ms'.format(tlat), iops3)
#
#     hdi = DiskInfo()
#
#     def pp(x):
#         med, conf = x.rounded_average_conf()
#         conf_perc = int(float(conf) / med * 100)
#         dev_perc = int(float(x.deviation) / med * 100)
#         return (round_3_digit(med), conf_perc, dev_perc)
#
#     hdi.direct_iops_r_max = pp(di.direct_iops_r_max)
#
#     if di.direct_iops_w_max is not None:
#         hdi.direct_iops_w_max = pp(di.direct_iops_w_max)
#     else:
#         hdi.direct_iops_w_max = None
#
#     if di.direct_iops_w64_max is not None:
#         hdi.direct_iops_w64_max = pp(di.direct_iops_w64_max)
#     else:
#         hdi.direct_iops_w64_max = None
#
#     hdi.bw_write_max = pp(di.bw_write_max)
#     hdi.bw_read_max = pp(di.bw_read_max)
#
#     hdi.rws4k_10ms = di.rws4k_10ms if 0 != di.rws4k_10ms else None
#     hdi.rws4k_30ms = di.rws4k_30ms if 0 != di.rws4k_30ms else None
#     hdi.rws4k_100ms = di.rws4k_100ms if 0 != di.rws4k_100ms else None
#     return hdi
#
#
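# A worked example for the interpolation branch above (made-up numbers):
# between (iops1=900, lat1=8, th1=4) and (iops2=1200, lat2=14, th2=8), the
# target tlat=10 gives th3 = (8 - 4) / (14 - 8) * (10 - 8) + 4 = 5.33 and
# iops3 = (1200 - 900) / (8 - 4) * (5.33 - 4) + 900 = 1000.
#
#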
# @report('hdd', 'hdd')
# def make_hdd_report(processed_results, lab_info, comment):
#     plots = [
#         ('hdd_rrd4k', 'rand_read_4k', 'Random read 4k direct IOPS'),
#         ('hdd_rwx4k', 'rand_write_4k', 'Random write 4k sync IOPS')
#     ]
#     perf_infos = [res.disk_perf_info() for res in processed_results]
#     images = make_plots(perf_infos, plots)
#     di = get_disk_info(perf_infos)
#     return render_all_html(comment, di, lab_info, images, "report_hdd.html")
#
#
# @report('cinder_iscsi', 'cinder_iscsi')
# def make_cinder_iscsi_report(processed_results, lab_info, comment):
#     plots = [
#         ('cinder_iscsi_rrd4k', 'rand_read_4k', 'Random read 4k direct IOPS'),
#         ('cinder_iscsi_rwx4k', 'rand_write_4k', 'Random write 4k sync IOPS')
#     ]
#     perf_infos = [res.disk_perf_info() for res in processed_results]
#     try:
#         images = make_plots(perf_infos, plots)
#     except ValueError:
#         plots = [
#             ('cinder_iscsi_rrd4k', 'rand_read_4k', 'Random read 4k direct IOPS'),
#             ('cinder_iscsi_rws4k', 'rand_write_4k', 'Random write 4k sync IOPS')
#         ]
#         images = make_plots(perf_infos, plots)
#     di = get_disk_info(perf_infos)
#
#     return render_all_html(comment, di, lab_info, images, "report_cinder_iscsi.html")
#
#
# @report('ceph', 'ceph')
# def make_ceph_report(processed_results, lab_info, comment):
#     plots = [
#         ('ceph_rrd4k', 'rand_read_4k', 'Random read 4k direct IOPS'),
#         ('ceph_rws4k', 'rand_write_4k', 'Random write 4k sync IOPS'),
#         ('ceph_rrd16m', 'rand_read_16m', 'Random read 16m direct MiBps'),
#         ('ceph_rwd16m', 'rand_write_16m', 'Random write 16m direct MiBps'),
#     ]
#
#     perf_infos = [res.disk_perf_info() for res in processed_results]
#     images = make_plots(perf_infos, plots)
#     di = get_disk_info(perf_infos)
#     return render_all_html(comment, di, lab_info, images, "report_ceph.html")
#
#
# @report('mixed', 'mixed')
# def make_mixed_report(processed_results, lab_info, comment):
#     #
#     # IOPS(X% read) = 100 / (X / IOPS_W + (100 - X) / IOPS_R)
#     #
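#     # e.g. with IOPS_W = 1000 and IOPS_R = 4000 a 50% mix yields
#     # 100 / (50 / 1000 + 50 / 4000) = 1600 IOPS (illustrative numbers)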
#
#     perf_infos = [res.disk_perf_info() for res in processed_results]
#     mixed = collections.defaultdict(lambda: [])
#
#     is_ssd = False
#     for res in perf_infos:
#         if res.name.startswith('mixed'):
#             if res.name.startswith('mixed-ssd'):
#                 is_ssd = True
#             mixed[res.concurence].append((res.p.rwmixread,
#                                           res.lat,
#                                           0,
#                                           # res.lat.average / 1000.0,
#                                           # res.lat.deviation / 1000.0,
#                                           res.iops.average,
#                                           res.iops.deviation))
#
#     if len(mixed) == 0:
#         raise ValueError("No mixed load found")
#
#     fig, p1 = plt.subplots()
#     p2 = p1.twinx()
#
#     colors = ['red', 'green', 'blue', 'orange', 'magenta', "teal"]
#     colors_it = iter(colors)
#     for conc, mix_lat_iops in sorted(mixed.items()):
#         mix_lat_iops = sorted(mix_lat_iops)
#         read_perc, lat, dev, iops, iops_dev = zip(*mix_lat_iops)
#         p1.errorbar(read_perc, iops, color=next(colors_it),
#                     yerr=iops_dev, label=str(conc) + " th")
#
#         p2.errorbar(read_perc, lat, color=next(colors_it),
#                     ls='--', yerr=dev, label=str(conc) + " th lat")
#
#     if is_ssd:
#         p1.set_yscale('log')
#         p2.set_yscale('log')
#
#     p1.set_xlim(-5, 105)
#
#     read_perc = set(read_perc)
#     read_perc.add(0)
#     read_perc.add(100)
#     read_perc = sorted(read_perc)
#
#     plt.xticks(read_perc, list(map(str, read_perc)))
#
#     p1.grid(True)
#     p1.set_xlabel("% of reads")
#     p1.set_ylabel("Mixed IOPS")
#     p2.set_ylabel("Latency, ms")
#
#     handles1, labels1 = p1.get_legend_handles_labels()
#     handles2, labels2 = p2.get_legend_handles_labels()
#     plt.subplots_adjust(top=0.85)
#     plt.legend(handles1 + handles2, labels1 + labels2,
#                bbox_to_anchor=(0.5, 1.15),
#                loc='upper center',
#                prop={'size': 12}, ncol=3)
#     plt.show()
#
#
# def make_load_report(idx, results_dir, fname):
#     dpath = os.path.join(results_dir, "io_" + str(idx))
#     files = sorted(os.listdir(dpath))
#     gf = lambda x: "_".join(x.rsplit(".", 1)[0].split('_')[:3])
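#     # gf groups log files by the first three "_"-separated tokens of the
#     # file stem, e.g. gf("hdd_rrd4k_th5_lat.log") == "hdd_rrd4k_th5"
#     # (hypothetical file name)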
#
#     for key, group in itertools.groupby(files, gf):
#         fname = os.path.join(dpath, key + ".fio")
#
#         cfgs = list(parse_all_in_1(open(fname).read(), fname))
#
#         fname = os.path.join(dpath, key + "_lat.log")
#
#         curr = []
#         arrays = []
#
#         with open(fname) as fd:
#             for offset, lat, _, _ in csv.reader(fd):
#                 offset = int(offset)
#                 lat = int(lat)
#                 if len(curr) > 0 and curr[-1][0] > offset:
#                     arrays.append(curr)
#                     curr = []
#                 curr.append((offset, lat))
#             arrays.append(curr)
#         conc = int(cfgs[0].vals.get('numjobs', 1))
#
#         if conc != 5:
#             continue
#
#         assert len(arrays) == len(cfgs) * conc
#
#         garrays = [[(0, 0)] for _ in range(conc)]
#
#         for offset in range(len(cfgs)):
#             for acc, new_arr in zip(garrays, arrays[offset * conc:(offset + 1) * conc]):
#                 last = acc[-1][0]
#                 for off, lat in new_arr:
#                     acc.append((off / 1000. + last, lat / 1000.))
#
#         for cfg, arr in zip(cfgs, garrays):
#             plt.plot(*zip(*arr[1:]))
#         plt.show()
#         exit(1)
#
#
# def make_io_report(dinfo, comment, path, lab_info=None):
#     lab_info = {
#         "total_disk": "None",
#         "total_memory": "None",
#         "nodes_count": "None",
#         "processor_count": "None"
#     }
#
#     try:
#         res_fields = sorted(v.name for v in dinfo)
#
#         found = False
#         for fields, name, func in report_funcs:
#             for field in fields:
#                 pos = bisect.bisect_left(res_fields, field)
#
#                 if pos == len(res_fields):
#                     break
#
#                 if not res_fields[pos].startswith(field):
#                     break
#             else:
#                 found = True
#                 hpath = path.format(name)
#
#                 try:
#                     report = func(dinfo, lab_info, comment)
#                 except Exception:
#                     logger.exception("During {0} report generation".format(name))
#                     continue
#
#                 if report is not None:
#                     try:
#                         with open(hpath, "w") as fd:
#                             fd.write(report)
#                     except Exception:
#                         logger.exception("While saving {0} report".format(name))
#                         continue
#                     logger.info("Report {0} saved into {1}".format(name, hpath))
#                 else:
#                     logger.warning("No report produced by {0!r}".format(name))
#
#         if not found:
#             logger.warning("No report generator found for this load")
#
#     except Exception as exc:
#         import traceback
#         traceback.print_exc()
#         logger.error("Failed to generate html report:" + str(exc))
#
#
# # @classmethod
# # def prepare_data(cls, results) -> List[Dict[str, Any]]:
# #     """create a table with io performance report for console"""
# #
# #     def key_func(data: FioRunResult) -> Tuple[str, str, str, str, int]:
# #         tpl = data.summary_tpl()
# #         return (data.name,
# #                 tpl.oper,
# #                 tpl.mode,
# #                 ssize2b(tpl.bsize),
# #                 int(tpl.th_count) * int(tpl.vm_count))
# #     res = []
# #
# #     for item in sorted(results, key=key_func):
# #         test_dinfo = item.disk_perf_info()
# #         testnodes_count = len(item.config.nodes)
# #
# #         iops, _ = test_dinfo.iops.rounded_average_conf()
# #
# #         if test_dinfo.iops_sys is not None:
# #             iops_sys, iops_sys_conf = test_dinfo.iops_sys.rounded_average_conf()
# #             _, iops_sys_dev = test_dinfo.iops_sys.rounded_average_dev()
# #             iops_sys_per_vm = round_3_digit(iops_sys / testnodes_count)
# #             iops_sys = round_3_digit(iops_sys)
# #         else:
# #             iops_sys = None
# #             iops_sys_per_vm = None
# #             iops_sys_dev = None
# #             iops_sys_conf = None
# #
# #         bw, bw_conf = test_dinfo.bw.rounded_average_conf()
# #         _, bw_dev = test_dinfo.bw.rounded_average_dev()
# #         conf_perc = int(round(bw_conf * 100 / bw))
# #         dev_perc = int(round(bw_dev * 100 / bw))
# #
# #         lat_50 = round_3_digit(int(test_dinfo.lat_50))
# #         lat_95 = round_3_digit(int(test_dinfo.lat_95))
# #         lat_avg = round_3_digit(int(test_dinfo.lat_avg))
# #
# #         iops_per_vm = round_3_digit(iops / testnodes_count)
# #         bw_per_vm = round_3_digit(bw / testnodes_count)
# #
# #         iops = round_3_digit(iops)
# #         bw = round_3_digit(bw)
# #
# #         summ = "{0.oper}{0.mode} {0.bsize:>4} {0.th_count:>3}th {0.vm_count:>2}vm".format(item.summary_tpl())
# #
# #         res.append({"name": key_func(item)[0],
# #                     "key": key_func(item)[:4],
# #                     "summ": summ,
# #                     "iops": int(iops),
# #                     "bw": int(bw),
# #                     "conf": str(conf_perc),
# #                     "dev": str(dev_perc),
# #                     "iops_per_vm": int(iops_per_vm),
# #                     "bw_per_vm": int(bw_per_vm),
# #                     "lat_50": lat_50,
# #                     "lat_95": lat_95,
# #                     "lat_avg": lat_avg,
# #
# #                     "iops_sys": iops_sys,
# #                     "iops_sys_per_vm": iops_sys_per_vm,
# #                     "sys_conf": iops_sys_conf,
# #                     "sys_dev": iops_sys_dev})
# #
# #     return res
# #
# # Field = collections.namedtuple("Field", ("header", "attr", "align", "size"))
# # fields_and_header = [
# #     Field("Name",           "name",        "l",  7),
# #     Field("Description",    "summ",        "l", 19),
# #     Field("IOPS\ncum",      "iops",        "r",  3),
# #     # Field("IOPS_sys\ncum", "iops_sys",   "r",  3),
# #     Field("KiBps\ncum",     "bw",          "r",  6),
# #     Field("Cnf %\n95%",     "conf",        "r",  3),
# #     Field("Dev%",           "dev",         "r",  3),
# #     Field("iops\n/vm",      "iops_per_vm", "r",  3),
# #     Field("KiBps\n/vm",     "bw_per_vm",   "r",  6),
# #     Field("lat ms\nmedian", "lat_50",      "r",  3),
# #     Field("lat ms\n95%",    "lat_95",      "r",  3),
# #     Field("lat\navg",       "lat_avg",     "r",  3),
# # ]
# #
# # fields_and_header_dct = dict((item.attr, item) for item in fields_and_header)
# #
# # @classmethod
# # def format_for_console(cls, results) -> str:
# #     """create a table with io performance report for console"""
# #
# #     tab = texttable.Texttable(max_width=120)
# #     tab.set_deco(tab.HEADER | tab.VLINES | tab.BORDER)
# #     tab.set_cols_align([f.align for f in cls.fields_and_header])
# #     sep = ["-" * f.size for f in cls.fields_and_header]
# #     tab.header([f.header for f in cls.fields_and_header])
# #     prev_k = None
# #     for item in cls.prepare_data(results):
# #         if prev_k is not None:
# #             if prev_k != item["key"]:
# #                 tab.add_row(sep)
# #
# #         prev_k = item["key"]
# #         tab.add_row([item[f.attr] for f in cls.fields_and_header])
# #
# #     return tab.draw()
# #
# # @classmethod
# # def format_diff_for_console(cls, list_of_results: List[Any]) -> str:
# #     """create a table with io performance report for console"""
# #
# #     tab = texttable.Texttable(max_width=200)
# #     tab.set_deco(tab.HEADER | tab.VLINES | tab.BORDER)
# #
# #     header = [
# #         cls.fields_and_header_dct["name"].header,
# #         cls.fields_and_header_dct["summ"].header,
# #     ]
# #     align = ["l", "l"]
# #
# #     header.append("IOPS ~ Cnf% ~ Dev%")
# #     align.extend(["r"] * len(list_of_results))
# #     header.extend(
# #         "IOPS_{0} %".format(i + 2) for i in range(len(list_of_results[1:]))
# #     )
# #
# #     header.append("BW")
# #     align.extend(["r"] * len(list_of_results))
# #     header.extend(
# #         "BW_{0} %".format(i + 2) for i in range(len(list_of_results[1:]))
# #     )
# #
# #     header.append("LAT")
# #     align.extend(["r"] * len(list_of_results))
# #     header.extend(
# #         "LAT_{0}".format(i + 2) for i in range(len(list_of_results[1:]))
# #     )
# #
# #     tab.header(header)
# #     sep = ["-" * 3] * len(header)
# #     processed_results = list(map(cls.prepare_data, list_of_results))
# #
# #     key2results = []
# #     for res in processed_results:
# #         key2results.append(dict(
# #             ((item["name"], item["summ"]), item) for item in res
# #         ))
# #
# #     prev_k = None
# #     iops_frmt = "{0[iops]} ~ {0[conf]:>2} ~ {0[dev]:>2}"
# #     for item in processed_results[0]:
# #         if prev_k is not None:
# #             if prev_k != item["key"]:
# #                 tab.add_row(sep)
# #
# #         prev_k = item["key"]
# #
# #         key = (item['name'], item['summ'])
# #         line = list(key)
# #         base = key2results[0][key]
# #
# #         line.append(iops_frmt.format(base))
# #
# #         for test_results in key2results[1:]:
# #             val = test_results.get(key)
# #             if val is None:
# #                 line.append("-")
# #             elif base['iops'] == 0:
# #                 line.append("NaN")
# #             else:
# #                 prc_val = {'dev': val['dev'], 'conf': val['conf']}
# #                 prc_val['iops'] = int(100 * val['iops'] / base['iops'])
# #                 line.append(iops_frmt.format(prc_val))
# #
# #         line.append(base['bw'])
# #
# #         for test_results in key2results[1:]:
# #             val = test_results.get(key)
# #             if val is None:
# #                 line.append("-")
# #             elif base['bw'] == 0:
# #                 line.append("NaN")
# #             else:
# #                 line.append(int(100 * val['bw'] / base['bw']))
# #
# #         for test_results in key2results:
# #             val = test_results.get(key)
# #             if val is None:
# #                 line.append("-")
# #             else:
# #                 line.append("{0[lat_50]} - {0[lat_95]}".format(val))
# #
# #         tab.add_row(line)
# #
# #     tab.set_cols_align(align)
# #     return tab.draw()
#
#
# # READ_IOPS_DISCSTAT_POS = 3
# # WRITE_IOPS_DISCSTAT_POS = 7
# #
# #
# # def load_sys_log_file(ftype: str, fname: str) -> TimeSeriesValue:
# #     assert ftype == 'iops'
# #     pval = None
# #     with open(fname) as fd:
# #         iops = []
# #         for ln in fd:
# #             params = ln.split()
# #             cval = int(params[WRITE_IOPS_DISCSTAT_POS]) + \
# #                 int(params[READ_IOPS_DISCSTAT_POS])
# #             if pval is not None:
# #                 iops.append(cval - pval)
# #             pval = cval
# #
# #     vals = [(idx * 1000, val) for idx, val in enumerate(iops)]
# #     return TimeSeriesValue(vals)
# #
# #
# # def load_test_results(folder: str, run_num: int) -> 'FioRunResult':
# #     res = {}
# #     params = None
# #
# #     fn = os.path.join(folder, str(run_num) + '_params.yaml')
# #     params = yaml.load(open(fn).read())
# #
# #     conn_ids_set = set()
# #     rr = r"{}_(?P<conn_id>.*?)_(?P<type>[^_.]*)\.\d+\.log$".format(run_num)
# #     for fname in os.listdir(folder):
# #         rm = re.match(rr, fname)
# #         if rm is None:
# #             continue
# #
# #         conn_id_s = rm.group('conn_id')
# #         conn_id = conn_id_s.replace('_', ':')
# #         ftype = rm.group('type')
# #
# #         if ftype not in ('iops', 'bw', 'lat'):
# #             continue
# #
# #         ts = load_fio_log_file(os.path.join(folder, fname))
# #         res.setdefault(ftype, {}).setdefault(conn_id, []).append(ts)
# #
# #         conn_ids_set.add(conn_id)
# #
# #     rr = r"{}_(?P<conn_id>.*?)_(?P<type>[^_.]*)\.sys\.log$".format(run_num)
# #     for fname in os.listdir(folder):
# #         rm = re.match(rr, fname)
# #         if rm is None:
# #             continue
# #
# #         conn_id_s = rm.group('conn_id')
# #         conn_id = conn_id_s.replace('_', ':')
# #         ftype = rm.group('type')
# #
# #         if ftype not in ('iops', 'bw', 'lat'):
# #             continue
# #
# #         ts = load_sys_log_file(ftype, os.path.join(folder, fname))
# #         res.setdefault(ftype + ":sys", {}).setdefault(conn_id, []).append(ts)
# #
# #         conn_ids_set.add(conn_id)
# #
# #     mm_res = {}
# #
# #     if len(res) == 0:
# #         raise ValueError("No data was found")
# #
# #     for key, data in res.items():
# #         conn_ids = sorted(conn_ids_set)
# #         avail_ids = [conn_id for conn_id in conn_ids if conn_id in data]
# #         matr = [data[conn_id] for conn_id in avail_ids]
# #         mm_res[key] = MeasurementMatrix(matr, avail_ids)
# #
# #     raw_res = {}
# #     for conn_id in conn_ids:
# #         fn = os.path.join(folder, "{0}_{1}_rawres.json".format(run_num, conn_id_s))
# #
# #         # strip the message prefix hack: keep everything from the first '{'
# #         fc = "{" + open(fn).read().split('{', 1)[1]
# #         raw_res[conn_id] = json.loads(fc)
# #
# #     fio_task = FioJobSection(params['name'])
# #     fio_task.vals.update(params['vals'])
# #
# #     config = TestConfig('io', params, None, params['nodes'], folder, None)
# #     return FioRunResult(config, fio_task, mm_res, raw_res, params['intervals'], run_num)
# #
#
# # class DiskPerfInfo:
# #     def __init__(self, name: str, summary: str, params: Dict[str, Any], testnodes_count: int) -> None:
# #         self.name = name
# #         self.bw = None
# #         self.iops = None
# #         self.lat = None
# #         self.lat_50 = None
# #         self.lat_95 = None
# #         self.lat_avg = None
# #
# #         self.raw_bw = []
# #         self.raw_iops = []
# #         self.raw_lat = []
# #
# #         self.params = params
# #         self.testnodes_count = testnodes_count
# #         self.summary = summary
# #
# #         self.sync_mode = get_test_sync_mode(self.params['vals'])
# #         self.concurence = self.params['vals'].get('numjobs', 1)
# #
# #
# # class IOTestResults:
# #     def __init__(self, suite_name: str, fio_results: 'FioRunResult', log_directory: str):
# #         self.suite_name = suite_name
# #         self.fio_results = fio_results
# #         self.log_directory = log_directory
# #
# #     def __iter__(self):
# #         return iter(self.fio_results)
# #
# #     def __len__(self):
# #         return len(self.fio_results)
# #
# #     def get_yamable(self) -> Dict[str, List[str]]:
# #         items = [(fio_res.summary(), fio_res.idx) for fio_res in self]
# #         return {self.suite_name: [self.log_directory] + items}
#
#
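# # get_yamable above returns a yaml-friendly mapping, e.g. (sketch, made-up
# # values): {"io": ["/var/results/io_0", ("rrd4k_th1", 0), ("rws4k_th1", 1)]}
#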
# # class FioRunResult(TestResults):
# #     """
# #     Fio run results
# #     config: TestConfig
# #     fio_task: FioJobSection
# #     ts_results: {str: MeasurementMatrix[TimeSeriesValue]}
# #     raw_result: ????
# #     run_interval: (float, float) - test run time, used for sensors
# #     """
# #     def __init__(self, config, fio_task, ts_results, raw_result, run_interval, idx):
# #
# #         self.name = fio_task.name.rsplit("_", 1)[0]
# #         self.fio_task = fio_task
# #         self.idx = idx
# #
# #         self.bw = ts_results['bw']
# #         self.lat = ts_results['lat']
# #         self.iops = ts_results['iops']
# #
# #         if 'iops:sys' in ts_results:
# #             self.iops_sys = ts_results['iops:sys']
# #         else:
# #             self.iops_sys = None
# #
# #         res = {"bw": self.bw,
# #                "lat": self.lat,
# #                "iops": self.iops,
# #                "iops:sys": self.iops_sys}
# #
# #         self.sensors_data = None
# #         self._pinfo = None
# #         TestResults.__init__(self, config, res, raw_result, run_interval)
# #
# #     def get_params_from_fio_report(self):
# #         nodes = self.bw.connections_ids
# #
# #         iops = [self.raw_result[node]['jobs'][0]['mixed']['iops'] for node in nodes]
# #         total_ios = [self.raw_result[node]['jobs'][0]['mixed']['total_ios'] for node in nodes]
# #         runtime = [self.raw_result[node]['jobs'][0]['mixed']['runtime'] / 1000 for node in nodes]
# #         flt_iops = [float(ios) / rtime for ios, rtime in zip(total_ios, runtime)]
# #
# #         bw = [self.raw_result[node]['jobs'][0]['mixed']['bw'] for node in nodes]
# #         total_bytes = [self.raw_result[node]['jobs'][0]['mixed']['io_bytes'] for node in nodes]
# #         flt_bw = [float(tbytes) / rtime for tbytes, rtime in zip(total_bytes, runtime)]
# #
# #         return {'iops': iops,
# #                 'flt_iops': flt_iops,
# #                 'bw': bw,
# #                 'flt_bw': flt_bw}
# #
# #     def summary(self):
# #         return get_test_summary(self.fio_task, len(self.config.nodes))
# #
# #     def summary_tpl(self):
# #         return get_test_summary_tuple(self.fio_task, len(self.config.nodes))
# #
# #     def get_lat_perc_50_95_multy(self):
# #         lat_mks = collections.defaultdict(lambda: 0)
# #         num_res = 0
# #
# #         for result in self.raw_result.values():
# #             num_res += len(result['jobs'])
# #             for job_info in result['jobs']:
# #                 for k, v in job_info['latency_ms'].items():
# #                     if isinstance(k, str) and k.startswith('>='):
# #                         lat_mks[int(k[2:]) * 1000] += v
# #                     else:
# #                         lat_mks[int(k) * 1000] += v
# #
# #                 for k, v in job_info['latency_us'].items():
# #                     lat_mks[int(k)] += v
# #
# #         for k, v in lat_mks.items():
# #             lat_mks[k] = float(v) / num_res
# #         return get_lat_perc_50_95(lat_mks)
# #
# #     def disk_perf_info(self, avg_interval=2.0):
# #
# #         if self._pinfo is not None:
# #             return self._pinfo
# #
# #         testnodes_count = len(self.config.nodes)
# #
# #         pinfo = DiskPerfInfo(self.name,
# #                              self.summary(),
# #                              self.params,
# #                              testnodes_count)
# #
# #         def prepare(data, drop=1):
# #             if data is None:
# #                 return data
# #
# #             res = []
# #             for ts_data in data:
# #                 if ts_data.average_interval() < avg_interval:
# #                     ts_data = ts_data.derived(avg_interval)
# #
# #                 # drop the last values on the bounds, as they may cover
# #                 # ranges without any activity
# #                 assert len(ts_data.values) >= drop + 1, str(drop) + " " + str(ts_data.values)
# #
# #                 if drop > 0:
# #                     res.append(ts_data.values[:-drop])
# #                 else:
# #                     res.append(ts_data.values)
# #
# #             return res
# #
# #         def agg_data(matr):
# #             arr = sum(matr, [])
# #             min_len = min(map(len, arr))
# #             res = []
# #             for idx in range(min_len):
# #                 res.append(sum(dt[idx] for dt in arr))
# #             return res
# #
# #         pinfo.raw_lat = list(map(prepare, self.lat.per_vm()))
# #         num_th = sum(map(len, pinfo.raw_lat))
# #         lat_avg = [val / num_th for val in agg_data(pinfo.raw_lat)]
# #         pinfo.lat_avg = data_property(lat_avg).average / 1000  # us to ms
# #
# #         pinfo.lat_50, pinfo.lat_95 = self.get_lat_perc_50_95_multy()
# #         pinfo.lat = pinfo.lat_50
# #
# #         pinfo.raw_bw = list(map(prepare, self.bw.per_vm()))
# #         pinfo.raw_iops = list(map(prepare, self.iops.per_vm()))
# #
# #         if self.iops_sys is not None:
# #             pinfo.raw_iops_sys = list(map(prepare, self.iops_sys.per_vm()))
# #             pinfo.iops_sys = data_property(agg_data(pinfo.raw_iops_sys))
# #         else:
# #             pinfo.raw_iops_sys = None
# #             pinfo.iops_sys = None
# #
# #         fparams = self.get_params_from_fio_report()
# #         fio_report_bw = sum(fparams['flt_bw'])
# #         fio_report_iops = sum(fparams['flt_iops'])
# #
# #         agg_bw = agg_data(pinfo.raw_bw)
# #         agg_iops = agg_data(pinfo.raw_iops)
# #
# #         log_bw_avg = average(agg_bw)
# #         log_iops_avg = average(agg_iops)
# #
# #         # scale the values to match the averages from the fio report
# #         coef_iops = fio_report_iops / float(log_iops_avg)
# #         coef_bw = fio_report_bw / float(log_bw_avg)
# #
# #         bw_log = data_property([val * coef_bw for val in agg_bw])
# #         iops_log = data_property([val * coef_iops for val in agg_iops])
# #
# #         bw_report = data_property([fio_report_bw])
# #         iops_report = data_property([fio_report_iops])
# #
# #         # when IOPS/BW per thread is too low, the data from the logs
# #         # is rounded too coarsely to be usable
# #         iops_per_th = sum(sum(pinfo.raw_iops, []), [])
# #         if average(iops_per_th) > 10:
# #             pinfo.iops = iops_log
# #             pinfo.iops2 = iops_report
# #         else:
# #             pinfo.iops = iops_report
# #             pinfo.iops2 = iops_log
# #
# #         bw_per_th = sum(sum(pinfo.raw_bw, []), [])
# #         if average(bw_per_th) > 10:
# #             pinfo.bw = bw_log
# #             pinfo.bw2 = bw_report
# #         else:
# #             pinfo.bw = bw_report
# #             pinfo.bw2 = bw_log
# #
# #         self._pinfo = pinfo
# #
# #         return pinfo
#
# # class TestResult:
# #     """Hold all information for a given test - test info,
# #     sensors data and performance results for the test period from all nodes"""
# #     run_id = None  # type: int
# #     test_info = None  # type: Any
# #     begin_time = None  # type: int
# #     end_time = None  # type: int
# #     sensors = None  # Dict[Tuple[str, str, str], TimeSeries]
# #     performance = None  # Dict[Tuple[str, str], TimeSeries]
# #
# # class TestResults:
# #     """
# #     this class describes test results
# #
# #     config: TestConfig - test config object
# #     params: dict - parameters from the yaml file for this test
# #     results: {str: MeasurementMesh} - test results object
# #     raw_result: Any - opaque object to store raw results
# #     run_interval: (float, float) - test run time, used for sensors
# #     """
# #
# #     def __init__(self,
# #                  config: TestConfig,
# #                  results: Dict[str, Any],
# #                  raw_result: Any,
# #                  run_interval: Tuple[float, float]) -> None:
# #         self.config = config
# #         self.params = config.params
# #         self.results = results
# #         self.raw_result = raw_result
# #         self.run_interval = run_interval
# #
# #     def __str__(self) -> str:
# #         res = "{0}({1}):\n    results:\n".format(
# #             self.__class__.__name__,
# #             self.summary())
# #
# #         for name, val in self.results.items():
# #             res += "        {0}={1}\n".format(name, val)
# #
# #         res += "    params:\n"
# #
# #         for name, val in self.params.items():
# #             res += "        {0}={1}\n".format(name, val)
# #
# #         return res
# #
# #     def summary(self) -> str:
# #         raise NotImplementedError()
# #
# #     def get_yamable(self) -> Any:
# #         raise NotImplementedError()
#
#
#
# # class MeasurementMatrix:
# #     """
# #     data: [[MeasurementResult]] - VM_COUNT x TH_COUNT matrix of MeasurementResult
# #     """
# #     def __init__(self, data, connections_ids):
# #         self.data = data
# #         self.connections_ids = connections_ids
# #
# #     def per_vm(self):
# #         return self.data
# #
# #     def per_th(self):
# #         return sum(self.data, [])
#
#
# # class MeasurementResults:
# #     data = None  # type: List[Any]
# #
# #     def stat(self) -> StatProps:
# #         return data_property(self.data)
# #
# #     def __str__(self) -> str:
# #         return 'TS([' + ", ".join(map(str, self.data)) + '])'
# #
# #
# # class SimpleVals(MeasurementResults):
# #     """
# #     data: [float] - list of values
# #     """
# #     def __init__(self, data: List[float]) -> None:
# #         self.data = data
# #
# #
# # class TimeSeriesValue(MeasurementResults):
# #     """
# #     data: [(float, float, float)] - list of (start_time, length, average_value_for_interval)
# #     odata: original values
# #     """
# #     def __init__(self, data: List[Tuple[float, float]]) -> None:
# #         assert len(data) > 0
# #         self.odata = data[:]
# #         self.data = []  # type: List[Tuple[float, float, float]]
# #
# #         cstart = 0.0
# #         for nstart, nval in data:
# #             self.data.append((cstart, nstart - cstart, nval))
# #             cstart = nstart
# #
# #     @property
# #     def values(self) -> List[float]:
# #         return [val[2] for val in self.data]
# #
# #     def average_interval(self) -> float:
# #         return float(sum([val[1] for val in self.data])) / len(self.data)
# #
# #     def skip(self, seconds) -> 'TimeSeriesValue':
# #         nres = []
# #         for start, ln, val in self.data:
# #             nstart = start + ln - seconds
# #             if nstart > 0:
# #                 nres.append([nstart, val])
# #         return self.__class__(nres)
# #
# #     def derived(self, tdelta) -> 'TimeSeriesValue':
# #         end = self.data[-1][0] + self.data[-1][1]
# #         tdelta = float(tdelta)
# #
# #         ln = end / tdelta
# #
# #         if ln - int(ln) > 0:
# #             ln += 1
# #
# #         res = [[tdelta * i, 0.0] for i in range(int(ln))]
# #
# #         for start, length, val in self.data:
# #             start_idx = int(start / tdelta)
# #             end_idx = int((start + length) / tdelta)
# #
# #             for idx in range(start_idx, end_idx + 1):
# #                 rstart = tdelta * idx
# #                 rend = tdelta * (idx + 1)
# #
# #                 intersection_ln = min(rend, start + length) - max(start, rstart)
# #                 if intersection_ln > 0:
# #                     res[idx][1] += val * intersection_ln / tdelta
# #
# #         return self.__class__(res)
#
#
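# # TimeSeriesValue turns point samples into (start, length, value) intervals:
# # e.g. [(1.0, 10), (3.0, 20)] becomes [(0.0, 1.0, 10), (1.0, 2.0, 20)], and
# # derived(tdelta) resamples those intervals onto a fixed tdelta grid using
# # overlap-weighted averaging.
#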
# def console_report_stage(ctx: TestRun) -> None:
#     # TODO(koder): load data from storage
#     raise NotImplementedError("...")
#     # first_report = True
#     # text_rep_fname = ctx.config.text_report_file
#     #
#     # with open(text_rep_fname, "w") as fd:
#     #     for tp, data in ctx.results.items():
#     #         if 'io' == tp and data is not None:
#     #             rep_lst = []
#     #             for result in data:
#     #                 rep_lst.append(
#     #                     IOPerfTest.format_for_console(list(result)))
#     #             rep = "\n\n".join(rep_lst)
#     #         elif tp in ['mysql', 'pgbench'] and data is not None:
#     #             rep = MysqlTest.format_for_console(data)
#     #         elif tp == 'omg':
#     #             rep = OmgTest.format_for_console(data)
#     #         else:
#     #             logger.warning("Can't generate text report for " + tp)
#     #             continue
#     #
#     #         fd.write(rep)
#     #         fd.write("\n")
#     #
#     #         if first_report:
#     #             logger.info("Text report was stored in " + text_rep_fname)
#     #             first_report = False
#     #
#     #         print("\n" + rep + "\n")
#
#
# # def test_load_report_stage(cfg: Config, ctx: TestRun) -> None:
# #     load_rep_fname = cfg.load_report_file
# #     found = False
# #     for idx, (tp, data) in enumerate(ctx.results.items()):
# #         if 'io' == tp and data is not None:
# #             if found:
# #                 logger.error("Making reports for more than one io block " +
# #                              "isn't supported! All reports except the " +
# #                              "first are skipped")
# #                 continue
# #             found = True
# #             report.make_load_report(idx, cfg['results'], load_rep_fname)
# #
# #
# # def html_report_stage(ctx: TestRun) -> None:
# #     # TODO(koder): load data from storage
# #     raise NotImplementedError("...")
# #     # html_rep_fname = cfg.html_report_file
# #     # found = False
# #     # for tp, data in ctx.results.items():
# #     #     if 'io' == tp and data is not None:
# #     #         if found or len(data) > 1:
# #     #             logger.error("Making reports for more than one io block " +
# #     #                          "isn't supported! All reports except the " +
# #     #                          "first are skipped")
# #     #             continue
# #     #         found = True
# #     #         report.make_io_report(list(data[0]),
# #     #                               cfg.get('comment', ''),
# #     #                               html_rep_fname,
# #     #                               lab_info=ctx.nodes)
# #
# #
# # def load_data_from_path(test_res_dir: str) -> Mapping[str, List[Any]]:
# #     files = get_test_files(test_res_dir)
# #     raw_res = yaml_load(open(files['raw_results']).read())
# #     res = collections.defaultdict(list)
# #
# #     for tp, test_lists in raw_res:
# #         for tests in test_lists:
# #             for suite_name, suite_data in tests.items():
# #                 result_folder = suite_data[0]
# #                 res[tp].append(TOOL_TYPE_MAPPER[tp].load(suite_name, result_folder))
# #
# #     return res
# #
# #
# # def load_data_from_path_stage(var_dir: str, _, ctx: TestRun) -> None:
# #     for tp, vals in load_data_from_path(var_dir).items():
# #         ctx.results.setdefault(tp, []).extend(vals)
# #
# #
# # def load_data_from(var_dir: str) -> Callable[[TestRun], None]:
# #     return functools.partial(load_data_from_path_stage, var_dir)