import re
import time
import json
import stat
import random
import shutil
import os.path
import logging
import datetime
import functools
import subprocess
import collections

import yaml
import paramiko
import texttable
from paramiko.ssh_exception import SSHException
from concurrent.futures import ThreadPoolExecutor

import wally
from wally.pretty_yaml import dumps
from wally.statistic import round_3_digit, data_property, average
from wally.utils import ssize2b, sec_to_str, StopTestError, Barrier, get_os
from wally.ssh_utils import (save_to_remote, read_from_remote, BGSSHTask, reconnect)

from .fio_task_parser import (execution_time, fio_cfg_compile,
                              get_test_summary, get_test_summary_tuple,
                              get_test_sync_mode, FioJobSection)

from ..itest import (TimeSeriesValue, PerfTest, TestResults,
                     run_on_node, TestConfig, MeasurementMatrix)

logger = logging.getLogger("wally")


# Results folder structure
# results/
#     {loadtype}_{num}/
#         config.yaml
#         ......


class NoData(object):
    pass


def cached_prop(func):
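    """Property that calls the wrapped function once and caches the result
    in the ``_<name>`` attribute (which is expected to start out as NoData)."""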
    @property
    @functools.wraps(func)
    def closure(self):
        val = getattr(self, "_" + func.__name__)
        if val is NoData:
            val = func(self)
            setattr(self, "_" + func.__name__, val)
        return val
    return closure


def load_fio_log_file(fname):
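    """Parse a fio log file into a TimeSeriesValue of (time_ms, value) pairs."""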
    with open(fname) as fd:
        it = [ln.split(',')[:2] for ln in fd]

    vals = [(float(off) / 1000,  # convert us to ms
             float(val.strip()) + 0.5)  # add 0.5 to compensate for the average value,
            # as fio trims all values in the log to integers
            for off, val in it]

    return TimeSeriesValue(vals)


def load_test_results(folder, run_num):
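    """Load one test run (params yaml, per-connection fio logs and raw json
    results) from folder and return a FioRunResult."""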
    res = {}
    params = None

    fn = os.path.join(folder, str(run_num) + '_params.yaml')
    params = yaml.load(open(fn).read())

    conn_ids_set = set()
    rr = r"{0}_(?P<conn_id>.*?)_(?P<type>[^_.]*)\.\d+\.log$".format(run_num)
    for fname in os.listdir(folder):
        rm = re.match(rr, fname)
        if rm is None:
            continue

        conn_id_s = rm.group('conn_id')
        conn_id = conn_id_s.replace('_', ':')
        ftype = rm.group('type')

        if ftype not in ('iops', 'bw', 'lat'):
            continue

        ts = load_fio_log_file(os.path.join(folder, fname))
        res.setdefault(ftype, {}).setdefault(conn_id, []).append(ts)

        conn_ids_set.add(conn_id)

    mm_res = {}

    if len(res) == 0:
        raise ValueError("No data was found")

    for key, data in res.items():
        conn_ids = sorted(conn_ids_set)
        matr = [data[conn_id] for conn_id in conn_ids]

        mm_res[key] = MeasurementMatrix(matr, conn_ids)

    raw_res = {}
    for conn_id in conn_ids:
        # rawres files are stored with '_' instead of ':' in the connection id
        fn = os.path.join(folder, "{0}_{1}_rawres.json".format(run_num, conn_id.replace(':', '_')))

        # remove message hack
        fc = "{" + open(fn).read().split('{', 1)[1]
        raw_res[conn_id] = json.loads(fc)

    fio_task = FioJobSection(params['name'])
    fio_task.vals.update(params['vals'])

    config = TestConfig('io', params, None, params['nodes'], folder, None)
    return FioRunResult(config, fio_task, mm_res, raw_res, params['intervals'], run_num)


class Attrmapper(object):
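    """Expose the keys of a dict as object attributes."""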
    def __init__(self, dct):
        self.__dct = dct

    def __getattr__(self, name):
        try:
            return self.__dct[name]
        except KeyError:
            raise AttributeError(name)


class DiskPerfInfo(object):
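    """Container for aggregated disk performance data (bw/iops/latency) of a single fio job."""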
    def __init__(self, name, summary, params, testnodes_count):
        self.name = name
        self.bw = None
        self.iops = None
        self.lat = None
        self.lat_50 = None
        self.lat_95 = None
        self.lat_avg = None

        self.raw_bw = []
        self.raw_iops = []
        self.raw_lat = []

        self.params = params
        self.testnodes_count = testnodes_count
        self.summary = summary
        self.p = Attrmapper(self.params['vals'])

        self.sync_mode = get_test_sync_mode(self.params['vals'])
        self.concurence = self.params['vals'].get('numjobs', 1)


def get_lat_perc_50_95(lat_mks):
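    """Estimate the 50th and 95th latency percentiles (in ms) from a fio
    latency histogram {latency_us: percent_of_requests}, interpolating
    linearly between histogram buckets."""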
    curr_perc = 0
    perc_50 = None
    perc_95 = None
    pkey = None
    for key, val in sorted(lat_mks.items()):
        if curr_perc + val >= 50 and perc_50 is None:
            if pkey is None or val < 1.:
                perc_50 = key
            else:
                perc_50 = (50. - curr_perc) / val * (key - pkey) + pkey

        if curr_perc + val >= 95:
            if pkey is None or val < 1.:
                perc_95 = key
            else:
                perc_95 = (95. - curr_perc) / val * (key - pkey) + pkey
            break

        pkey = key
        curr_perc += val

    # for k, v in sorted(lat_mks.items()):
    #     if k / 1000 > 0:
    #         print "{0:>4}".format(k / 1000), v

    # print perc_50 / 1000., perc_95 / 1000.
    # exit(1)
    return perc_50 / 1000., perc_95 / 1000.


class IOTestResults(object):
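    """Results of a whole test suite: a collection of FioRunResult objects."""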
    def __init__(self, suite_name, fio_results, log_directory):
        self.suite_name = suite_name
        self.fio_results = fio_results
        self.log_directory = log_directory

    def __iter__(self):
        return iter(self.fio_results)

    def __len__(self):
        return len(self.fio_results)

    def get_yamable(self):
        items = [(fio_res.summary(), fio_res.idx) for fio_res in self]
        return {self.suite_name: [self.log_directory] + items}


class FioRunResult(TestResults):
    """
    Fio run results
    config: TestConfig
    fio_task: FioJobSection
    ts_results: {str: MeasurementMatrix[TimeSeriesValue]}
    raw_result: {conn_id: parsed fio json output}
    run_interval: (float, float) - test run time, used for sensors
    """
    def __init__(self, config, fio_task, ts_results, raw_result, run_interval, idx):

        self.name = fio_task.name.rsplit("_", 1)[0]
        self.fio_task = fio_task
        self.idx = idx

        self.bw = ts_results.get('bw')
        self.lat = ts_results.get('lat')
        self.iops = ts_results.get('iops')

        res = {"bw": self.bw, "lat": self.lat, "iops": self.iops}

        self.sensors_data = None
        self._pinfo = None
        TestResults.__init__(self, config, res, raw_result, run_interval)

    def get_params_from_fio_report(self):
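        """Extract per-node iops/bw numbers from the raw fio json reports.
        The flt_* values are recomputed from total io and real runtime."""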
        nodes = self.bw.connections_ids

        iops = [self.raw_result[node]['jobs'][0]['mixed']['iops'] for node in nodes]
        total_ios = [self.raw_result[node]['jobs'][0]['mixed']['total_ios'] for node in nodes]
        runtime = [self.raw_result[node]['jobs'][0]['mixed']['runtime'] / 1000 for node in nodes]
        flt_iops = [float(ios) / rtime for ios, rtime in zip(total_ios, runtime)]

        bw = [self.raw_result[node]['jobs'][0]['mixed']['bw'] for node in nodes]
        total_bytes = [self.raw_result[node]['jobs'][0]['mixed']['io_bytes'] for node in nodes]
        flt_bw = [float(tbytes) / rtime for tbytes, rtime in zip(total_bytes, runtime)]

        return {'iops': iops,
                'flt_iops': flt_iops,
                'bw': bw,
                'flt_bw': flt_bw}

    def summary(self):
        return get_test_summary(self.fio_task, len(self.config.nodes))

    def summary_tpl(self):
        return get_test_summary_tuple(self.fio_task, len(self.config.nodes))

    def get_lat_perc_50_95_multy(self):
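        """Merge latency histograms from all nodes and jobs, then return the
        (50%, 95%) latency percentiles in ms."""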
        lat_mks = collections.defaultdict(lambda: 0)
        num_res = 0

        for result in self.raw_result.values():
            num_res += len(result['jobs'])
            for job_info in result['jobs']:
                for k, v in job_info['latency_ms'].items():
                    if isinstance(k, basestring) and k.startswith('>='):
                        lat_mks[int(k[2:]) * 1000] += v
                    else:
                        lat_mks[int(k) * 1000] += v

                for k, v in job_info['latency_us'].items():
                    lat_mks[int(k)] += v

        for k, v in lat_mks.items():
            lat_mks[k] = float(v) / num_res
        return get_lat_perc_50_95(lat_mks)

    def disk_perf_info(self, avg_interval=2.0):
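        """Build (and cache) a DiskPerfInfo with aggregated bw/iops/latency
        statistics, combining per-interval fio logs with the final fio report."""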

        if self._pinfo is not None:
            return self._pinfo

        testnodes_count = len(self.config.nodes)

        pinfo = DiskPerfInfo(self.name,
                             self.summary(),
                             self.params,
                             testnodes_count)

        def prepare(data, drop=1):
            if data is None:
                return data

            res = []
            for ts_data in data:
                if ts_data.average_interval() < avg_interval:
                    ts_data = ts_data.derived(avg_interval)

                # drop the last values on the bounds,
                # as they may contain ranges without activity
                assert len(ts_data.values) >= drop + 1, str(drop) + " " + str(ts_data.values)

                if drop > 0:
                    res.append(ts_data.values[:-drop])
                else:
                    res.append(ts_data.values)

            return res

        def agg_data(matr):
            arr = sum(matr, [])
            min_len = min(map(len, arr))
            res = []
            for idx in range(min_len):
                res.append(sum(dt[idx] for dt in arr))
            return res

        pinfo.raw_lat = map(prepare, self.lat.per_vm())
        num_th = sum(map(len, pinfo.raw_lat))
        lat_avg = [val / num_th for val in agg_data(pinfo.raw_lat)]
        pinfo.lat_avg = data_property(lat_avg).average / 1000  # us to ms

        pinfo.lat_50, pinfo.lat_95 = self.get_lat_perc_50_95_multy()
        pinfo.lat = pinfo.lat_50

        pinfo.raw_bw = map(prepare, self.bw.per_vm())
        pinfo.raw_iops = map(prepare, self.iops.per_vm())

        fparams = self.get_params_from_fio_report()
        fio_report_bw = sum(fparams['flt_bw'])
        fio_report_iops = sum(fparams['flt_iops'])

        agg_bw = agg_data(pinfo.raw_bw)
        agg_iops = agg_data(pinfo.raw_iops)

        log_bw_avg = average(agg_bw)
        log_iops_avg = average(agg_iops)

        # scale log values to match the averages from the fio report
        coef_iops = fio_report_iops / float(log_iops_avg)
        coef_bw = fio_report_bw / float(log_bw_avg)

        bw_log = data_property([val * coef_bw for val in agg_bw])
        iops_log = data_property([val * coef_iops for val in agg_iops])

        bw_report = data_property([fio_report_bw])
        iops_report = data_property([fio_report_iops])

        # when IOPS/BW per thread is too low, values in the fio logs are
        # rounded too coarsely, so prefer the fio report values
        iops_per_th = sum(sum(pinfo.raw_iops, []), [])
        if average(iops_per_th) > 10:
            pinfo.iops = iops_log
            pinfo.iops2 = iops_report
        else:
            pinfo.iops = iops_report
            pinfo.iops2 = iops_log

        bw_per_th = sum(sum(pinfo.raw_bw, []), [])
        if average(bw_per_th) > 10:
            pinfo.bw = bw_log
            pinfo.bw2 = bw_report
        else:
            pinfo.bw = bw_report
            pinfo.bw2 = bw_log

        self._pinfo = pinfo

        return pinfo


class IOPerfTest(PerfTest):
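    """fio-based IO performance test: prepares test files and the fio binary on
    every test node, runs each compiled fio job and collects the results."""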
    tcp_conn_timeout = 30
    max_pig_timeout = 5
    soft_runcycle = 5 * 60

    def __init__(self, config):
        PerfTest.__init__(self, config)

        get = self.config.params.get
        do_get = self.config.params.__getitem__

        self.config_fname = do_get('cfg')

        if '/' not in self.config_fname and '.' not in self.config_fname:
            cfgs_dir = os.path.dirname(__file__)
            self.config_fname = os.path.join(cfgs_dir,
                                             self.config_fname + '.cfg')

        self.alive_check_interval = get('alive_check_interval')
        self.use_system_fio = get('use_system_fio', False)

        if get('prefill_files') is not None:
            logger.warning("prefill_files option is deprecated. Use force_prefill instead")

        self.force_prefill = get('force_prefill', False)
        self.config_params = get('params', {}).copy()

        self.io_py_remote = self.join_remote("agent.py")
        self.results_file = self.join_remote("results.json")
        self.pid_file = self.join_remote("pid")
        self.task_file = self.join_remote("task.cfg")
        self.sh_file = self.join_remote("cmd.sh")
        self.err_out_file = self.join_remote("fio_err_out")
        self.exit_code_file = self.join_remote("exit_code")

        self.max_latency = get("max_lat", None)
        self.min_bw_per_thread = get("min_bw", None)

        self.use_sudo = get("use_sudo", True)

        self.raw_cfg = open(self.config_fname).read()
        self.fio_configs = fio_cfg_compile(self.raw_cfg,
                                           self.config_fname,
                                           self.config_params)
        self.fio_configs = list(self.fio_configs)

    @classmethod
    def load(cls, suite_name, folder):
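        """Load previously saved results of a whole suite from folder."""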
        res = []
        for fname in os.listdir(folder):
            if re.match(r"\d+_params\.yaml$", fname):
                num = int(fname.split('_')[0])
                res.append(load_test_results(folder, num))
        return IOTestResults(suite_name, res, folder)

    def cleanup(self):
        # delete_file(conn, self.io_py_remote)
        # Need to remove temp files used for testing
        pass

    # size is in megabytes
    def check_prefill_required(self, rossh, fname, size, num_blocks=16):
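        """Return True if fname looks like it still needs prefilling: the file
        is missing, smaller than the requested size, or randomly sampled 1KiB
        blocks hash to the md5 of an all-zero block."""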
        try:
            with rossh.connection.open_sftp() as sftp:
                fstats = sftp.stat(fname)

            if stat.S_ISREG(fstats.st_mode) and fstats.st_size < size * 1024 ** 2:
                return True
        except EnvironmentError:
            return True

        cmd = 'python -c "' + \
              "import sys;" + \
              "fd = open('{0}', 'rb');" + \
              "fd.seek({1});" + \
              "data = fd.read(1024); " + \
              "sys.stdout.write(data + ' ' * (1024 - len(data)))\" | md5sum"

        if self.use_sudo:
            cmd = "sudo " + cmd

        zero_md5 = '0f343b0931126a20f133d67c2b018a3b'
        bsize = size * (1024 ** 2)
        offsets = [random.randrange(bsize - 1024) for _ in range(num_blocks)]
        offsets.append(bsize - 1024)
        offsets.append(0)

        for offset in offsets:
            data = rossh(cmd.format(fname, offset), nolog=True)

            md = ""
            for line in data.split("\n"):
                if "unable to resolve" not in line:
                    md = line.split()[0].strip()
                    break

            if len(md) != 32:
                logger.error("File data check failed - " + data)
                return True

            if zero_md5 == md:
                return True

        return False

    def prefill_test_files(self, rossh, files, force=False):
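        """Fill test files with data via sequential fio writes; files is a
        {fname: size_in_MiB} mapping. Files that already look filled are
        skipped unless force is set."""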
        if self.use_system_fio:
            cmd_templ = "fio "
        else:
            cmd_templ = "{0}/fio ".format(self.config.remote_dir)

        if self.use_sudo:
            cmd_templ = "sudo " + cmd_templ

        cmd_templ += "--name=xxx --filename={0} --direct=1" + \
                     " --bs=4m --size={1}m --rw=write"

        ssize = 0

        if force:
            logger.info("File prefilling is forced")

        ddtime = 0
        for fname, curr_sz in files.items():
            if not force:
                if not self.check_prefill_required(rossh, fname, curr_sz):
                    logger.debug("prefill is skipped")
                    continue

            logger.info("Prefilling file {0}".format(fname))
            cmd = cmd_templ.format(fname, curr_sz)
            ssize += curr_sz

            stime = time.time()
            rossh(cmd, timeout=curr_sz)
            ddtime += time.time() - stime

        if ddtime > 1.0:
            fill_bw = int(ssize / ddtime)
            mess = "Initial fio fill bw is {0} MiBps for this vm"
            logger.info(mess.format(fill_bw))

    def install_utils(self, node, rossh, max_retry=3, timeout=5):
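        """Install packages required for the test on the node and, unless
        use_system_fio is set, upload a prebuilt fio binary matching the node OS."""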
        need_install = []
        packs = [('screen', 'screen')]
        os_info = get_os(rossh)

        if self.use_system_fio:
            packs.append(('fio', 'fio'))
        else:
            packs.append(('bzip2', 'bzip2'))

        for bin_name, package in packs:
            if bin_name is None:
                need_install.append(package)
                continue

            try:
                rossh('which ' + bin_name, nolog=True)
            except OSError:
                need_install.append(package)

        if len(need_install) != 0:
            if 'redhat' == os_info.distro:
                cmd = "sudo yum -y install " + " ".join(need_install)
            else:
                cmd = "sudo apt-get -y install " + " ".join(need_install)

            for _ in range(max_retry):
                try:
                    rossh(cmd)
                    break
                except OSError as err:
                    time.sleep(timeout)
            else:
                raise OSError("Can't install - " + str(err))

        if not self.use_system_fio:
            fio_dir = os.path.dirname(os.path.dirname(wally.__file__))
            fio_dir = os.path.join(os.getcwd(), fio_dir)
            fio_dir = os.path.join(fio_dir, 'fio_binaries')
            fname = 'fio_{0.release}_{0.arch}.bz2'.format(os_info)
            fio_path = os.path.join(fio_dir, fname)

            if not os.path.exists(fio_path):
                raise RuntimeError("No prebuilt fio available for {0}".format(os_info))

            bz_dest = self.join_remote('fio.bz2')
            with node.connection.open_sftp() as sftp:
                sftp.put(fio_path, bz_dest)

            rossh("bzip2 --decompress " + bz_dest, nolog=True)
            rossh("chmod a+x " + self.join_remote("fio"), nolog=True)

    def pre_run(self):
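        """Compute the required size of every test file and prepare all test
        nodes in parallel."""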
        files = {}
        for section in self.fio_configs:
            sz = ssize2b(section.vals['size'])
            msz = sz / (1024 ** 2)

            if sz % (1024 ** 2) != 0:
                msz += 1

            fname = section.vals['filename']

            # if another test already uses the same file name,
            # take the largest size
            files[fname] = max(files.get(fname, 0), msz)

        with ThreadPoolExecutor(len(self.config.nodes)) as pool:
            fc = functools.partial(self.pre_run_th,
                                   files=files,
                                   force=self.force_prefill)
            list(pool.map(fc, self.config.nodes))

    def pre_run_th(self, node, files, force):
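        """Per-node preparation: create the remote working directory, install
        required utilities and prefill the test files."""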
        try:
            # fill files with pseudo-random data
            rossh = run_on_node(node)
            rossh.connection = node.connection

            try:
                cmd = 'mkdir -p "{0}"'.format(self.config.remote_dir)
                if self.use_sudo:
                    cmd = "sudo " + cmd
                    cmd += " ; sudo chown {0} {1}".format(node.get_user(),
                                                          self.config.remote_dir)
                rossh(cmd, nolog=True)

                assert self.config.remote_dir != "" and self.config.remote_dir != "/"
                rossh("rm -rf {0}/*".format(self.config.remote_dir), nolog=True)

            except Exception as exc:
                msg = "Failed to create folder {0} on remote {1}. Error: {2!s}"
                msg = msg.format(self.config.remote_dir, node.get_conn_id(), exc)
                logger.exception(msg)
                raise StopTestError(msg, exc)

            self.install_utils(node, rossh)
            self.prefill_test_files(rossh, files, force)
        except:
            logger.exception("Failed to prepare node {0}".format(node.get_conn_id()))
            raise

    def show_test_execution_time(self):
        if len(self.fio_configs) > 1:
            # +10% is a rough estimation for additional operations
            # like sftp, etc
            exec_time = int(sum(map(execution_time, self.fio_configs)) * 1.1)
            exec_time_s = sec_to_str(exec_time)
            now_dt = datetime.datetime.now()
            end_dt = now_dt + datetime.timedelta(0, exec_time)
            msg = "Entire test should take around {0} and finish at {1}"
            logger.info(msg.format(exec_time_s,
                                   end_dt.strftime("%H:%M:%S")))

    def run(self):
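        """Run every compiled fio config on all test nodes, save the logs and
        parameters of each run and return an IOTestResults object. Load types
        that already hit the latency/bandwidth limits with a smaller thread
        count are skipped."""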
        logger.debug("Run preparation")
        self.pre_run()
        self.show_test_execution_time()

        tname = os.path.basename(self.config_fname)
        if tname.endswith('.cfg'):
            tname = tname[:-4]

        barrier = Barrier(len(self.config.nodes))
        results = []

        # set of Operation_Mode_BlockSize strings which should not be tested
        # anymore, as they were already too slow with a smaller thread count
        lat_bw_limit_reached = set()

        with ThreadPoolExecutor(len(self.config.nodes)) as pool:
            for pos, fio_cfg in enumerate(self.fio_configs):
                test_descr = get_test_summary(fio_cfg.vals).split("th")[0]
                if test_descr in lat_bw_limit_reached:
                    continue
                else:
                    logger.info("Will run {0} test".format(fio_cfg.name))

                templ = "Test should take about {0}." + \
                        " Should finish at {1}," + \
                        " will wait at most till {2}"
                exec_time = execution_time(fio_cfg)
                exec_time_str = sec_to_str(exec_time)
                timeout = int(exec_time + max(300, exec_time))

                now_dt = datetime.datetime.now()
                end_dt = now_dt + datetime.timedelta(0, exec_time)
                wait_till = now_dt + datetime.timedelta(0, timeout)

                logger.info(templ.format(exec_time_str,
                                         end_dt.strftime("%H:%M:%S"),
                                         wait_till.strftime("%H:%M:%S")))

                func = functools.partial(self.do_run,
                                         barrier=barrier,
                                         fio_cfg=fio_cfg,
                                         pos=pos)

                max_retr = 3
                for idx in range(max_retr):
                    try:
                        intervals = list(pool.map(func, self.config.nodes))
                        if None not in intervals:
                            break
                    except (EnvironmentError, SSHException) as exc:
                        logger.exception("During fio run")
                        if idx == max_retr - 1:
                            raise StopTestError("Fio failed", exc)

                    logger.info("Sleeping 30s and retrying")
                    time.sleep(30)

                fname = "{0}_task.fio".format(pos)
                with open(os.path.join(self.config.log_directory, fname), "w") as fd:
                    fd.write(str(fio_cfg))

                params = {'vm_count': len(self.config.nodes)}
                params['name'] = fio_cfg.name
                params['vals'] = dict(fio_cfg.vals.items())
                params['intervals'] = intervals
                params['nodes'] = [node.get_conn_id() for node in self.config.nodes]

                fname = "{0}_params.yaml".format(pos)
                with open(os.path.join(self.config.log_directory, fname), "w") as fd:
                    fd.write(dumps(params))

                res = load_test_results(self.config.log_directory, pos)
                results.append(res)

                if self.max_latency is not None:
                    lat_50, _ = res.get_lat_perc_50_95_multy()

                    # lat_50 is in ms (see get_lat_perc_50_95_multy)
                    if self.max_latency < lat_50:
                        logger.info(("Will skip all subsequent tests of {0} " +
                                     "due to lat/bw limits").format(fio_cfg.name))
                        lat_bw_limit_reached.add(test_descr)

                test_res = res.get_params_from_fio_report()
                if self.min_bw_per_thread is not None:
                    if self.min_bw_per_thread > average(test_res['bw']):
                        lat_bw_limit_reached.add(test_descr)

        return IOTestResults(self.config.params['cfg'],
                             results, self.config.log_directory)

    def do_run(self, node, barrier, fio_cfg, pos, nolog=False):
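        """Run one fio config on a single node: upload the job file and a
        wrapper script, execute it in the background over ssh, then download
        and unpack the produced logs and raw results. Returns a (begin, end)
        timestamp pair, or None if the remote process failed to start."""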
        if self.use_sudo:
            sudo = "sudo "
        else:
            sudo = ""

        bash_file = "#!/bin/bash\n" + \
                    "cd {exec_folder}\n" + \
                    "{fio_path}fio --output-format=json --output={out_file} " + \
                    "--alloc-size=262144 {job_file} " + \
                    " >{err_out_file} 2>&1 \n" + \
                    "echo $? >{res_code_file}\n"

        exec_folder = self.config.remote_dir

        if self.use_system_fio:
            fio_path = ""
        else:
            if not exec_folder.endswith("/"):
                fio_path = exec_folder + "/"
            else:
                fio_path = exec_folder

        bash_file = bash_file.format(out_file=self.results_file,
                                     job_file=self.task_file,
                                     err_out_file=self.err_out_file,
                                     res_code_file=self.exit_code_file,
                                     exec_folder=exec_folder,
                                     fio_path=fio_path)

        with node.connection.open_sftp() as sftp:
            save_to_remote(sftp, self.task_file, str(fio_cfg))
            save_to_remote(sftp, self.sh_file, bash_file)

        exec_time = execution_time(fio_cfg)

        timeout = int(exec_time + max(300, exec_time))
        soft_tout = exec_time

        begin = time.time()

        fnames_before = run_on_node(node)("ls -1 " + exec_folder, nolog=True)

        barrier.wait()

        task = BGSSHTask(node, self.use_sudo)
        task.start(sudo + "bash " + self.sh_file)

        while True:
            try:
                task.wait(soft_tout, timeout)
                break
            except paramiko.SSHException:
                pass

            try:
                node.connection.close()
            except:
                pass

            reconnect(node.connection, node.conn_url)

        end = time.time()
        rossh = run_on_node(node)
        fnames_after = rossh("ls -1 " + exec_folder, nolog=True)

        conn_id = node.get_conn_id().replace(":", "_")
        if not nolog:
            logger.debug("Test on node {0} is finished".format(conn_id))

        log_files_pref = []
        if 'write_lat_log' in fio_cfg.vals:
            fname = fio_cfg.vals['write_lat_log']
            log_files_pref.append(fname + '_clat')
            log_files_pref.append(fname + '_lat')
            log_files_pref.append(fname + '_slat')

        if 'write_iops_log' in fio_cfg.vals:
            fname = fio_cfg.vals['write_iops_log']
            log_files_pref.append(fname + '_iops')

        if 'write_bw_log' in fio_cfg.vals:
            fname = fio_cfg.vals['write_bw_log']
            log_files_pref.append(fname + '_bw')

        files = collections.defaultdict(lambda: [])
        all_files = [os.path.basename(self.results_file)]
        new_files = set(fnames_after.split()) - set(fnames_before.split())

        for fname in new_files:
            if fname.endswith('.log') and fname.split('.')[0] in log_files_pref:
                name, _ = os.path.splitext(fname)
                if fname.count('.') == 1:
                    tp = name.split("_")[-1]
                    cnt = 0
                else:
                    tp_cnt = name.split("_")[-1]
                    tp, cnt = tp_cnt.split('.')
                files[tp].append((int(cnt), fname))
                all_files.append(fname)

        arch_name = self.join_remote('wally_result.tar.gz')
        tmp_dir = os.path.join(self.config.log_directory, 'tmp_' + conn_id)

        if os.path.exists(tmp_dir):
            shutil.rmtree(tmp_dir)

        os.mkdir(tmp_dir)
        loc_arch_name = os.path.join(tmp_dir, 'wally_result.{0}.tar.gz'.format(conn_id))
        file_full_names = " ".join(all_files)

        try:
            os.unlink(loc_arch_name)
        except:
            pass

        with node.connection.open_sftp() as sftp:
            try:
                exit_code = read_from_remote(sftp, self.exit_code_file)
            except IOError:
                logger.error("No exit code file found on %s. Looks like the process failed to start",
                             conn_id)
                return None

            err_out = read_from_remote(sftp, self.err_out_file)
            exit_code = exit_code.strip()

            if exit_code != '0':
                msg = "fio exited with code {0}: {1}".format(exit_code, err_out)
                logger.critical(msg.strip())
                raise StopTestError("fio failed")

            rossh("rm -f {0}".format(arch_name), nolog=True)
            pack_files_cmd = "cd {0} ; tar zcvf {1} {2}".format(exec_folder, arch_name, file_full_names)
            rossh(pack_files_cmd, nolog=True)
            sftp.get(arch_name, loc_arch_name)

        unpack_files_cmd = "cd {0} ; tar xvzf {1} >/dev/null".format(tmp_dir, loc_arch_name)
        subprocess.check_call(unpack_files_cmd, shell=True)
        os.unlink(loc_arch_name)

        for ftype, fls in files.items():
            for idx, fname in fls:
                cname = os.path.join(tmp_dir, fname)
                loc_fname = "{0}_{1}_{2}.{3}.log".format(pos, conn_id, ftype, idx)
                loc_path = os.path.join(self.config.log_directory, loc_fname)
                os.rename(cname, loc_path)

        cname = os.path.join(tmp_dir,
                             os.path.basename(self.results_file))
        loc_fname = "{0}_{1}_rawres.json".format(pos, conn_id)
        loc_path = os.path.join(self.config.log_directory, loc_fname)
        os.rename(cname, loc_path)
        os.rmdir(tmp_dir)

        remove_remote_res_files_cmd = "cd {0} ; rm -f {1} {2}".format(exec_folder,
                                                                      arch_name,
                                                                      file_full_names)
        rossh(remove_remote_res_files_cmd, nolog=True)
        return begin, end

    @classmethod
    def prepare_data(cls, results):
        """
        Collect io performance data for the console report,
        one dict per test run
        """

        def key_func(data):
            tpl = data.summary_tpl()
            return (data.name,
                    tpl.oper,
                    tpl.mode,
                    ssize2b(tpl.bsize),
                    int(tpl.th_count) * int(tpl.vm_count))
        res = []

        for item in sorted(results, key=key_func):
            test_dinfo = item.disk_perf_info()

            iops, _ = test_dinfo.iops.rounded_average_conf()

            bw, bw_conf = test_dinfo.bw.rounded_average_conf()
            _, bw_dev = test_dinfo.bw.rounded_average_dev()
            conf_perc = int(round(bw_conf * 100 / bw))
            dev_perc = int(round(bw_dev * 100 / bw))

            lat_50 = round_3_digit(int(test_dinfo.lat_50))
            lat_95 = round_3_digit(int(test_dinfo.lat_95))
            lat_avg = round_3_digit(int(test_dinfo.lat_avg))

            testnodes_count = len(item.config.nodes)
            iops_per_vm = round_3_digit(iops / testnodes_count)
            bw_per_vm = round_3_digit(bw / testnodes_count)

            iops = round_3_digit(iops)
            bw = round_3_digit(bw)

            summ = "{0.oper}{0.mode} {0.bsize:>4} {0.th_count:>3}th {0.vm_count:>2}vm".format(item.summary_tpl())

            res.append({"name": key_func(item)[0],
                        "key": key_func(item)[:4],
                        "summ": summ,
                        "iops": int(iops),
                        "bw": int(bw),
                        "conf": str(conf_perc),
                        "dev": str(dev_perc),
                        "iops_per_vm": int(iops_per_vm),
                        "bw_per_vm": int(bw_per_vm),
                        "lat_50": lat_50,
                        "lat_95": lat_95,
                        "lat_avg": lat_avg})

        return res

    Field = collections.namedtuple("Field", ("header", "attr", "allign", "size"))
    fiels_and_header = [
        Field("Name", "name", "l", 7),
        Field("Description", "summ", "l", 19),
        Field("IOPS\ncum", "iops", "r", 3),
        Field("KiBps\ncum", "bw", "r", 6),
        Field("Cnf %\n95%", "conf", "r", 3),
        Field("Dev%", "dev", "r", 3),
        Field("iops\n/vm", "iops_per_vm", "r", 3),
        Field("KiBps\n/vm", "bw_per_vm", "r", 6),
        Field("lat ms\nmedian", "lat_50", "r", 3),
        Field("lat ms\n95%", "lat_95", "r", 3),
        Field("lat\navg", "lat_avg", "r", 3),
    ]

    fiels_and_header_dct = dict((item.attr, item) for item in fiels_and_header)

    @classmethod
    def format_for_console(cls, results):
        """
        Create a text table with the io performance report
        for console output
        """

        tab = texttable.Texttable(max_width=120)
        tab.set_deco(tab.HEADER | tab.VLINES | tab.BORDER)
        tab.set_cols_align([f.allign for f in cls.fiels_and_header])
        sep = ["-" * f.size for f in cls.fiels_and_header]
        tab.header([f.header for f in cls.fiels_and_header])
        prev_k = None
        for item in cls.prepare_data(results):
            if prev_k is not None:
                if prev_k != item["key"]:
                    tab.add_row(sep)

            prev_k = item["key"]
            tab.add_row([item[f.attr] for f in cls.fiels_and_header])

        return tab.draw()

    @classmethod
    def format_diff_for_console(cls, list_of_results):
        """
        Create a text table comparing io performance of several test runs
        for console output
        """

        tab = texttable.Texttable(max_width=200)
        tab.set_deco(tab.HEADER | tab.VLINES | tab.BORDER)

        header = [
            cls.fiels_and_header_dct["name"].header,
            cls.fiels_and_header_dct["summ"].header,
        ]
        allign = ["l", "l"]

        header.append("IOPS ~ Cnf% ~ Dev%")
        allign.extend(["r"] * len(list_of_results))
        header.extend(
            "IOPS_{0} %".format(i + 2) for i in range(len(list_of_results[1:]))
        )

        header.append("BW")
        allign.extend(["r"] * len(list_of_results))
        header.extend(
            "BW_{0} %".format(i + 2) for i in range(len(list_of_results[1:]))
        )

        header.append("LAT")
        allign.extend(["r"] * len(list_of_results))
        header.extend(
            "LAT_{0}".format(i + 2) for i in range(len(list_of_results[1:]))
        )

        tab.header(header)
        sep = ["-" * 3] * len(header)
        processed_results = map(cls.prepare_data, list_of_results)

        key2results = []
        for res in processed_results:
            key2results.append(dict(
                ((item["name"], item["summ"]), item) for item in res
            ))

        prev_k = None
        iops_frmt = "{0[iops]} ~ {0[conf]:>2} ~ {0[dev]:>2}"
        for item in processed_results[0]:
            if prev_k is not None:
                if prev_k != item["key"]:
                    tab.add_row(sep)

            prev_k = item["key"]

            key = (item['name'], item['summ'])
            line = list(key)
            base = key2results[0][key]

            line.append(iops_frmt.format(base))

            for test_results in key2results[1:]:
                val = test_results.get(key)
                if val is None:
                    line.append("-")
                elif base['iops'] == 0:
                    line.append("Nan")
                else:
                    prc_val = {'dev': val['dev'], 'conf': val['conf']}
                    prc_val['iops'] = int(100 * val['iops'] / base['iops'])
                    line.append(iops_frmt.format(prc_val))

            line.append(base['bw'])

            for test_results in key2results[1:]:
                val = test_results.get(key)
                if val is None:
                    line.append("-")
                elif base['bw'] == 0:
                    line.append("Nan")
                else:
                    line.append(int(100 * val['bw'] / base['bw']))

            for test_results in key2results:
                val = test_results.get(key)
                if val is None:
                    line.append("-")
                else:
                    line.append("{0[lat_50]} - {0[lat_95]}".format(val))

            tab.add_row(line)

        tab.set_cols_align(allign)
        return tab.draw()