""" Analyze test results for finding bottlenecks """

import re
import sys
import csv
import time
import bisect
import os.path
import argparse
import collections


import yaml
import texttable

try:
    import pygraphviz as pgv
except ImportError:
    pgv = None

from wally.run_test import load_data_from
from wally.utils import b2ssize, b2ssize_10


class SensorInfo(object):
    def __init__(self, name, print_name, native_ext, to_bytes_coef):
        self.name = name
        self.print_name = print_name
        self.native_ext = native_ext
        self.to_bytes_coef = to_bytes_coef


_SINFO = [
    SensorInfo('recv_bytes', 'net_recv', 'B', 1),
    SensorInfo('send_bytes', 'net_send', 'B', 1),
    SensorInfo('sectors_written', 'hdd_write', 'Sect', 512),
    SensorInfo('sectors_read', 'hdd_read', 'Sect', 512),
    SensorInfo('reads_completed', 'read_op', 'OP', None),
    SensorInfo('writes_completed', 'write_op', 'OP', None),
]

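# SINFO_MAP maps a raw sensor name to its SensorInfo record; to_bytes maps the
# byte-like sensors to the multiplier that converts their native unit
# (e.g. 512-byte sectors) into bytes.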
SINFO_MAP = dict((sinfo.name, sinfo) for sinfo in _SINFO)
to_bytes = dict((sinfo.name, sinfo.to_bytes_coef)
                for sinfo in _SINFO
                if sinfo.to_bytes_coef is not None)


class NodeSensorsData(object):
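    """Per-node sensor time series: headers is a list of (device, sensor)
    pairs (the first entry is the time column), values is a list of integer
    rows in the same order, and times caches the time column once
    finalize() has been called."""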
    def __init__(self, source_id, hostname, headers, values):
        self.source_id = source_id
        self.hostname = hostname
        self.headers = headers
        self.values = values
        self.times = None

    def finalize(self):
        self.times = [v[0] for v in self.values]

    def get_data_for_interval(self, beg, end):
        p1 = bisect.bisect_left(self.times, beg)
        p2 = bisect.bisect_right(self.times, end)

        obj = self.__class__(self.source_id,
                             self.hostname,
                             self.headers,
                             self.values[p1:p2])
        obj.times = self.times[p1:p2]
        return obj

    def __getitem__(self, name):
        idx = self.headers.index(name.split('.'))
        # headers keep the leading time column too, so the header index
        # maps directly onto the columns of the value rows
        return [val[idx] for val in self.values]


def load_results_csv(fd):
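    """Parse a raw sensor dump: blocks are separated by a NEW_DATA marker;
    inside a block the first CSV row holds source_id, hostname and the
    'dev.sensor' column names, and every following row is integers with the
    timestamp in the first column.  Returns {source_id: NodeSensorsData}."""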
    data = fd.read()
    results = {}
    for block in data.split("NEW_DATA"):
        block = block.strip()
        if len(block) == 0:
            continue

        it = csv.reader(block.split("\n"))
        headers = next(it)
        sens_data = [map(int, vals) for vals in it]
        source_id, hostname = headers[:2]
        headers = [(None, 'time')] + \
                  [header.split('.') for header in headers[2:]]
        assert set(map(len, headers)) == set([2])

        results[source_id] = NodeSensorsData(source_id, hostname,
                                             headers, sens_data)

    return results


def load_test_timings(fname, max_diff=1000):
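    """Extract the run intervals of every io test stored in fname, grouped
    by test config name, and merge intervals that lie within max_diff of
    each other (max_diff is in the same units as the stored timestamps,
    presumably milliseconds)."""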
    raw_map = collections.defaultdict(lambda: [])

    # bare namespace object - load_data_from stores the loaded results on it
    class data(object):
        pass

    load_data_from(fname)(None, data)

    for test_type, test_results in data.results:
        if test_type == 'io':
            for tests_res in test_results:
                raw_map[tests_res.config.name].append(tests_res.run_interval)

    result = {}
    for name, intervals in raw_map.items():
        intervals.sort()
        curr_start, curr_stop = intervals[0]
        curr_result = []

        # intervals whose starts (and stops) lie within max_diff of each
        # other are treated as the same run
        for (start, stop) in intervals[1:]:
            if abs(curr_start - start) < max_diff:
                # if abs(curr_stop - stop) > 2:
                #     print abs(curr_stop - stop)
                assert abs(curr_stop - stop) < max_diff
            else:
                assert start + max_diff >= curr_stop
                assert stop > curr_stop
                curr_result.append((curr_start, curr_stop))
                curr_start, curr_stop = start, stop
        curr_result.append((curr_start, curr_stop))

        # then merge runs separated by a gap smaller than max_diff
        merged_res = []
        curr_start, curr_stop = curr_result[0]
        for start, stop in curr_result[1:]:
            if abs(curr_stop - start) < max_diff:
                curr_stop = stop
            else:
                merged_res.append((curr_start, curr_stop))
                curr_start, curr_stop = start, stop
        merged_res.append((curr_start, curr_stop))
        result[name] = merged_res

    return result


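# Thresholds used by avg_load(): a sample is counted as overloaded once the
# sensor value exceeds the corresponding limit.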
critical_values = dict(
    io_queue=1,
    mem_usage_percent=0.8)


class AggregatedData(object):
    def __init__(self, sensor_name):
        self.sensor_name = sensor_name

        # (node, device): count
        self.per_device = collections.defaultdict(lambda: 0)

        # node: count
        self.per_node = collections.defaultdict(lambda: 0)

        # role: count
        self.per_role = collections.defaultdict(lambda: 0)

        # (role_or_node, device_or_*): count
        self.all_together = collections.defaultdict(lambda: 0)

    def __str__(self):
        res = "<AggregatedData({0})>\n".format(self.sensor_name)
        for (role_or_node, device), val in self.all_together.items():
            res += "    {0}:{1} = {2}\n".format(role_or_node, device, val)
        return res


def total_consumption(sensors_data, roles_map):
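    """Sum every sensor over the whole interval, per (host, device), then
    roll the sums up per node and per role (using roles_map) into
    AggregatedData objects keyed by sensor name.  all_together is keyed by
    (node_or_role, device), with '*' standing for all devices."""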
    result = {}

    for name, sensor_data in sensors_data.items():
        for pos, (dev, sensor) in enumerate(sensor_data.headers):
            if 'time' == sensor:
                continue

            try:
                ad = result[sensor]
            except KeyError:
                ad = result[sensor] = AggregatedData(sensor)

            val = sum(vals[pos] for vals in sensor_data.values)

            ad.per_device[(sensor_data.hostname, dev)] += val

    # vals1 = sensors_data['localhost:22']['sdc.sectors_read']
    # vals2 = sensors_data['localhost:22']['sdb.sectors_written']

    # from matplotlib import pyplot as plt
    # plt.plot(range(len(vals1)), vals1)
    # plt.plot(range(len(vals2)), vals2)
    # plt.show()
    # exit(1)

    for ad in result.values():
        for (hostname, dev), val in ad.per_device.items():
            ad.per_node[hostname] += val

            for role in roles_map[hostname]:
                ad.per_role[role] += val

            ad.all_together[(hostname, dev)] = val

        for role, val in ad.per_role.items():
            ad.all_together[(role, '*')] = val

        for node, val in ad.per_node.items():
            ad.all_together[(node, '*')] = val

    return result


def avg_load(data):
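    """Count, for every (host, device, sensor) triple, how many samples
    exceed the thresholds in critical_values, and return the counters plus
    the covered time span."""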
    # defaultdict, so a counter appears the first time a threshold is hit
    load = collections.defaultdict(lambda: 0)

    min_time = 0xFFFFFFFFFFF
    max_time = 0

    for tm, item in data:

        min_time = min(min_time, item.ctime)
        max_time = max(max_time, item.ctime)

        for name, max_val in critical_values.items():
            for (dev, sensor), val in item.values:
                if sensor == name and val > max_val:
                    load[(item.hostname, dev, sensor)] += 1
    return load, max_time - min_time


def print_bottlenecks(data_iter, max_bottlenecks=15):
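    """Render the max_bottlenecks most frequently overloaded
    (host, device, sensor) triples as a small text table; the percentage is
    the overload count divided by the covered time span (which assumes
    roughly one sample per time unit)."""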
    load, duration = avg_load(data_iter)
    rev_items = ((v, k) for (k, v) in load.items())

    res = sorted(rev_items, reverse=True)[:max_bottlenecks]

    max_name_sz = max(len(name) for _, name in res)
    frmt = "{{0:>{0}}} | {{1:>4}}".format(max_name_sz)
    table = [frmt.format("Component", "% times load > 100%")]

    for (v, k) in res:
        table.append(frmt.format(k, int(v * 100.0 / duration + 0.5)))

    return "\n".join(table)


def print_consumption(agg, min_transfer=None):
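    """Format one AggregatedData as a two-column text table (consumer,
    human-readable byte count); entries below min_transfer bytes are
    skipped, and None is returned if nothing is left."""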
    rev_items = []
    for (node_or_role, dev), v in agg.all_together.items():
        rev_items.append((int(v), node_or_role + ':' + dev))

    res = sorted(rev_items, reverse=True)

    if min_transfer is not None:
        res = [(v, k)
               for (v, k) in res
               if v >= min_transfer]

    if len(res) == 0:
        return None

    res = [(b2ssize(v) + "B", k) for (v, k) in res]

    max_name_sz = max(len(name) for _, name in res)
    max_val_sz = max(len(val) for val, _ in res)

    frmt = " {{0:>{0}}} | {{1:>{1}}} ".format(max_name_sz, max_val_sz)
    table = [frmt.format("Component", "Usage")]

    for (v, k) in res:
        table.append(frmt.format(k, v))

    return "\n".join(table)


def make_roles_mapping(source_id_mapping, source_id2hostname):
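    """Normalize the ssh urls from nodes.yaml into 'host:port' source ids
    and map both the source id and its hostname to the node's role list.
    Sources that only show up in the sensor data are assumed to be test
    nodes."""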
    result = {}
    for ssh_url, roles in source_id_mapping.items():
        if '@' in ssh_url:
            source_id = ssh_url.split('@')[1]
        else:
            source_id = ssh_url.split('://')[1]

        if source_id.count(':') == 2:
            source_id = source_id.rsplit(":", 1)[0]

        if source_id.endswith(':'):
            source_id += "22"

        if source_id in source_id2hostname:
            result[source_id] = roles
            result[source_id2hostname[source_id]] = roles

    for testnode_src in (set(source_id2hostname) - set(result)):
        result[testnode_src] = ['testnode']
        result[source_id2hostname[testnode_src]] = ['testnode']

    return result


def get_testdata_size(consumption):
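    """Largest number of bytes generated by the test nodes across the
    byte-valued sensors - used later as a reference for filtering noise."""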
    max_data = 0
    for name, sens in SINFO_MAP.items():
        if sens.to_bytes_coef is not None:
            agg = consumption.get(name)
            if agg is not None:
                cdt = agg.per_role.get('testnode', 0) * sens.to_bytes_coef
                max_data = max(max_data, cdt)
    return max_data


def get_testop_count(consumption):
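    """Largest operation count produced by the test nodes across the
    op-valued sensors (those without a bytes coefficient)."""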
    max_op = 0
    for name, sens in SINFO_MAP.items():
        if sens.to_bytes_coef is None:
            agg = consumption.get(name)
            if agg is not None:
                max_op = max(max_op, agg.per_role.get('testnode', 0))
    return max_op


def get_data_for_intervals(data, intervals):
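    """Slice every node's sensor data to the given intervals.  Note that
    each node entry is overwritten per interval, so with several intervals
    only the slice for the last one is returned."""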
    res = {}
    for begin, end in intervals:
        for name, node_data in data.items():
            ndata = node_data.get_data_for_interval(begin, end)
            res[name] = ndata
    return res


class Host(object):
    def __init__(self, name=None):
        self.name = name
        self.hdd_devs = {}
        self.net_devs = None


def plot_consumption(per_consumer_table, fields, refload):
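    """Build a graphviz graph of the cluster: one cluster per host, disk
    devices as nodes and read/write traffic as edges whose width is scaled
    by refload.  Returns the graph source as a string, or None when
    pygraphviz is not installed."""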
    if pgv is None:
        return

    hosts = {}
    storage_sensors = ('sectors_written', 'sectors_read')

    for (hostname, dev), consumption in per_consumer_table.items():
        if hostname not in hosts:
            hosts[hostname] = Host(hostname)

        host = hosts[hostname]
        cons_map = dict(zip(fields, consumption))

        for sn in storage_sensors:
            vl = cons_map.get(sn, 0)
            if vl > 0:
                host.hdd_devs.setdefault(dev, {})[sn] = vl

    p = pgv.AGraph(name='system', directed=True)

    net = "Network"
    p.add_node(net)

    in_color = 'red'
    out_color = 'green'

    for host in hosts.values():
        g = p.subgraph(name="cluster_" + host.name, label=host.name,
                       color="blue")
        g.add_node(host.name, shape="diamond")
        p.add_edge(host.name, net)
        p.add_edge(net, host.name)

        for dev_name, values in host.hdd_devs.items():
            if dev_name == '*':
                continue

            to = values.get('sectors_written', 0)
            frm = values.get('sectors_read', 0)
            to_pw = 7 * to / refload
            frm_pw = 7 * frm / refload
            min_width = 0.1

            if to_pw > min_width or frm_pw > min_width:
                dev_fqn = host.name + "." + dev_name
                g.add_node(dev_fqn)

                if to_pw > min_width:
                    g.add_edge(host.name, dev_fqn,
                               label=b2ssize(to) + "B",
                               penwidth=to_pw,
                               fontcolor=out_color,
                               color=out_color)

                if frm_pw > min_width:
                    g.add_edge(dev_fqn, host.name,
                               label=b2ssize(frm) + "B",
                               penwidth=frm_pw,
                               color=in_color,
                               fontcolor=in_color)

    return p.string()


def parse_args(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--time_period', nargs=2,
                        type=int, default=None,
                        help="Begin and end time for tests")
    parser.add_argument('-m', '--max-bottleneck', type=int,
                        default=15, help="Max number of bottlenecks to show")
    parser.add_argument('-x', '--max-diff', type=int,
                        default=10,
                        help="Minimum displayed value, in units of 0.1%% "
                             "of the test nodes' total load")
    parser.add_argument('-d', '--debug-ver', action='store_true',
                        help="Full report with original data")
    parser.add_argument('-u', '--user-ver', action='store_true',
                        default=True, help="Avg load report")
    parser.add_argument('-s', '--select-loads', nargs='*', default=[])
    parser.add_argument('-f', '--fields', nargs='*', default=[])
    parser.add_argument('results_folder')
    return parser.parse_args(args[1:])


def main(argv):
    opts = parse_args(argv)

    stor_dir = os.path.join(opts.results_folder, 'sensor_storage')
    data = {}
    source_id2hostname = {}

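    # Sensor dumps are named <number>_<number>.csv; load them in order of
    # the first number and concatenate rows that belong to the same source.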
    csv_files = os.listdir(stor_dir)
    for fname in csv_files:
        assert re.match(r"\d+_\d+\.csv$", fname)

    csv_files.sort(key=lambda x: int(x.split('_')[0]))

    for fname in csv_files:
        with open(os.path.join(stor_dir, fname)) as fd:
            for name, node_sens_data in load_results_csv(fd).items():
                if name in data:
                    assert data[name].hostname == node_sens_data.hostname
                    assert data[name].source_id == node_sens_data.source_id
                    assert data[name].headers == node_sens_data.headers
                    data[name].values.extend(node_sens_data.values)
                else:
                    data[name] = node_sens_data

    for nd in data.values():
        assert nd.source_id not in source_id2hostname
        source_id2hostname[nd.source_id] = nd.hostname
        nd.finalize()

    roles_file = os.path.join(opts.results_folder,
                              'nodes.yaml')

    src2roles = yaml.load(open(roles_file))

    timings = load_test_timings(opts.results_folder)

    roles_map = make_roles_mapping(src2roles, source_id2hostname)
    max_diff = float(opts.max_diff) / 1000

    fields = ('recv_bytes', 'send_bytes',
              'sectors_read', 'sectors_written',
              'reads_completed', 'writes_completed')

    if opts.fields != []:
        fields = [field for field in fields if field in opts.fields]

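    # For every test load: slice the sensor data to the test's run
    # intervals, aggregate the consumption per consumer, and print a table,
    # hiding values smaller than max_diff of the test nodes' own totals.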
    for test_name, intervals in sorted(timings.items()):
        if opts.select_loads != []:
            if test_name not in opts.select_loads:
                continue

        data_chunks = get_data_for_intervals(data, intervals)

        consumption = total_consumption(data_chunks, roles_map)

        testdata_sz = get_testdata_size(consumption) * max_diff
        testop_count = get_testop_count(consumption) * max_diff

        per_consumer_table = {}
        per_consumer_table_str = {}

        all_consumers = set(consumption.values()[0].all_together)
        fields = [field for field in fields if field in consumption]
        all_consumers_sum = []

        for consumer in all_consumers:
            tb_str = per_consumer_table_str[consumer] = []
            tb = per_consumer_table[consumer] = []
            vl = 0
            for name in fields:
                val = consumption[name].all_together[consumer]
                if SINFO_MAP[name].to_bytes_coef is None:
                    if val < testop_count:
                        tb_str.append('0')
                    else:
                        tb_str.append(b2ssize_10(int(val)))
                else:
                    val = int(val) * SINFO_MAP[name].to_bytes_coef
                    if val < testdata_sz:
                        tb_str.append('-')
                    else:
                        tb_str.append(b2ssize(val) + "B")
                tb.append(int(val))
                vl += int(val)
            all_consumers_sum.append((vl, consumer))

        all_consumers_sum.sort(reverse=True)

        plot_consumption(per_consumer_table, fields,
                         testdata_sz / max_diff)

        tt = texttable.Texttable(max_width=130)
        tt.set_cols_align(["l"] + ["r"] * len(fields))

        header = ["Name"]
        for fld in fields:
            if fld in SINFO_MAP:
                header.append(SINFO_MAP[fld].print_name)
            else:
                header.append(fld)
        tt.header(header)

        for summ, consumer in all_consumers_sum:
            if summ > 0:
                tt.add_row([":".join(consumer)] +
                           per_consumer_table_str[consumer])

        tt.set_deco(texttable.Texttable.VLINES | texttable.Texttable.HEADER)
        res = tt.draw()
        max_len = max(map(len, res.split("\n")))
        print test_name.center(max_len)
        print res


if __name__ == "__main__":
    exit(main(sys.argv))