""" Analyze test results for finding bottlenecks """

import re
import sys
import csv
import time
import bisect
import os.path
import argparse
import collections


import yaml
import texttable

try:
    import pygraphviz as pgv
except ImportError:
    pgv = None

sys.path.append("/mnt/other/work/disk_perf_test_tool")
from wally.run_test import load_data_from
from wally.utils import b2ssize, b2ssize_10


class SensorInfo(object):
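    """Description of one sensor: raw name, name used in report headers,
    native unit label and an optional coefficient for converting raw
    values to bytes."""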
    def __init__(self, name, print_name, native_ext, to_bytes_coef):
        self.name = name
        self.print_name = print_name
        self.native_ext = native_ext
        self.to_bytes_coef = to_bytes_coef


_SINFO = [
    SensorInfo('recv_bytes', 'net_recv', 'B', 1),
    SensorInfo('send_bytes', 'net_send', 'B', 1),
    SensorInfo('sectors_written', 'hdd_write', 'Sect', 512),
    SensorInfo('sectors_read', 'hdd_read', 'Sect', 512),
    SensorInfo('reads_completed', 'read_op', 'OP', None),
    SensorInfo('writes_completed', 'write_op', 'OP', None),
    SensorInfo('procs_blocked', 'blocked_procs', 'P', None),
]

SINFO_MAP = dict((sinfo.name, sinfo) for sinfo in _SINFO)
to_bytes = dict((sinfo.name, sinfo.to_bytes_coef)
                for sinfo in _SINFO
                if sinfo.to_bytes_coef is not None)


class NodeSensorsData(object):
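    """Sensor time series collected from one node: headers is a list of
    (device, sensor) pairs whose first entry is the time column, and every
    row in values starts with a timestamp."""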
    def __init__(self, source_id, hostname, headers, values):
        self.source_id = source_id
        self.hostname = hostname
        self.headers = headers
        self.values = values
        self.times = None

    def finalize(self):
        self.times = [v[0] for v in self.values]

    def get_data_for_interval(self, beg, end):
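        """Return a copy restricted to samples whose timestamps fall inside
        [beg, end]; finalize() must have been called first so that
        self.times is populated."""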
        p1 = bisect.bisect_left(self.times, beg)
        p2 = bisect.bisect_right(self.times, end)

        obj = self.__class__(self.source_id,
                             self.hostname,
                             self.headers,
                             self.values[p1:p2])
        obj.times = self.times[p1:p2]
        return obj

    def __getitem__(self, name):
        # self.headers already contains the leading (None, 'time') entry,
        # so the index found here lines up with the column in each row
        idx = self.headers.index(name.split('.'))
        return [val[idx] for val in self.values]


def load_results_csv(fd):
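    """Parse a sensor storage file as this script expects it: blocks
    separated by "NEW_DATA" markers, each block being CSV with a header row
    of (source_id, hostname, dev.sensor, ...) followed by numeric rows whose
    first column is a timestamp.  Returns {source_id: NodeSensorsData}."""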
    data = fd.read()
    results = {}
    for block in data.split("NEW_DATA"):
        block = block.strip()
        if len(block) == 0:
            continue

        it = csv.reader(block.split("\n"))
        headers = next(it)
        sens_data = [map(float, vals) for vals in it]
        source_id, hostname = headers[:2]
        headers = [(None, 'time')] + \
                  [header.split('.') for header in headers[2:]]
        assert set(map(len, headers)) == set([2])

        results[source_id] = NodeSensorsData(source_id, hostname,
                                             headers, sens_data)

    return results


def load_test_timings(fname, max_diff=1000):
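    """Load test run intervals from the results folder and, for each test
    configuration name, merge intervals whose boundaries are closer together
    than max_diff."""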
    raw_map = collections.defaultdict(lambda: [])

    class data(object):
        pass

    load_data_from(fname)(None, data)
    for test_type, test_results in data.results.items():
        if test_type == 'io':
            for tests_res in test_results:
                raw_map[tests_res.config.name].append(tests_res.run_interval)

    result = {}
    for name, intervals in raw_map.items():
        intervals.sort()
        curr_start, curr_stop = intervals[0]
        curr_result = []

        for (start, stop) in intervals[1:]:
            if abs(curr_start - start) < max_diff:
                # if abs(curr_stop - stop) > 2:
                #     print abs(curr_stop - stop)
                assert abs(curr_stop - stop) < max_diff
            else:
                assert start + max_diff >= curr_stop
                assert stop > curr_stop
                curr_result.append((curr_start, curr_stop))
                curr_start, curr_stop = start, stop
        curr_result.append((curr_start, curr_stop))

        merged_res = []
        curr_start, curr_stop = curr_result[0]
        for start, stop in curr_result[1:]:
            if abs(curr_stop - start) < max_diff:
                curr_stop = stop
            else:
                merged_res.append((curr_start, curr_stop))
                curr_start, curr_stop = start, stop
        merged_res.append((curr_start, curr_stop))
        result[name] = merged_res

    return result


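# Per-sensor thresholds: avg_load() counts every sample above the threshold
# as one moment when the corresponding component was overloaded.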
critical_values = dict(
    io_queue=1,
    usage_percent=0.8,
    procs_blocked=1,
    procs_queue=1)


class AggregatedData(object):
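    """Aggregated totals for one sensor, keyed per (node, device), per node
    and per role; all_together mixes (node, device) and (role or node, '*')
    entries for reporting."""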
    def __init__(self, sensor_name):
        self.sensor_name = sensor_name

        # (node, device): count
        self.per_device = collections.defaultdict(lambda: 0)

        # node: count
        self.per_node = collections.defaultdict(lambda: 0)

        # role: count
        self.per_role = collections.defaultdict(lambda: 0)

        # (role_or_node, device_or_*): count
        self.all_together = collections.defaultdict(lambda: 0)

    def __str__(self):
        res = "<AggregatedData({0})>\n".format(self.sensor_name)
        for (role_or_node, device), val in self.all_together.items():
            res += " {0}:{1} = {2}\n".format(role_or_node, device, val)
        return res


def total_consumption(sensors_data, roles_map):
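    """Sum every sensor column over the supplied data and aggregate the
    totals per (node, device), per node and per role.
    Returns {sensor_name: AggregatedData}."""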
    result = {}

    for name, sensor_data in sensors_data.items():
        for pos, (dev, sensor) in enumerate(sensor_data.headers):

            if 'time' == sensor:
                continue

            try:
                ad = result[sensor]
            except KeyError:
                ad = result[sensor] = AggregatedData(sensor)

            val = sum(vals[pos] for vals in sensor_data.values)

            ad.per_device[(sensor_data.hostname, dev)] += val

    # vals1 = sensors_data['localhost:22']['sdc.sectors_read']
    # vals2 = sensors_data['localhost:22']['sdb.sectors_written']

    # from matplotlib import pyplot as plt
    # plt.plot(range(len(vals1)), vals1)
    # plt.plot(range(len(vals2)), vals2)
    # plt.show()
    # exit(1)

    for ad in result.values():
        for (hostname, dev), val in ad.per_device.items():
            ad.per_node[hostname] += val

            for role in roles_map[hostname]:
                ad.per_role[role] += val

            ad.all_together[(hostname, dev)] = val

        for role, val in ad.per_role.items():
            ad.all_together[(role, '*')] = val

        for node, val in ad.per_node.items():
            ad.all_together[(node, '*')] = val

    return result


def avg_load(sensors_data):
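    """Count, per (hostname, device, sensor), how many samples exceeded the
    corresponding threshold from critical_values.  Returns the counters and
    the overall observation time span."""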
    load = collections.defaultdict(lambda: 0)

    min_time = 0xFFFFFFFFFFF
    max_time = 0

    for sensor_data in sensors_data.values():

        min_time = min(min_time, min(sensor_data.times))
        max_time = max(max_time, max(sensor_data.times))

        for name, max_val in critical_values.items():
            for pos, (dev, sensor) in enumerate(sensor_data.headers):
                if sensor == name:
                    for vals in sensor_data.values:
                        if vals[pos] > max_val:
                            load[(sensor_data.hostname, dev, sensor)] += 1
    return load, max_time - min_time


def print_bottlenecks(sensors_data, max_bottlenecks=15):
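    """Format the avg_load() counters as a text table of the worst
    max_bottlenecks components, expressed as a percentage of the
    observation time."""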
    load, duration = avg_load(sensors_data)

    if not load:
        return "\n*** No bottlenecks found *** \n"

    rev_items = ((v, k) for (k, v) in load.items())

    res = sorted(rev_items, reverse=True)[:max_bottlenecks]

    max_name_sz = max(len(name) for _, name in res)
    frmt = "{{0:>{0}}} | {{1:>4}}".format(max_name_sz)
    table = [frmt.format("Component", "% times load > 100%")]

    for (v, k) in res:
        table.append(frmt.format(k, int(v * 100.0 / duration + 0.5)))

    return "\n".join(table)


def print_consumption(agg, min_transfer=None):
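    """Format one AggregatedData as a "Component | Usage" text table,
    skipping consumers below min_transfer; returns None when nothing is
    left to show."""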
    rev_items = []
    for (node_or_role, dev), v in agg.all_together.items():
        rev_items.append((int(v), node_or_role + ':' + dev))

    res = sorted(rev_items, reverse=True)

    if min_transfer is not None:
        res = [(v, k)
               for (v, k) in res
               if v >= min_transfer]

    if len(res) == 0:
        return None

    res = [(b2ssize(v) + "B", k) for (v, k) in res]

    max_name_sz = max(len(name) for _, name in res)
    max_val_sz = max(len(val) for val, _ in res)

    frmt = " {{0:>{0}}} | {{1:>{1}}} ".format(max_name_sz, max_val_sz)
    table = [frmt.format("Component", "Usage")]

    for (v, k) in res:
        table.append(frmt.format(k, v))

    return "\n".join(table)


def make_roles_mapping(source_id_mapping, source_id2hostname):
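    """Map both source ids and hostnames to node roles, normalizing the ssh
    urls found in nodes.yaml; any sensor source without an explicit role is
    treated as a test node."""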
    result = {}
    for ssh_url, roles in source_id_mapping.items():
        if '@' in ssh_url:
            source_id = ssh_url.split('@')[1]
        else:
            source_id = ssh_url.split('://')[1]

        if source_id.count(':') == 2:
            source_id = source_id.rsplit(":", 1)[0]

        if source_id.endswith(':'):
            source_id += "22"

        if source_id in source_id2hostname:
            result[source_id] = roles
            result[source_id2hostname[source_id]] = roles

    for testnode_src in (set(source_id2hostname) - set(result)):
        result[testnode_src] = ['testnode']
        result[source_id2hostname[testnode_src]] = ['testnode']

    return result


def get_testdata_size(consumption):
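    """Largest amount of data (in bytes) moved by the test nodes across the
    byte-convertible sensors; used as a reference scale when filtering the
    report."""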
    max_data = 0
    for name, sens in SINFO_MAP.items():
        if sens.to_bytes_coef is not None:
            agg = consumption.get(name)
            if agg is not None:
                cdt = agg.per_role.get('testnode', 0) * sens.to_bytes_coef
                max_data = max(max_data, cdt)
    return max_data


def get_testop_cout(consumption):
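    """Largest number of operations performed by the test nodes across the
    operation-counting sensors (those without a byte coefficient)."""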
    max_op = 0
    for name, sens in SINFO_MAP.items():
        if sens.to_bytes_coef is None:
            agg = consumption.get(name)
            if agg is not None:
                max_op = max(max_op, agg.per_role.get('testnode', 0))
    return max_op


def get_data_for_intervals(data, intervals):
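    """Slice every node's sensor data down to the given (begin, end)
    intervals; note that when several intervals are passed, only the slice
    for the last one is kept per node."""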
    res = {}
    for begin, end in intervals:
        for name, node_data in data.items():
            ndata = node_data.get_data_for_interval(begin, end)
            res[name] = ndata
    return res


class Host(object):
    def __init__(self, name=None):
        self.name = name
        self.hdd_devs = {}
        self.net_devs = None


def plot_consumption(per_consumer_table, fields, refload):
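    """Render per-host disk traffic as a graphviz graph (requires
    pygraphviz); edge widths are scaled relative to refload.  Returns the
    graph in DOT format, or None when pygraphviz is unavailable."""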
    if pgv is None:
        return

    hosts = {}
    storage_sensors = ('sectors_written', 'sectors_read')

    for (hostname, dev), consumption in per_consumer_table.items():
        if hostname not in hosts:
            hosts[hostname] = Host(hostname)

        host = hosts[hostname]
        cons_map = dict(zip(fields, consumption))

        for sn in storage_sensors:
            vl = cons_map.get(sn, 0)
            if vl > 0:
                host.hdd_devs.setdefault(dev, {})[sn] = vl

    p = pgv.AGraph(name='system', directed=True)

    net = "Network"
    p.add_node(net)

    in_color = 'red'
    out_color = 'green'

    for host in hosts.values():
        g = p.subgraph(name="cluster_" + host.name, label=host.name,
                       color="blue")
        g.add_node(host.name, shape="diamond")
        p.add_edge(host.name, net)
        p.add_edge(net, host.name)

        for dev_name, values in host.hdd_devs.items():
            if dev_name == '*':
                continue

            to = values.get('sectors_written', 0)
            frm = values.get('sectors_read', 0)
            to_pw = 7 * to / refload
            frm_pw = 7 * frm / refload
            min_with = 0.1

            if to_pw > min_with or frm_pw > min_with:
                dev_fqn = host.name + "." + dev_name
                g.add_node(dev_fqn)

                if to_pw > min_with:
                    g.add_edge(host.name, dev_fqn,
                               label=b2ssize(to) + "B",
                               penwidth=to_pw,
                               fontcolor=out_color,
                               color=out_color)

                if frm_pw > min_with:
                    g.add_edge(dev_fqn, host.name,
                               label=b2ssize(frm) + "B",
                               penwidth=frm_pw,
                               color=in_color,
                               fontcolor=in_color)

    return p.string()


def parse_args(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--time_period', nargs=2,
                        type=int, default=None,
                        help="Begin and end time for tests")
    parser.add_argument('-m', '--max-bottleneck', type=int,
                        default=15, help="Max number of bottlenecks to show")
    parser.add_argument('-x', '--max-diff', type=int,
                        default=10, help="Hide consumers below this "
                        "threshold, in 0.1%% of the test nodes' total load")
    parser.add_argument('-d', '--debug-ver', action='store_true',
                        help="Full report with original data")
    parser.add_argument('-u', '--user-ver', action='store_true',
                        default=True, help="Avg load report")
    parser.add_argument('-s', '--select-loads', nargs='*', default=[])
    parser.add_argument('-f', '--fields', nargs='*', default=[])
    parser.add_argument('results_folder')
    return parser.parse_args(args[1:])


def main(argv):
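    """Load sensor CSVs, node roles and test timings from the results
    folder, then for every test print a per-consumer usage table followed
    by the detected bottlenecks."""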
    opts = parse_args(argv)

    stor_dir = os.path.join(opts.results_folder, 'sensor_storage')
    data = {}
    source_id2hostname = {}

    csv_files = os.listdir(stor_dir)
    for fname in csv_files:
        assert re.match(r"\d+_\d+\.csv$", fname)

    csv_files.sort(key=lambda x: int(x.split('_')[0]))

    for fname in csv_files:
        with open(os.path.join(stor_dir, fname)) as fd:
            for name, node_sens_data in load_results_csv(fd).items():
                if name in data:
                    assert data[name].hostname == node_sens_data.hostname
                    assert data[name].source_id == node_sens_data.source_id
                    assert data[name].headers == node_sens_data.headers
                    data[name].values.extend(node_sens_data.values)
                else:
                    data[name] = node_sens_data

    for nd in data.values():
        assert nd.source_id not in source_id2hostname
        source_id2hostname[nd.source_id] = nd.hostname
        nd.finalize()

    roles_file = os.path.join(opts.results_folder,
                              'nodes.yaml')

    src2roles = yaml.load(open(roles_file))

    timings = load_test_timings(opts.results_folder)

    roles_map = make_roles_mapping(src2roles, source_id2hostname)
    max_diff = float(opts.max_diff) / 1000

    fields = ('recv_bytes', 'send_bytes',
              'sectors_read', 'sectors_written',
              'reads_completed', 'writes_completed')

    if opts.fields != []:
        fields = [field for field in fields if field in opts.fields]

    for test_name, intervals in sorted(timings.items()):
        if opts.select_loads != []:
            if test_name not in opts.select_loads:
                continue

        data_chunks = get_data_for_intervals(data, intervals)

        consumption = total_consumption(data_chunks, roles_map)

        bottlenecks = print_bottlenecks(data_chunks)

        testdata_sz = get_testdata_size(consumption) * max_diff
        testop_count = get_testop_cout(consumption) * max_diff

        per_consumer_table = {}
        per_consumer_table_str = {}

        all_consumers = set()  # consumption.values()[0].all_together)
        for value in consumption.values():
            all_consumers = all_consumers | set(value.all_together)
        fields = [field for field in fields if field in consumption]
        all_consumers_sum = []

        for consumer in all_consumers:
            tb_str = per_consumer_table_str[consumer] = []
            tb = per_consumer_table[consumer] = []
            vl = 0
            for name in fields:
                val = consumption[name].all_together[consumer]
                if SINFO_MAP[name].to_bytes_coef is None:
                    if val < testop_count:
                        tb_str.append('0')
                    else:
                        tb_str.append(b2ssize_10(int(val)))
                else:
                    val = int(val) * SINFO_MAP[name].to_bytes_coef
                    if val < testdata_sz:
                        tb_str.append('-')
                    else:
                        tb_str.append(b2ssize(val) + "B")
                tb.append(int(val))
                vl += int(val)
            all_consumers_sum.append((vl, consumer))

        all_consumers_sum.sort(reverse=True)

        plot_consumption(per_consumer_table, fields,
                         testdata_sz / max_diff)

        tt = texttable.Texttable(max_width=130)
        tt.set_cols_align(["l"] + ["r"] * len(fields))

        header = ["Name"]
        for fld in fields:
            if fld in SINFO_MAP:
                header.append(SINFO_MAP[fld].print_name)
            else:
                header.append(fld)
        tt.header(header)

        for summ, consumer in all_consumers_sum:
            if summ > 0:
                tt.add_row([":".join(consumer)] +
                           per_consumer_table_str[consumer])

        tt.set_deco(texttable.Texttable.VLINES | texttable.Texttable.HEADER)
        res = tt.draw()
        max_len = max(map(len, res.split("\n")))
        print test_name.center(max_len)
        print res
        print bottlenecks


if __name__ == "__main__":
    exit(main(sys.argv))