blob: 861e5130c3694fade4e04a0b614848a943ee0b6e [file] [log] [blame]
koder aka kdanilov108ac362017-01-19 20:17:16 +02001import os
koder aka kdanilov7f59d562016-12-26 01:34:23 +02002import abc
koder aka kdanilova047e1b2015-04-21 23:16:59 +03003import logging
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03004import warnings
koder aka kdanilov108ac362017-01-19 20:17:16 +02005from io import BytesIO
6from functools import wraps
koder aka kdanilov108ac362017-01-19 20:17:16 +02007from collections import defaultdict
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03008from typing import Dict, Any, Iterator, Tuple, cast, List, Callable, Set
koder aka kdanilovcff7b2e2015-04-18 20:48:15 +03009
koder aka kdanilovffaf48d2016-12-27 02:25:29 +020010import numpy
koder aka kdanilov108ac362017-01-19 20:17:16 +020011import scipy.stats
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030012
kdanylov aka koder45183182017-04-30 23:55:40 +030013# import matplotlib
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030014# matplotlib.use('GTKAgg')
15
koder aka kdanilova732a602017-02-01 20:29:56 +020016import matplotlib.pyplot as plt
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030017from matplotlib import gridspec
18
19from cephlib.common import float2str
20from cephlib.plot import plot_hmap_with_y_histo, hmap_from_2d
koder aka kdanilovbe8f89f2015-04-28 14:51:51 +030021
koder aka kdanilov108ac362017-01-19 20:17:16 +020022import wally
koder aka kdanilovffaf48d2016-12-27 02:25:29 +020023
koder aka kdanilov108ac362017-01-19 20:17:16 +020024from . import html
koder aka kdanilov39e449e2016-12-17 15:15:26 +020025from .stage import Stage, StepOrder
26from .test_run_class import TestRun
koder aka kdanilov108ac362017-01-19 20:17:16 +020027from .hlstorage import ResultStorage
28from .node_interfaces import NodeInfo
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030029from .utils import b2ssize, b2ssize_10, STORAGE_ROLES, unit_conversion_coef
koder aka kdanilova732a602017-02-01 20:29:56 +020030from .statistic import (calc_norm_stat_props, calc_histo_stat_props, moving_average, moving_dev,
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030031 hist_outliers_perc, find_ouliers_ts, approximate_curve)
32from .result_classes import (StatProps, DataSource, TimeSeries, NormStatProps, HistoStatProps, SuiteConfig)
koder aka kdanilov108ac362017-01-19 20:17:16 +020033from .suits.io.fio import FioTest, FioJobConfig
koder aka kdanilova732a602017-02-01 20:29:56 +020034from .suits.io.fio_job import FioJobParams
35from .suits.job import JobConfig
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030036from .data_selectors import get_aggregated, AGG_TAG, summ_sensors, find_sensors_to_2d, find_nodes_by_roles
37
38
39with warnings.catch_warnings():
40 warnings.simplefilter("ignore")
41 import seaborn
koder aka kdanilovcff7b2e2015-04-18 20:48:15 +030042
koder aka kdanilov4a510ee2015-04-21 18:50:42 +030043
koder aka kdanilov962ee5f2016-12-19 02:40:08 +020044logger = logging.getLogger("wally")
koder aka kdanilova047e1b2015-04-21 23:16:59 +030045
46
koder aka kdanilov108ac362017-01-19 20:17:16 +020047# ---------------- CONSTS ---------------------------------------------------------------------------------------------
koder aka kdanilov39e449e2016-12-17 15:15:26 +020048
koder aka kdanilov7f59d562016-12-26 01:34:23 +020049
koder aka kdanilov108ac362017-01-19 20:17:16 +020050DEBUG = False
51LARGE_BLOCKS = 256
koder aka kdanilov39e449e2016-12-17 15:15:26 +020052
koder aka kdanilov39e449e2016-12-17 15:15:26 +020053
koder aka kdanilov108ac362017-01-19 20:17:16 +020054# ---------------- PROFILES ------------------------------------------------------------------------------------------
55
56
# these are the default values; the real values are loaded from the config
58
class ColorProfile:
    # Default colors and alpha values used by all report plots
    # (real values are loaded from the run config).
    primary_color = 'b'        # main data series
    suppl_color1 = 'teal'      # supplementary series #1 (e.g. moving average)
    suppl_color2 = 'magenta'   # supplementary series #2 (e.g. deviation bands)
    suppl_color3 = 'orange'
    box_color = 'y'            # bar/box fill
    err_color = 'red'          # outlier markers

    noise_alpha = 0.3          # alpha for raw (noisy) data points
    subinfo_alpha = 0.7        # alpha for secondary plot elements

    imshow_colormap = None  # type: str
71
koder aka kdanilov108ac362017-01-19 20:17:16 +020072
class StyleProfile:
    # Visual and statistical settings for all report plots
    # (defaults only; real values are loaded from the run config).
    grid = True
    tide_layout = True
    hist_boxes = 10            # bin count for regular histograms
    hist_lat_boxes = 25        # bin count for latency histograms
    hm_hist_bins_count = 25    # Y-axis bin count for heatmaps
    hm_x_slots = 25            # max X-axis slots in heatmaps
    min_points_for_dev = 5     # minimal points required for moving deviation

    dev_range_x = 2.0          # how many stdev to show around the average
    dev_perc = 95

    point_shape = 'o'          # marker for normal data points
    err_point_shape = '*'      # marker for outlier points

    avg_range = 20             # window size for moving average/deviation
    approx_average = True

    curve_approx_level = 6     # polynomial degree for curve approximation
    curve_approx_points = 100
    # moving-average window must be wide enough for deviation computation
    assert avg_range >= min_points_for_dev

    # figure size in inches
    figsize = (10, 6)

    extra_io_spine = True

    legend_for_eng = True
    # heatmap_interpolation = '1d'
    heatmap_interpolation = None
    heatmap_interpolation_points = 300
    outliers_q_nd = 3.0         # outlier cut range, in stdev units
    outliers_hide_q_nd = 4.0    # outliers beyond this range are hidden entirely
    outliers_lat = (0.01, 0.9)  # latency outlier percentile bounds

    violin_instead_of_box = True
    violin_point_count = 30000  # cap on synthetic points per violin

    heatmap_colorbar = False

    min_iops_vs_qd_jobs = 3

    qd_bins = [0, 1, 2, 4, 6, 8, 12, 16, 20, 26, 32, 40, 48, 56, 64, 96, 128]
    iotime_bins = list(range(0, 1030, 50))
    block_size_bins = [0, 2, 4, 8, 16, 32, 48, 64, 96, 128, 192, 256, 384, 512, 1024, 2048]
118
koder aka kdanilov108ac362017-01-19 20:17:16 +0200119
# Module-level default profile instances, used as default arguments by the plot functions below.
DefColorProfile = ColorProfile()
DefStyleProfile = StyleProfile()
122
123
koder aka kdanilov108ac362017-01-19 20:17:16 +0200124# ---------------- STRUCTS -------------------------------------------------------------------------------------------
koder aka kdanilov39e449e2016-12-17 15:15:26 +0200125
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200126
# TODO: needs to be revised, should use StatProps fields instead
class StoragePerfSummary:
    """Aggregated per-storage performance counters for the report.

    All fields are filled in after construction; zero/None means
    "not measured yet".
    """
    # NOTE(review): `name` is accepted but never stored -- confirm it is
    # intentionally unused before relying on it.
    def __init__(self, name: str) -> None:
        self.direct_iops_r_max = 0  # type: int
        self.direct_iops_w_max = 0  # type: int

        # 64 used instead of 4k to faster feed caches
        self.direct_iops_w64_max = 0  # type: int

        # presumably 4k random-write IOPS with latency under 10/30/100 ms -- confirm
        self.rws4k_10ms = 0  # type: int
        self.rws4k_30ms = 0  # type: int
        self.rws4k_100ms = 0  # type: int
        self.bw_write_max = 0  # type: int
        self.bw_read_max = 0  # type: int

        self.bw = None  # type: float
        self.iops = None  # type: float
        self.lat = None  # type: float
        self.lat_50 = None  # type: float
        self.lat_95 = None  # type: float
147
148
class IOSummary:
    """Aggregated bandwidth/latency results for a single IO job."""

    def __init__(self,
                 qd: int,
                 block_size: int,
                 nodes_count: int,
                 bw: NormStatProps,
                 lat: HistoStatProps) -> None:
        # job parameters
        self.qd = qd
        self.block_size = block_size
        self.nodes_count = nodes_count

        # measured statistics
        self.bw = bw
        self.lat = lat
163
164
165# -------------- AGGREGATION AND STAT FUNCTIONS ----------------------------------------------------------------------
koder aka kdanilov108ac362017-01-19 20:17:16 +0200166
def make_iosum(rstorage: ResultStorage, suite: SuiteConfig, job: FioJobConfig) -> IOSummary:
    """Aggregate the latency and bandwidth series of one fio job into an IOSummary."""
    lat_ts = get_aggregated(rstorage, suite, job, "lat")
    bw_ts = get_aggregated(rstorage, suite, job, "bw")

    lat_props = calc_histo_stat_props(lat_ts, rebins_count=StyleProfile.hist_boxes)
    bw_props = calc_norm_stat_props(bw_ts, StyleProfile.hist_boxes)

    return IOSummary(job.qd,
                     nodes_count=len(suite.nodes_ids),
                     block_size=job.bsize,
                     lat=lat_props,
                     bw=bw_props)
176
koder aka kdanilov108ac362017-01-19 20:17:16 +0200177
def is_sensor_numarray(sensor: str, metric: str) -> bool:
    """Returns True if sensor provides one-dimension array of numeric values. One number per one measurement."""
    # Currently every known sensor metric is a plain 1D numeric array.
    return True
181
182
# (sensor, metric) pairs whose values are instantaneous levels (gauges),
# as opposed to per-interval deltas of cumulative counters.
LEVEL_SENSORS = {("block-io", "io_queue"),
                 ("system-cpu", "procs_blocked"),
                 ("system-cpu", "procs_queue")}
186
187
def is_level_sensor(sensor: str, metric: str) -> bool:
    """Check whether (sensor, metric) reports an instantaneous level, e.g. queue depth."""
    key = (sensor, metric)
    return key in LEVEL_SENSORS
191
192
def is_delta_sensor(sensor: str, metric: str) -> bool:
    """Check whether (sensor, metric) reports per-interval deltas of a cumulative counter."""
    # Everything that is not a level sensor is treated as a delta sensor.
    if is_level_sensor(sensor, metric):
        return False
    return True
196
koder aka kdanilov108ac362017-01-19 20:17:16 +0200197# -------------- PLOT HELPERS FUNCTIONS ------------------------------------------------------------------------------
198
def get_emb_data_svg(plt: Any, format: str = 'svg') -> bytes:
    """Render the current figure into raw image bytes for HTML embedding.

    :param plt: matplotlib.pyplot module (or any object providing savefig())
    :param format: 'svg', 'png' or 'jpg'
    :returns: image bytes; for 'svg' everything up to and including the
              matplotlib creation comment is stripped, so the remaining
              markup can be inlined directly into an HTML page
    :raises ValueError: on an unsupported format (previously the function
              silently fell off the end and returned None)
    """
    bio = BytesIO()
    if format in ('png', 'jpg'):
        plt.savefig(bio, format=format)
        return bio.getvalue()
    elif format == 'svg':
        plt.savefig(bio, format='svg')
        img_start = "<!-- Created with matplotlib (http://matplotlib.org/) -->"
        return bio.getvalue().decode("utf8").split(img_start, 1)[1].encode("utf8")
    raise ValueError("Unsupported image format {!r}, expected 'svg', 'png' or 'jpg'".format(format))
koder aka kdanilov108ac362017-01-19 20:17:16 +0200208
209
def provide_plot(func: Callable[..., None]) -> Callable[..., str]:
    """Decorator: cache the rendered plot of `func` in the result storage.

    The wrapped function is only executed when no rendered file for `path`
    exists yet; in both cases the wrapper returns the plot file path.
    """
    @wraps(func)
    def closure1(storage: ResultStorage,
                 path: DataSource,
                 *args, **kwargs) -> str:
        cached = storage.check_plot_file(path)
        if cached:
            return cached

        # output image format is encoded in the data source tag suffix
        img_format = path.tag.split(".")[-1]

        plt.figure(figsize=StyleProfile.figsize)
        plt.subplots_adjust(right=0.66)

        func(*args, **kwargs)
        fpath = storage.put_plot_file(get_emb_data_svg(plt, format=img_format), path)
        logger.debug("Plot %s saved to %r", path, fpath)

        # release figure resources to avoid accumulating open figures
        plt.clf()
        plt.close('all')
        return fpath
    return closure1
229
230
def apply_style(style: StyleProfile, eng: bool = True, no_legend: bool = False) -> None:
    """Apply shared grid/legend settings from the style profile to the current plot."""
    if style.grid:
        plt.grid(True)

    want_legend = (style.legend_for_eng or not eng) and not no_legend
    if want_legend:
        plt.legend(loc="center left", bbox_to_anchor=(1.03, 0.81))
239
240
241# -------------- PLOT FUNCTIONS --------------------------------------------------------------------------------------
242
243
@provide_plot
def plot_hist(title: str, units: str,
              prop: StatProps,
              colors: ColorProfile = DefColorProfile,
              style: StyleProfile = DefStyleProfile) -> None:
    """Plot a normalized histogram of `prop`; for normally distributed stats
    also overlay the curve expected from the fitted normal distribution.

    Wrapped by @provide_plot: the result is cached in storage and the
    wrapper returns the plot file path.
    """
    # TODO: unit should came from ts
    normed_bins = prop.bins_populations / prop.bins_populations.sum()
    bar_width = prop.bins_edges[1] - prop.bins_edges[0]
    plt.bar(prop.bins_edges, normed_bins, color=colors.box_color, width=bar_width, label="Real data")

    plt.xlabel(units)
    plt.ylabel("Value probability")
    plt.title(title)

    dist_plotted = False
    if isinstance(prop, NormStatProps):
        # overlay normal distribution built from (average, deviation)
        nprop = cast(NormStatProps, prop)
        stats = scipy.stats.norm(nprop.average, nprop.deviation)

        # resample on a 10x finer grid to get a smooth curve
        new_edges, step = numpy.linspace(prop.bins_edges[0], prop.bins_edges[-1],
                                         len(prop.bins_edges) * 10, retstep=True)

        # NOTE(review): the magic multiplier 11 looks suspicious -- confirm the intended scaling
        ypoints = stats.cdf(new_edges) * 11
        # differentiate the CDF to get per-interval probabilities
        ypoints = [next - prev for (next, prev) in zip(ypoints[1:], ypoints[:-1])]
        # plot at interval midpoints
        xpoints = (new_edges[1:] + new_edges[:-1]) / 2

        plt.plot(xpoints, ypoints, color=colors.primary_color, label="Expected from\nnormal\ndistribution")
        dist_plotted = True

    plt.gca().set_xlim(left=prop.bins_edges[0])
    if prop.log_bins:
        plt.xscale('log')

    apply_style(style, eng=True, no_legend=not dist_plotted)
279
280
@provide_plot
def plot_simple_over_time(tss: List[Tuple[str, numpy.ndarray]],
                          title: str,
                          ylabel: str,
                          xlabel: str = "time, s",
                          average: bool = False,
                          colors: ColorProfile = DefColorProfile,
                          style: StyleProfile = DefStyleProfile) -> None:
    """Plot several named 1D series on a single set of axes.

    With `average` set, each series is replaced by its moving average and,
    if configured, additionally smoothed with a polynomial approximation.
    """
    _, axes = plt.subplots(figsize=(12, 6))

    for series_name, values in tss:
        if average:
            smoothed = moving_average(values, style.avg_range)
            if style.approx_average:
                xs = numpy.arange(len(smoothed))
                smoothed = approximate_curve(xs, smoothed, xs, style.curve_approx_level)
            values = smoothed
        axes.plot(values, label=series_name)

    axes.set_title(title)
    axes.set_ylabel(ylabel)
    axes.set_xlabel(xlabel)
    apply_style(style, eng=True)
302
303
@provide_plot
def plot_hmap_from_2d(data2d: numpy.ndarray,
                      title: str, ylabel: str, xlabel: str = 'time, s', bins: numpy.ndarray = None,
                      colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    """Render a heatmap with an attached Y-axis histogram from a 2D array,
    using the cephlib plotting helpers."""
    flat_series, value_ranges = hmap_from_2d(data2d)
    axes, _ = plot_hmap_with_y_histo(flat_series, value_ranges, bins=bins)
    axes.set_title(title)
    axes.set_xlabel(xlabel)
    axes.set_ylabel(ylabel)
313
314
@provide_plot
def plot_v_over_time(title: str,
                     units: str,
                     ts: TimeSeries,
                     plot_avg_dev: bool = True,
                     plot_points: bool = True,
                     colors: ColorProfile = DefColorProfile,
                     style: StyleProfile = DefStyleProfile) -> None:
    """Plot a 1D time series with outlier highlighting and, optionally,
    a moving average with +/- N*stdev bands.

    Wrapped by @provide_plot: result is cached in storage, wrapper
    returns the plot file path.
    """
    min_time = min(ts.times)

    # convert timestamps to seconds relative to the test start
    coef = float(unit_conversion_coef(ts.time_units, 's'))
    time_points = numpy.array([(val_time - min_time) * coef for val_time in ts.times])

    # split points into normal / visible outliers / hidden (extreme) outliers
    outliers_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_q_nd)
    outliers_4q_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_hide_q_nd)
    normal_idxs = numpy.logical_not(outliers_idxs)
    outliers_idxs = outliers_idxs & numpy.logical_not(outliers_4q_idxs)
    # hidden_outliers_count = numpy.count_nonzero(outliers_4q_idxs)

    data = ts.data[normal_idxs]
    data_times = time_points[normal_idxs]
    outliers = ts.data[outliers_idxs]
    outliers_times = time_points[outliers_idxs]

    if plot_points:
        # fade raw points when average/deviation curves are plotted on top
        alpha = colors.noise_alpha if plot_avg_dev else 1.0
        plt.plot(data_times, data, style.point_shape,
                 color=colors.primary_color, alpha=alpha, label="Data")
        plt.plot(outliers_times, outliers, style.err_point_shape,
                 color=colors.err_color, label="Outliers")

    has_negative_dev = False
    plus_minus = "\xb1"

    if plot_avg_dev and len(data) < style.avg_range * 2:
        logger.warning("Array %r to small to plot average over %s points", title, style.avg_range)
    elif plot_avg_dev:
        avg_vals = moving_average(data, style.avg_range)
        dev_vals = moving_dev(data, style.avg_range)
        avg_times = moving_average(data_times, style.avg_range)

        # optionally smooth average/deviation with a polynomial fit
        if style.approx_average:
            avg_vals = approximate_curve(avg_times, avg_vals, avg_times, style.curve_approx_level)
            dev_vals = approximate_curve(avg_times, dev_vals, avg_times, style.curve_approx_level)

        plt.plot(avg_times, avg_vals, c=colors.suppl_color1, label="Average")

        # lower/upper deviation bands around the average
        low_vals_dev = avg_vals - dev_vals * style.dev_range_x
        hight_vals_dev = avg_vals + dev_vals * style.dev_range_x
        # render an int multiplier without a trailing ".0" in the label
        if style.dev_range_x - int(style.dev_range_x) < 0.01:
            plt.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                     label="{}{}*stdev".format(plus_minus, int(style.dev_range_x)))
        else:
            plt.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                     label="{}{}*stdev".format(plus_minus, style.dev_range_x))
        plt.plot(avg_times, hight_vals_dev, c=colors.suppl_color2)
        has_negative_dev = low_vals_dev.min() < 0

    plt.xlim(-5, max(time_points) + 5)
    plt.xlabel("Time, seconds from test begin")

    if plot_avg_dev:
        plt.ylabel("{}. Average and {}stddev over {} points".format(units, plus_minus, style.avg_range))
    else:
        plt.ylabel(units)

    plt.title(title)

    # clamp Y axis at zero if the lower deviation band went negative
    if has_negative_dev:
        plt.gca().set_ylim(bottom=0)

    apply_style(style, eng=True)
389
390
@provide_plot
def plot_lat_over_time(title: str, ts: TimeSeries,
                       ylabel: str,
                       samples: int = 5,
                       colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    """Plot the evolution of a latency distribution over test time.

    The test duration is split into `samples` intervals; for each interval
    the latency histograms are summed and rendered as a violin (or box) plot.
    """
    min_time = min(ts.times)
    # times presumably in milliseconds: round to whole seconds -- TODO confirm units
    times = [int(tm - min_time + 500) // 1000 for tm in ts.times]
    ts_len = len(times)
    step = ts_len / samples
    # sample interval boundaries spread evenly over the series
    points = [times[int(i * step + 0.5)] for i in range(samples)]
    points.append(times[-1])
    bounds = list(zip(points[:-1], points[1:]))
    agg_data = []    # synthetic latency samples per interval
    positions = []   # X position (interval middle) per violin/box
    labels = []

    for begin, end in bounds:
        # NOTE(review): begin/end are values in seconds but are used as row
        # indices here -- only correct if rows are 1 second apart; confirm
        agg_hist = ts.data[begin:end].sum(axis=0)

        if style.violin_instead_of_box:
            # cut outliers
            idx1, idx2 = hist_outliers_perc(agg_hist, style.outliers_lat)
            agg_hist = agg_hist[idx1:idx2]
            curr_bins_vals = ts.histo_bins[idx1:idx2]

            # downsample so at most violin_point_count synthetic points are generated
            correct_coef = style.violin_point_count / sum(agg_hist)
            if correct_coef > 1:
                correct_coef = 1
        else:
            curr_bins_vals = ts.histo_bins
            correct_coef = 1

        # expand the histogram back into individual value samples
        vals = numpy.empty(shape=[numpy.sum(agg_hist)], dtype='float32')
        cidx = 0

        non_zero, = agg_hist.nonzero()
        for pos in non_zero:
            count = int(agg_hist[pos] * correct_coef + 0.5)

            if count != 0:
                vals[cidx: cidx + count] = curr_bins_vals[pos]
                cidx += count

        agg_data.append(vals[:cidx])
        positions.append((end + begin) / 2)
        labels.append(str((end + begin) // 2))

    if style.violin_instead_of_box:
        patches = plt.violinplot(agg_data,
                                 positions=positions,
                                 showmeans=True,
                                 showmedians=True,
                                 widths=step / 2)

        patches['cmeans'].set_color("blue")
        patches['cmedians'].set_color("green")
        if style.legend_for_eng:
            legend_location = "center left"
            legend_bbox_to_anchor = (1.03, 0.81)
            plt.legend([patches['cmeans'], patches['cmedians']], ["mean", "median"],
                       loc=legend_location, bbox_to_anchor=legend_bbox_to_anchor)
    else:
        plt.boxplot(agg_data, 0, '', positions=positions, labels=labels, widths=step / 4)

    plt.xlim(min(times), max(times))
    plt.ylabel(ylabel)
    plt.xlabel("Time, seconds from test begin, sampled for ~{} seconds".format(int(step)))
    plt.title(title)
    apply_style(style, eng=True, no_legend=True)
461
462
@provide_plot
def plot_histo_heatmap(title: str,
                       ts: TimeSeries,
                       ylabel: str,
                       xlabel: str = "time, s",
                       colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    """Render a 2D histogram time series as a heatmap (X = time, Y = value bins)
    plus a side panel with the overall value histogram.
    """
    # only histogram-based ts can be plotted
    assert len(ts.data.shape) == 2

    # Find global outliers. As load is expected to be stable during one job
    # outliers range can be detected globally
    total_hist = ts.data.sum(axis=0)
    idx1, idx2 = hist_outliers_perc(total_hist,
                                    bounds_perc=style.outliers_lat,
                                    min_bins_left=style.hm_hist_bins_count)

    # merge outliers with most close non-outliers cell
    orig_data = ts.data[:, idx1:idx2].copy()
    if idx1 > 0:
        orig_data[:, 0] += ts.data[:, :idx1].sum(axis=1)

    if idx2 < ts.data.shape[1]:
        orig_data[:, -1] += ts.data[:, idx2:].sum(axis=1)

    bins_vals = ts.histo_bins[idx1:idx2]

    # rebin over X axis
    # aggregate some lines in ts.data to plot not more than style.hm_x_slots x bins
    agg_idx = float(len(orig_data)) / style.hm_x_slots
    if agg_idx >= 2:
        data = numpy.zeros([style.hm_x_slots, orig_data.shape[1]], dtype=numpy.float32)  # type: List[numpy.ndarray]
        next = agg_idx
        count = 0
        data_idx = 0
        # average each group of ~agg_idx consecutive rows into one output row
        for idx, arr in enumerate(orig_data):
            if idx >= next:
                data[data_idx] /= count
                data_idx += 1
                next += agg_idx
                count = 0
            data[data_idx] += arr
            count += 1

        # normalize the last, partially filled slot
        # NOTE(review): `data[-1]` may differ from `data[data_idx]` if fewer
        # than hm_x_slots slots were filled -- confirm the intended target
        if count > 1:
            data[-1] /= count
    else:
        data = orig_data

    # rebin over Y axis
    # =================

    # don't using rebin_histogram here, as we need apply same bins for many arrays
    step = (bins_vals[-1] - bins_vals[0]) / style.hm_hist_bins_count
    new_bins_edges = numpy.arange(style.hm_hist_bins_count) * step + bins_vals[0]
    bin_mapping = numpy.clip(numpy.searchsorted(new_bins_edges, bins_vals) - 1, 0, len(new_bins_edges) - 1)

    # map origin bins ranges to heatmap bins, iterate over rows
    cmap = []
    for line in data:
        curr_bins = [0] * style.hm_hist_bins_count
        for idx, count in zip(bin_mapping, line):
            curr_bins[idx] += count
        cmap.append(curr_bins)
    ncmap = numpy.array(cmap)

    # plot data
    # =========

    fig = plt.figure(figsize=(12, 6))
    boxes = 3
    gs = gridspec.GridSpec(1, boxes)
    ax = fig.add_subplot(gs[0, :boxes - 1])

    # bin labels: midpoints of each bin, last bin labeled as open-ended
    labels = list(map(float2str, (new_bins_edges[:-1] + new_bins_edges[1:]) / 2)) + \
             [float2str(new_bins_edges[-1]) + "+"]
    seaborn.heatmap(ncmap[:,::-1].T, xticklabels=False, cmap="Blues", ax=ax)
    ax.set_yticklabels(labels, rotation='horizontal')
    ax.set_xticklabels([])

    # plot overall histogram
    # =======================

    ax2 = fig.add_subplot(gs[0, boxes - 1])
    ax2.set_yticklabels([])
    ax2.set_xticklabels([])

    histo = ncmap.sum(axis=0).reshape((-1,))
    ax2.set_ylim(top=histo.size, bottom=0)
    plt.barh(numpy.arange(histo.size) + 0.5, width=histo, axes=ax2)

    # Set labels
    # ==========

    ax.set_title(title)
    ax.set_ylabel(ylabel)
    ax.set_xlabel(xlabel)
560
koder aka kdanilova732a602017-02-01 20:29:56 +0200561
koder aka kdanilov108ac362017-01-19 20:17:16 +0200562
@provide_plot
def io_chart(title: str,
             legend: str,
             iosums: List[IOSummary],
             iops_log_spine: bool = False,
             lat_log_spine: bool = False,
             colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    """Plot IOPS/BW bars vs QD*node-count with deviation/confidence error bars,
    median/95% latency lines on a right-hand spine and, optionally, an extra
    left-hand spine showing the complementary unit (BW when IOPS is primary
    and vice versa).
    """
    # -------------- MAGIC VALUES ---------------------
    # IOPS bar width
    width = 0.35

    # offset from center of bar to deviation/confidence range indicator
    err_x_offset = 0.05

    # extra space on top and bottom, comparing to maximal tight layout
    extra_y_space = 0.05

    # additional spine for BW/IOPS on left side of plot
    extra_io_spine_x_offset = -0.1

    # extra space on left and right sides
    extra_x_space = 0.5

    # legend location settings
    legend_location = "center left"
    legend_bbox_to_anchor = (1.1, 0.81)

    # plot box size adjust (only plot, not spines and legend)
    plot_box_adjust = {'right': 0.66}
    # -------------- END OF MAGIC VALUES ---------------------

    block_size = iosums[0].block_size
    lc = len(iosums)
    xt = list(range(1, lc + 1))

    # x coordinate of middle of the bars
    xpos = [i - width / 2 for i in xt]

    # import matplotlib.gridspec as gridspec
    # gs = gridspec.GridSpec(1, 3, width_ratios=[1, 4, 1])
    # p1 = plt.subplot(gs[1])

    logger.warning("Check coef usage!")

    fig, p1 = plt.subplots(figsize=StyleProfile.figsize)

    # plot IOPS/BW bars
    # large blocks: primary unit is bandwidth; small blocks: primary unit is IOPS
    # NOTE(review): the warning above was left by the author -- the same
    # `coef` is reused for both units below, which looks dubious; verify
    if block_size >= LARGE_BLOCKS:
        iops_primary = False
        coef = float(unit_conversion_coef(iosums[0].bw.units, "MiBps"))
        p1.set_ylabel("BW (MiBps)")
    else:
        iops_primary = True
        coef = float(unit_conversion_coef(iosums[0].bw.units, "MiBps")) / block_size
        p1.set_ylabel("IOPS")

    vals = [iosum.bw.average * coef for iosum in iosums]

    p1.bar(xpos, vals, width=width, color=colors.box_color, label=legend)

    # set correct x limits for primary IO spine
    min_io = min(iosum.bw.average - iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    max_io = max(iosum.bw.average + iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    border = (max_io - min_io) * extra_y_space
    io_lims = (min_io - border, max_io + border)

    p1.set_ylim(io_lims[0] * coef, io_lims[-1] * coef)

    # plot deviation and confidence error ranges
    err1_legend = err2_legend = None
    for pos, iosum in zip(xpos, iosums):
        err1_legend = p1.errorbar(pos + width / 2 - err_x_offset,
                                  iosum.bw.average * coef,
                                  iosum.bw.deviation * style.dev_range_x * coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color1)  # 'magenta'
        err2_legend = p1.errorbar(pos + width / 2 + err_x_offset,
                                  iosum.bw.average * coef,
                                  iosum.bw.confidence * coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color2)  # 'teal'

    if style.grid:
        p1.grid(True)

    handles1, labels1 = p1.get_legend_handles_labels()

    handles1 += [err1_legend, err2_legend]
    labels1 += ["{}% dev".format(style.dev_perc),
                "{}% conf".format(int(100 * iosums[0].bw.confidence_level))]

    # extra y spine for latency on right side
    p2 = p1.twinx()

    # plot median and 95 perc latency
    p2.plot(xt, [iosum.lat.perc_50 for iosum in iosums], label="lat med")
    p2.plot(xt, [iosum.lat.perc_95 for iosum in iosums], label="lat 95%")

    # limit and label x spine
    plt.xlim(extra_x_space, lc + extra_x_space)
    plt.xticks(xt, ["{0} * {1}".format(iosum.qd, iosum.nodes_count) for iosum in iosums])
    p1.set_xlabel("QD * Test node count")

    # apply log scales for X spines, if set
    if iops_log_spine:
        p1.set_yscale('log')

    if lat_log_spine:
        p2.set_yscale('log')

    # extra y spine for BW/IOPS on left side
    if style.extra_io_spine:
        p3 = p1.twinx()
        if iops_log_spine:
            p3.set_yscale('log')

        # NOTE(review): both branches use the same `coef`, so the secondary
        # spine mirrors the primary one instead of the other unit -- confirm
        if iops_primary:
            p3.set_ylabel("BW (MiBps)")
            p3.set_ylim(io_lims[0] * coef, io_lims[1] * coef)
        else:
            p3.set_ylabel("IOPS")
            p3.set_ylim(io_lims[0] * coef, io_lims[1] * coef)

        p3.spines["left"].set_position(("axes", extra_io_spine_x_offset))
        p3.spines["left"].set_visible(True)
        p3.yaxis.set_label_position('left')
        p3.yaxis.set_ticks_position('left')

    p2.set_ylabel("Latency (ms)")

    plt.title(title)

    # legend box
    handles2, labels2 = p2.get_legend_handles_labels()
    plt.legend(handles1 + handles2, labels1 + labels2,
               loc=legend_location,
               bbox_to_anchor=legend_bbox_to_anchor)

    # adjust central box size to fit legend
    plt.subplots_adjust(**plot_box_adjust)
    apply_style(style, eng=False, no_legend=True)
705
706
707# -------------------- REPORT HELPERS --------------------------------------------------------------------------------
708
709
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200710class HTMLBlock:
711 data = None # type: str
712 js_links = [] # type: List[str]
713 css_links = [] # type: List[str]
koder aka kdanilova732a602017-02-01 20:29:56 +0200714 order_attr = None # type: Any
715
716 def __init__(self, data: str, order_attr: Any = None) -> None:
717 self.data = data
718 self.order_attr = order_attr
719
kdanylov aka koder45183182017-04-30 23:55:40 +0300720 def __eq__(self, o: Any) -> bool:
koder aka kdanilova732a602017-02-01 20:29:56 +0200721 return o.order_attr == self.order_attr # type: ignore
722
kdanylov aka koder45183182017-04-30 23:55:40 +0300723 def __lt__(self, o: Any) -> bool:
koder aka kdanilova732a602017-02-01 20:29:56 +0200724 return o.order_attr > self.order_attr # type: ignore
725
726
class Table:
    """Accumulates rows of tabular report data and renders them via the html helper."""

    def __init__(self, header: List[str]) -> None:
        self.header = header
        self.data = list()  # type: List[List[str]]

    def add_line(self, values: List[str]) -> None:
        """Append one row; no validation against the header length is done."""
        self.data.append(values)

    def html(self):
        """Render the accumulated rows as an HTML table with an empty caption."""
        return html.table("", self.header, self.data)
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200737
738
koder aka kdanilov108ac362017-01-19 20:17:16 +0200739class Menu1st:
740 engineering = "Engineering"
741 summary = "Summary"
koder aka kdanilova732a602017-02-01 20:29:56 +0200742 per_job = "Per Job"
koder aka kdanilov108ac362017-01-19 20:17:16 +0200743
744
745class Menu2ndEng:
746 iops_time = "IOPS(time)"
747 hist = "IOPS/lat overall histogram"
748 lat_time = "Lat(time)"
749
750
751class Menu2ndSumm:
752 io_lat_qd = "IO & Lat vs QD"
753
754
koder aka kdanilova732a602017-02-01 20:29:56 +0200755menu_1st_order = [Menu1st.summary, Menu1st.engineering, Menu1st.per_job]
koder aka kdanilov108ac362017-01-19 20:17:16 +0200756
757
758# -------------------- REPORTS --------------------------------------------------------------------------------------
759
760
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200761class Reporter(metaclass=abc.ABCMeta):
koder aka kdanilova732a602017-02-01 20:29:56 +0200762 suite_types = set() # type: Set[str]
763
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200764 @abc.abstractmethod
koder aka kdanilova732a602017-02-01 20:29:56 +0200765 def get_divs(self, suite: SuiteConfig, storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
766 pass
767
768
class JobReporter(metaclass=abc.ABCMeta):
    """Base class for per-job report generators."""
    # names of suite types this reporter can process; named 'suite_types' for
    # consistency with Reporter and with subclasses (StatInfo, IOHist, ...),
    # which all override 'suite_types'
    suite_types = set()  # type: Set[str]
    # deprecated misspelled alias, kept for backward compatibility with any
    # external code still reading the old attribute name
    suite_type = suite_types

    @abc.abstractmethod
    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        """Yield (first-level menu, second-level menu, html block) tuples for one job."""
        pass
778
779
780# Main performance report
class PerformanceSummary(Reporter):
    """Aggregated summary for storage (stub, no implementation yet)"""
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200783
784
785# Main performance report
koder aka kdanilov108ac362017-01-19 20:17:16 +0200786class IO_QD(Reporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200787 """Creates graph, which show how IOPS and Latency depend on QD"""
koder aka kdanilova732a602017-02-01 20:29:56 +0200788 suite_types = {'fio'}
789
790 def get_divs(self, suite: SuiteConfig, rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
791 ts_map = defaultdict(list) # type: Dict[FioJobParams, List[Tuple[SuiteConfig, FioJobConfig]]]
792 str_summary = {} # type: Dict[FioJobParams, List[IOSummary]]
koder aka kdanilov108ac362017-01-19 20:17:16 +0200793 for job in rstorage.iter_job(suite):
794 fjob = cast(FioJobConfig, job)
koder aka kdanilova732a602017-02-01 20:29:56 +0200795 fjob_no_qd = cast(FioJobParams, fjob.params.copy(qd=None))
796 str_summary[fjob_no_qd] = (fjob_no_qd.summary, fjob_no_qd.long_summary)
797 ts_map[fjob_no_qd].append((suite, fjob))
koder aka kdanilov108ac362017-01-19 20:17:16 +0200798
koder aka kdanilova732a602017-02-01 20:29:56 +0200799 for tpl, suites_jobs in ts_map.items():
800 if len(suites_jobs) > StyleProfile.min_iops_vs_qd_jobs:
801 iosums = [make_iosum(rstorage, suite, job) for suite, job in suites_jobs]
802 iosums.sort(key=lambda x: x.qd)
803 summary, summary_long = str_summary[tpl]
804 ds = DataSource(suite_id=suite.storage_id,
805 job_id=summary,
806 node_id=AGG_TAG,
807 sensor="fio",
808 dev=AGG_TAG,
809 metric="io_over_qd",
810 tag="svg")
koder aka kdanilov108ac362017-01-19 20:17:16 +0200811
koder aka kdanilova732a602017-02-01 20:29:56 +0200812 title = "IOPS, BW, Lat vs. QD.\n" + summary_long
813 fpath = io_chart(rstorage, ds, title=title, legend="IOPS/BW", iosums=iosums) # type: str
814 yield Menu1st.summary, Menu2ndSumm.io_lat_qd, HTMLBlock(html.img(fpath))
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200815
816
817# Linearization report
class IOPS_Bsize(Reporter):
    """Creates graphs, which show how IOPS and Latency depend on block size (stub, no implementation yet)"""
820
821
822# IOPS/latency distribution
koder aka kdanilova732a602017-02-01 20:29:56 +0200823class StatInfo(JobReporter):
824 """Statistic info for job results"""
825 suite_types = {'fio'}
826
827 def get_divs(self, suite: SuiteConfig, job: JobConfig,
828 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
829
830 fjob = cast(FioJobConfig, job)
831 io_sum = make_iosum(rstorage, suite, fjob)
832
833 summary_data = [
834 ["Summary", job.params.long_summary],
835 ]
836
837 res = html.H2(html.center("Test summary"))
838 res += html.table("Test info", None, summary_data)
839 stat_data_headers = ["Name", "Average ~ Dev", "Conf interval", "Mediana", "Mode", "Kurt / Skew", "95%", "99%"]
840
kdanylov aka koder45183182017-04-30 23:55:40 +0300841 bw_target_units = 'Bps'
842 bw_coef = float(unit_conversion_coef(io_sum.bw.units, bw_target_units))
843
koder aka kdanilova732a602017-02-01 20:29:56 +0200844 bw_data = ["Bandwidth",
kdanylov aka koder45183182017-04-30 23:55:40 +0300845 "{}{} ~ {}{}".format(b2ssize(io_sum.bw.average * bw_coef), bw_target_units,
846 b2ssize(io_sum.bw.deviation * bw_coef), bw_target_units),
847 b2ssize(io_sum.bw.confidence * bw_coef) + bw_target_units,
848 b2ssize(io_sum.bw.perc_50 * bw_coef) + bw_target_units,
koder aka kdanilova732a602017-02-01 20:29:56 +0200849 "-",
850 "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
kdanylov aka koder45183182017-04-30 23:55:40 +0300851 b2ssize(io_sum.bw.perc_5 * bw_coef) + bw_target_units,
852 b2ssize(io_sum.bw.perc_1 * bw_coef) + bw_target_units]
koder aka kdanilova732a602017-02-01 20:29:56 +0200853
kdanylov aka koder45183182017-04-30 23:55:40 +0300854 iops_coef = float(unit_conversion_coef(io_sum.bw.units, 'KiBps')) / fjob.bsize
koder aka kdanilova732a602017-02-01 20:29:56 +0200855 iops_data = ["IOPS",
kdanylov aka koder45183182017-04-30 23:55:40 +0300856 "{}IOPS ~ {}IOPS".format(b2ssize_10(io_sum.bw.average * iops_coef),
857 b2ssize_10(io_sum.bw.deviation * iops_coef)),
858 b2ssize_10(io_sum.bw.confidence * iops_coef) + "IOPS",
859 b2ssize_10(io_sum.bw.perc_50 * iops_coef) + "IOPS",
koder aka kdanilova732a602017-02-01 20:29:56 +0200860 "-",
861 "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
kdanylov aka koder45183182017-04-30 23:55:40 +0300862 b2ssize_10(io_sum.bw.perc_5 * iops_coef) + "IOPS",
863 b2ssize_10(io_sum.bw.perc_1 * iops_coef) + "IOPS"]
koder aka kdanilova732a602017-02-01 20:29:56 +0200864
kdanylov aka koder45183182017-04-30 23:55:40 +0300865 lat_target_unit = 's'
866 lat_coef = unit_conversion_coef(io_sum.lat.units, lat_target_unit)
koder aka kdanilova732a602017-02-01 20:29:56 +0200867 # latency
868 lat_data = ["Latency",
869 "-",
870 "-",
kdanylov aka koder45183182017-04-30 23:55:40 +0300871 b2ssize_10(io_sum.lat.perc_50 * lat_coef) + lat_target_unit,
koder aka kdanilova732a602017-02-01 20:29:56 +0200872 "-",
873 "-",
kdanylov aka koder45183182017-04-30 23:55:40 +0300874 b2ssize_10(io_sum.lat.perc_95 * lat_coef) + lat_target_unit,
875 b2ssize_10(io_sum.lat.perc_99 * lat_coef) + lat_target_unit]
koder aka kdanilova732a602017-02-01 20:29:56 +0200876
877 # sensor usage
878 stat_data = [iops_data, bw_data, lat_data]
879 res += html.table("Load stats info", stat_data_headers, stat_data)
880
881 resource_headers = ["Resource", "Usage count", "Proportional to work done"]
882
kdanylov aka koder45183182017-04-30 23:55:40 +0300883 tot_io_coef = float(unit_conversion_coef(io_sum.bw.units, "KiBps"))
884 tot_ops_coef = tot_io_coef / fjob.bsize
885
886 io_transfered = io_sum.bw.data.sum() * tot_io_coef
koder aka kdanilova732a602017-02-01 20:29:56 +0200887 resource_data = [
kdanylov aka koder45183182017-04-30 23:55:40 +0300888 ["IO made", b2ssize_10(io_transfered * tot_ops_coef) + "OP", "-"],
koder aka kdanilova732a602017-02-01 20:29:56 +0200889 ["Data transfered", b2ssize(io_transfered) + "B", "-"]
890 ]
891
koder aka kdanilova732a602017-02-01 20:29:56 +0200892 storage = rstorage.storage
893 nodes = storage.load_list(NodeInfo, 'all_nodes') # type: List[NodeInfo]
894
kdanylov aka koder45183182017-04-30 23:55:40 +0300895 ops_done = io_transfered * tot_ops_coef
koder aka kdanilova732a602017-02-01 20:29:56 +0200896
897 all_metrics = [
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300898 ("Test nodes net send", 'net-io', 'send_bytes', b2ssize, ['testnode'], "B", io_transfered),
899 ("Test nodes net recv", 'net-io', 'recv_bytes', b2ssize, ['testnode'], "B", io_transfered),
koder aka kdanilova732a602017-02-01 20:29:56 +0200900
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300901 ("Test nodes disk write", 'block-io', 'sectors_written', b2ssize, ['testnode'], "B", io_transfered),
902 ("Test nodes disk read", 'block-io', 'sectors_read', b2ssize, ['testnode'], "B", io_transfered),
903 ("Test nodes writes", 'block-io', 'writes_completed', b2ssize_10, ['testnode'], "OP", ops_done),
904 ("Test nodes reads", 'block-io', 'reads_completed', b2ssize_10, ['testnode'], "OP", ops_done),
koder aka kdanilova732a602017-02-01 20:29:56 +0200905
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300906 ("Storage nodes net send", 'net-io', 'send_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),
907 ("Storage nodes net recv", 'net-io', 'recv_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),
koder aka kdanilova732a602017-02-01 20:29:56 +0200908
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300909 ("Storage nodes disk write", 'block-io', 'sectors_written', b2ssize, STORAGE_ROLES, "B", io_transfered),
910 ("Storage nodes disk read", 'block-io', 'sectors_read', b2ssize, STORAGE_ROLES, "B", io_transfered),
911 ("Storage nodes writes", 'block-io', 'writes_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
912 ("Storage nodes reads", 'block-io', 'reads_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
koder aka kdanilova732a602017-02-01 20:29:56 +0200913 ]
914
915 all_agg = {}
916
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300917 for descr, sensor, metric, ffunc, roles, units, denom in all_metrics:
koder aka kdanilova732a602017-02-01 20:29:56 +0200918 if not nodes:
919 continue
920
kdanylov aka koder45183182017-04-30 23:55:40 +0300921 res_ts = summ_sensors(rstorage, roles, sensor=sensor, metric=metric, time_range=job.reliable_info_range_s)
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300922 if res_ts is None:
koder aka kdanilova732a602017-02-01 20:29:56 +0200923 continue
924
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300925 agg = res_ts.data.sum()
koder aka kdanilova732a602017-02-01 20:29:56 +0200926 resource_data.append([descr, ffunc(agg) + units, "{:.1f}".format(agg / denom)])
927 all_agg[descr] = agg
928
koder aka kdanilova732a602017-02-01 20:29:56 +0200929 cums = [
930 ("Test nodes writes", "Test nodes reads", "Total test ops", b2ssize_10, "OP", ops_done),
931 ("Storage nodes writes", "Storage nodes reads", "Total storage ops", b2ssize_10, "OP", ops_done),
932 ("Storage nodes disk write", "Storage nodes disk read", "Total storage IO size", b2ssize,
933 "B", io_transfered),
934 ("Test nodes disk write", "Test nodes disk read", "Total test nodes IO size", b2ssize, "B", io_transfered),
935 ]
936
937 for name1, name2, descr, ffunc, units, denom in cums:
938 if name1 in all_agg and name2 in all_agg:
939 agg = all_agg[name1] + all_agg[name2]
940 resource_data.append([descr, ffunc(agg) + units, "{:.1f}".format(agg / denom)])
941
942 res += html.table("Resources usage", resource_headers, resource_data)
943
944 yield Menu1st.per_job, job.summary, HTMLBlock(res)
945
946
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300947# CPU load
class CPULoadPlot(JobReporter):
    """Plot the per-role CPU time breakdown (idle/user/sys/...) over the job run time."""

    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        # plot CPU time separately for storage nodes and test nodes
        for rt, roles in [('storage', STORAGE_ROLES), ('test', ['testnode'])]:
            cpu_ts = {}
            cpu_metrics = "idle guest iowait irq nice sirq steal sys user".split()
            for name in cpu_metrics:
                ts = summ_sensors(rstorage, roles, sensor='system-cpu', metric=name,
                                  time_range=job.reliable_info_range_s)
                # summ_sensors returns None when no sensor data was collected
                # for this metric - skip it instead of crashing below
                if ts is not None:
                    cpu_ts[name] = ts

            # 'idle' provides the data source for the output file name; without it
            # (or without any CPU data at all) there is nothing useful to plot
            if 'idle' not in cpu_ts:
                continue

            # total CPU time per time point, used to normalize each metric to percent
            it = iter(cpu_ts.values())
            total_over_time = next(it).data.copy()
            for ts in it:
                total_over_time += ts.data

            fname = plot_simple_over_time(rstorage,
                                          cpu_ts['idle'].source(job_id=job.storage_id,
                                                                suite_id=suite.storage_id,
                                                                metric='allcpu', tag=rt + '.plt.svg'),
                                          tss=[(name, ts.data * 100 / total_over_time) for name, ts in cpu_ts.items()],
                                          average=True,
                                          ylabel="CPU time %",
                                          title="{} nodes CPU usage".format(rt.capitalize()))

            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
977
978
979# IO time and QD
class QDIOTimeHeatmap(JobReporter):
    """Per-device-group heatmaps over job run time: queue depth, write block size and IO time."""

    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        # TODO: fix this hardcode, need to track what devices are actually used on test and storage nodes
        # use saved storage info in nodes

        journal_devs = None
        storage_devs = None
        test_nodes_devs = ['rbd0']

        # collect journal/storage device sets; all storage nodes are expected
        # to have identical device layouts (asserted below)
        for node in find_nodes_by_roles(rstorage, STORAGE_ROLES):
            cjd = set(node.params['ceph_journal_devs'])
            if journal_devs is None:
                journal_devs = cjd
            else:
                assert journal_devs == cjd, "{!r} != {!r}".format(journal_devs, cjd)

            csd = set(node.params['ceph_storage_devs'])
            if storage_devs is None:
                storage_devs = csd
            else:
                assert storage_devs == csd, "{!r} != {!r}".format(storage_devs, csd)

        # reliable_info_range is in milliseconds, sensor time ranges are in seconds
        trange = (job.reliable_info_range[0] // 1000, job.reliable_info_range[1] // 1000)

        for name, devs, roles in [('storage', storage_devs, STORAGE_ROLES),
                                  ('journal', journal_devs, STORAGE_ROLES),
                                  ('test', test_nodes_devs, ['testnode'])]:
            # QD heatmap
            ioq2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                       metric='io_queue', time_range=trange)
            fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
                                                           job.storage_id,
                                                           AGG_TAG,
                                                           'block-io',
                                                           name,
                                                           metric='io_queue',
                                                           tag="hmap.svg"),
                                      ioq2d, ylabel="IO QD", title=name.capitalize() + " devs QD",
                                      bins=StyleProfile.qd_bins,
                                      xlabel='Time')  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))

            # Block size heatmap: average write size = sectors written / writes completed
            wc2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                      metric='writes_completed', time_range=trange)
            # avoid division by zero for idle intervals
            wc2d[wc2d < 1E-3] = 1
            sw2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                      metric='sectors_written', time_range=trange)
            data2d = sw2d / wc2d / 1024
            fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
                                                           job.storage_id,
                                                           AGG_TAG,
                                                           'block-io',
                                                           name,
                                                           metric='wr_block_size',
                                                           tag="hmap.svg"),
                                      data2d, ylabel="IO bsize, KiB", title=name.capitalize() + " write block size",
                                      xlabel='Time',
                                      bins=StyleProfile.block_size_bins)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))

            # iotime heatmap
            wtime2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                         metric='io_time', time_range=trange)
            fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
                                                           job.storage_id,
                                                           AGG_TAG,
                                                           'block-io',
                                                           name,
                                                           metric='io_time',
                                                           tag="hmap.svg"),
                                      wtime2d, ylabel="IO time (ms) per second",
                                      title=name.capitalize() + " iotime",
                                      xlabel='Time',
                                      bins=StyleProfile.iotime_bins)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
1061
1062
koder aka kdanilova732a602017-02-01 20:29:56 +02001063# IOPS/latency distribution
1064class IOHist(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001065 """IOPS.latency distribution histogram"""
koder aka kdanilova732a602017-02-01 20:29:56 +02001066 suite_types = {'fio'}
koder aka kdanilov108ac362017-01-19 20:17:16 +02001067
koder aka kdanilova732a602017-02-01 20:29:56 +02001068 def get_divs(self,
1069 suite: SuiteConfig,
1070 job: JobConfig,
1071 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilov108ac362017-01-19 20:17:16 +02001072
koder aka kdanilova732a602017-02-01 20:29:56 +02001073 fjob = cast(FioJobConfig, job)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001074
koder aka kdanilova732a602017-02-01 20:29:56 +02001075 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.H2(html.center("Load histograms")))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001076
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001077 # agg_lat = get_aggregated(rstorage, suite, fjob, "lat")
1078 # # bins_edges = numpy.array(get_lat_vals(agg_lat.data.shape[1]), dtype='float32') / 1000 # convert us to ms
1079 # lat_stat_prop = calc_histo_stat_props(agg_lat, bins_edges=None, rebins_count=StyleProfile.hist_lat_boxes)
1080 #
1081 # long_summary = cast(FioJobParams, fjob.params).long_summary
1082 #
1083 # title = "Latency distribution"
1084 # units = "ms"
1085 #
1086 # fpath = plot_hist(rstorage, agg_lat.source(tag='hist.svg'), title, units, lat_stat_prop) # type: str
1087 # yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilova732a602017-02-01 20:29:56 +02001088
1089 agg_io = get_aggregated(rstorage, suite, fjob, "bw")
1090
1091 if fjob.bsize >= LARGE_BLOCKS:
1092 title = "BW distribution"
1093 units = "MiBps"
kdanylov aka koder45183182017-04-30 23:55:40 +03001094 agg_io.data //= int(unit_conversion_coef(units, agg_io.units))
koder aka kdanilova732a602017-02-01 20:29:56 +02001095 else:
1096 title = "IOPS distribution"
kdanylov aka koder45183182017-04-30 23:55:40 +03001097 agg_io.data //= (int(unit_conversion_coef("KiBps", agg_io.units)) * fjob.bsize)
koder aka kdanilova732a602017-02-01 20:29:56 +02001098 units = "IOPS"
1099
1100 io_stat_prop = calc_norm_stat_props(agg_io, bins_count=StyleProfile.hist_boxes)
1101 fpath = plot_hist(rstorage, agg_io.source(tag='hist.svg'), title, units, io_stat_prop) # type: str
1102 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001103
1104
koder aka kdanilov108ac362017-01-19 20:17:16 +02001105# IOPS/latency over test time for each job
koder aka kdanilova732a602017-02-01 20:29:56 +02001106class IOTime(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001107 """IOPS/latency during test"""
koder aka kdanilova732a602017-02-01 20:29:56 +02001108 suite_types = {'fio'}
koder aka kdanilov108ac362017-01-19 20:17:16 +02001109
koder aka kdanilova732a602017-02-01 20:29:56 +02001110 def get_divs(self,
1111 suite: SuiteConfig,
1112 job: JobConfig,
1113 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilov108ac362017-01-19 20:17:16 +02001114
koder aka kdanilova732a602017-02-01 20:29:56 +02001115 fjob = cast(FioJobConfig, job)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001116
koder aka kdanilova732a602017-02-01 20:29:56 +02001117 agg_io = get_aggregated(rstorage, suite, fjob, "bw")
1118 if fjob.bsize >= LARGE_BLOCKS:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001119 title = "Fio measured Bandwidth over time"
koder aka kdanilova732a602017-02-01 20:29:56 +02001120 units = "MiBps"
kdanylov aka koder45183182017-04-30 23:55:40 +03001121 agg_io.data //= int(unit_conversion_coef(units, agg_io.units))
koder aka kdanilova732a602017-02-01 20:29:56 +02001122 else:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001123 title = "Fio measured IOPS over time"
kdanylov aka koder45183182017-04-30 23:55:40 +03001124 agg_io.data //= (int(unit_conversion_coef("KiBps", agg_io.units)) * fjob.bsize)
koder aka kdanilova732a602017-02-01 20:29:56 +02001125 units = "IOPS"
koder aka kdanilov108ac362017-01-19 20:17:16 +02001126
koder aka kdanilova732a602017-02-01 20:29:56 +02001127 fpath = plot_v_over_time(rstorage, agg_io.source(tag='ts.svg'), title, units, agg_io) # type: str
1128 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001129
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001130 agg_lat = get_aggregated(rstorage, suite, fjob, "lat").copy()
1131 TARGET_UNITS = 'ms'
1132 coef = unit_conversion_coef(agg_lat.units, TARGET_UNITS)
1133 agg_lat.histo_bins = agg_lat.histo_bins.copy() * float(coef)
1134 agg_lat.units = TARGET_UNITS
koder aka kdanilov108ac362017-01-19 20:17:16 +02001135
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001136 fpath = plot_lat_over_time(rstorage, agg_lat.source(tag='ts.svg'), "Latency",
1137 agg_lat, ylabel="Latency, " + agg_lat.units) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001138 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001139
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001140 fpath = plot_histo_heatmap(rstorage,
1141 agg_lat.source(tag='hmap.svg'),
1142 "Latency heatmap",
1143 agg_lat,
1144 ylabel="Latency, " + agg_lat.units,
1145 xlabel='Test time') # type: str
koder aka kdanilov108ac362017-01-19 20:17:16 +02001146
koder aka kdanilova732a602017-02-01 20:29:56 +02001147 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001148
1149
class ResourceUsage:
    """Plain container for resource consumption counters collected during one test."""

    def __init__(self, io_r_ops: int, io_w_ops: int, io_r_kb: int, io_w_kb: int) -> None:
        # block-device IO counters
        self.io_r_ops = io_r_ops
        self.io_w_ops = io_w_ops
        self.io_r_kb = io_r_kb
        self.io_w_kb = io_w_kb

        # CPU time counters, to be filled in later
        self.cpu_used_user = None  # type: int
        self.cpu_used_sys = None  # type: int
        self.cpu_wait_io = None  # type: int

        # network traffic counters, to be filled in later
        self.net_send_packets = None  # type: int
        self.net_recv_packets = None  # type: int
        self.net_send_kb = None  # type: int
        self.net_recv_kb = None  # type: int
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001165
1166
# Cluster load over test time
class ClusterLoad(JobReporter):
    """Plots of test-node block-device load (read/write IOPS and throughput) over job time"""

    # TODO: units should came from sensor
    # (sensor, metric, operation caption, display units)
    storage_sensors = [
        ('block-io', 'reads_completed', "Read", 'iop'),
        ('block-io', 'writes_completed', "Write", 'iop'),
        ('block-io', 'sectors_read', "Read", 'KiB'),
        ('block-io', 'sectors_written', "Write", 'KiB'),
    ]

    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        yield Menu1st.per_job, job.summary, HTMLBlock(html.H2(html.center("Cluster load")))

        for sensor, metric, op, units in self.storage_sensors:
            # NOTE(review): summ_sensors can return None when no data was collected
            # (it is checked at other call sites, e.g. StatInfo) - unchecked here, confirm
            ts = summ_sensors(rstorage, ['testnode'], sensor, metric, job.reliable_info_range_s)
            ds = DataSource(suite_id=suite.storage_id,
                            job_id=job.storage_id,
                            node_id="test_nodes",
                            sensor=sensor,
                            dev=AGG_TAG,
                            metric=metric,
                            tag="ts.svg")

            # sector counters are converted to KiB; iop counters are used as-is
            data = ts.data if units != 'KiB' else ts.data * float(unit_conversion_coef(ts.units, 'KiB'))
            # NOTE(review): after the KiB conversion above the units field still prefers
            # ts.units (the source units) when it is set - verify the label is correct
            ts = TimeSeries(name="",
                            times=numpy.arange(*job.reliable_info_range_s),
                            data=data,
                            raw=None,
                            units=units if ts.units is None else ts.units,
                            time_units=ts.time_units,
                            source=ds,
                            histo_bins=ts.histo_bins)

            sensor_title = "{} {}".format(op, units)
            fpath = plot_v_over_time(rstorage, ds, sensor_title, sensor_title, ts=ts)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001208
1209
# Resource consumption summary
class ResourceConsumption(Reporter):
    """Resources consumption report, only text (stub, no implementation yet)"""


# Node load over test time
class NodeLoad(Reporter):
    """Per-node load during the test (stub, no implementation yet)"""


# Ceph cluster summary
class CephClusterSummary(Reporter):
    """Ceph cluster state summary (stub, no implementation yet)"""
1223
1224
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001225# TODO: Ceph operation breakout report
1226# TODO: Resource consumption for different type of test
1227
1228
koder aka kdanilov108ac362017-01-19 20:17:16 +02001229# ------------------------------------------ REPORT STAGES -----------------------------------------------------------
1230
1231
class HtmlReportStage(Stage):
    """Pipeline stage which renders all collected test results into one HTML report."""

    priority = StepOrder.REPORT

    def run(self, ctx: TestRun) -> None:
        rstorage = ResultStorage(ctx.storage)

        job_reporters = [StatInfo(), IOTime(), IOHist(), ClusterLoad(), CPULoadPlot(),
                         QDIOTimeHeatmap()]  # type: List[JobReporter]
        reporters = []

        # reporters = [IO_QD()]  # type: List[Reporter]
        # job_reporters = [ClusterLoad()]

        # the report template and css are shipped with the wally package itself
        root_dir = os.path.dirname(os.path.dirname(wally.__file__))
        doc_templ_path = os.path.join(root_dir, "report_templates/index.html")
        # use context managers so the file handles are closed deterministically
        with open(doc_templ_path, "rt") as templ_fd:
            report_template = templ_fd.read()
        css_file_src = os.path.join(root_dir, "report_templates/main.css")
        with open(css_file_src, "rt") as css_fd:
            css_file = css_fd.read()

        menu_block = []
        content_block = []
        link_idx = 0

        # matplotlib.rcParams.update(ctx.config.reporting.matplotlib_params.raw())
        # ColorProfile.__dict__.update(ctx.config.reporting.colors.raw())
        # StyleProfile.__dict__.update(ctx.config.reporting.style.raw())

        # {first-level menu: {second-level menu: [html blocks]}}
        items = defaultdict(lambda: defaultdict(list))  # type: Dict[str, Dict[str, List[HTMLBlock]]]
        DEBUG = False
        # TODO: filter reporters
        for suite in rstorage.iter_suite(FioTest.name):
            all_jobs = list(rstorage.iter_job(suite))
            all_jobs.sort(key=lambda job: job.params)
            for job in all_jobs:
                for reporter in job_reporters:
                    logger.debug("Start reporter %s on job %s suite %s",
                                 reporter.__class__.__name__, job.summary, suite.test_type)
                    # named 'html_block' to avoid shadowing the imported html module
                    for block, item, html_block in reporter.get_divs(suite, job, rstorage):
                        items[block][item].append(html_block)
                if DEBUG:
                    break

            for reporter in reporters:
                logger.debug("Start reporter %s on suite %s", reporter.__class__.__name__, suite.test_type)
                for block, item, html_block in reporter.get_divs(suite, rstorage):
                    items[block][item].append(html_block)

            if DEBUG:
                break

        logger.debug("Generating result html")

        # build the collapsible navigation menu and the matching content divs
        for idx_1st, menu_1st in enumerate(sorted(items, key=lambda x: menu_1st_order.index(x))):
            menu_block.append(
                '<a href="#item{}" class="nav-group" data-toggle="collapse" data-parent="#MainMenu">{}</a>'
                .format(idx_1st, menu_1st)
            )
            menu_block.append('<div class="collapse" id="item{}">'.format(idx_1st))
            for menu_2nd in sorted(items[menu_1st]):
                menu_block.append('    <a href="#content{}" class="nav-group-item">{}</a>'
                                  .format(link_idx, menu_2nd))
                content_block.append('<div id="content{}">'.format(link_idx))
                content_block.extend("    " + x.data for x in items[menu_1st][menu_2nd])
                content_block.append('</div>')
                link_idx += 1
            menu_block.append('</div>')

        report = report_template.replace("{{{menu}}}", ("\n" + " " * 16).join(menu_block))
        report = report.replace("{{{content}}}", ("\n" + " " * 16).join(content_block))
        report_path = rstorage.put_report(report, "index.html")
        rstorage.put_report(css_file, "main.css")
        logger.info("Report is stored into %r", report_path)
1303 logger.info("Report is stored into %r", report_path)