blob: 8c2c1810926300aa51e82ec7b2dcb5ff10b07211 [file] [log] [blame]
koder aka kdanilov108ac362017-01-19 20:17:16 +02001import os
koder aka kdanilov7f59d562016-12-26 01:34:23 +02002import abc
koder aka kdanilova047e1b2015-04-21 23:16:59 +03003import logging
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03004import warnings
koder aka kdanilov108ac362017-01-19 20:17:16 +02005from io import BytesIO
6from functools import wraps
koder aka kdanilov108ac362017-01-19 20:17:16 +02007from collections import defaultdict
kdanylov aka koder736e5c12017-05-07 17:27:14 +03008from typing import Dict, Any, Iterator, Tuple, cast, List, Callable, Set, Optional, Union
koder aka kdanilovcff7b2e2015-04-18 20:48:15 +03009
koder aka kdanilovffaf48d2016-12-27 02:25:29 +020010import numpy
koder aka kdanilov108ac362017-01-19 20:17:16 +020011import scipy.stats
kdanylov aka koder736e5c12017-05-07 17:27:14 +030012import matplotlib.style
kdanylov aka koder4e4af682017-05-01 01:52:14 +030013from matplotlib.figure import Figure
koder aka kdanilova732a602017-02-01 20:29:56 +020014import matplotlib.pyplot as plt
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030015from matplotlib import gridspec
kdanylov aka koder736e5c12017-05-07 17:27:14 +030016from statsmodels.tsa.stattools import adfuller
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030017
18from cephlib.common import float2str
19from cephlib.plot import plot_hmap_with_y_histo, hmap_from_2d
kdanylov aka koder736e5c12017-05-07 17:27:14 +030020import xmlbuilder3
koder aka kdanilovbe8f89f2015-04-28 14:51:51 +030021
koder aka kdanilov108ac362017-01-19 20:17:16 +020022import wally
koder aka kdanilovffaf48d2016-12-27 02:25:29 +020023
koder aka kdanilov108ac362017-01-19 20:17:16 +020024from . import html
koder aka kdanilov39e449e2016-12-17 15:15:26 +020025from .stage import Stage, StepOrder
26from .test_run_class import TestRun
koder aka kdanilov108ac362017-01-19 20:17:16 +020027from .hlstorage import ResultStorage
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030028from .utils import b2ssize, b2ssize_10, STORAGE_ROLES, unit_conversion_coef
koder aka kdanilova732a602017-02-01 20:29:56 +020029from .statistic import (calc_norm_stat_props, calc_histo_stat_props, moving_average, moving_dev,
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030030 hist_outliers_perc, find_ouliers_ts, approximate_curve)
31from .result_classes import (StatProps, DataSource, TimeSeries, NormStatProps, HistoStatProps, SuiteConfig)
koder aka kdanilov108ac362017-01-19 20:17:16 +020032from .suits.io.fio import FioTest, FioJobConfig
koder aka kdanilova732a602017-02-01 20:29:56 +020033from .suits.io.fio_job import FioJobParams
34from .suits.job import JobConfig
kdanylov aka koder736e5c12017-05-07 17:27:14 +030035from .data_selectors import (get_aggregated, AGG_TAG, summ_sensors, find_sensors_to_2d, find_nodes_by_roles,
36 get_ts_for_time_range)
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030037
38
39with warnings.catch_warnings():
40 warnings.simplefilter("ignore")
41 import seaborn
koder aka kdanilovcff7b2e2015-04-18 20:48:15 +030042
koder aka kdanilov4a510ee2015-04-21 18:50:42 +030043
koder aka kdanilov962ee5f2016-12-19 02:40:08 +020044logger = logging.getLogger("wally")
koder aka kdanilova047e1b2015-04-21 23:16:59 +030045
46
koder aka kdanilov108ac362017-01-19 20:17:16 +020047# ---------------- CONSTS ---------------------------------------------------------------------------------------------
koder aka kdanilov39e449e2016-12-17 15:15:26 +020048
koder aka kdanilov7f59d562016-12-26 01:34:23 +020049
koder aka kdanilov108ac362017-01-19 20:17:16 +020050DEBUG = False
koder aka kdanilov39e449e2016-12-17 15:15:26 +020051
koder aka kdanilov39e449e2016-12-17 15:15:26 +020052
koder aka kdanilov108ac362017-01-19 20:17:16 +020053# ---------------- PROFILES ------------------------------------------------------------------------------------------
54
55
# These are default values; real values are loaded from config
57
class ColorProfile:
    """Colors and alpha values shared by all plot functions below."""
    primary_color = 'b'        # main data series
    suppl_color1 = 'teal'      # auxiliary series (e.g. moving average)
    suppl_color2 = 'magenta'   # auxiliary series (e.g. deviation bounds)
    suppl_color3 = 'orange'
    box_color = 'y'            # histogram bars
    err_color = 'red'          # outlier points

    noise_alpha = 0.3          # alpha for raw points when an average is also drawn
    subinfo_alpha = 0.7

    imshow_colormap = None  # type: str
    hmap_cmap = "Blues"        # colormap for heatmap plots
koder aka kdanilova732a602017-02-01 20:29:56 +020071
koder aka kdanilov108ac362017-01-19 20:17:16 +020072
# Default image formats: one for regular plots, one for IO (QD) charts.
default_format = 'svg'
io_chart_format = 'svg'
kdanylov aka koder4e4af682017-05-01 01:52:14 +030075
76
class StyleProfile:
    """Plot layout/styling knobs.  Defaults live here; real values are loaded from config."""

    # matplotlib style sheet names
    default_style = 'seaborn-white'
    io_chart_style = 'classic'

    dpi = 80
    grid = True
    tide_layout = False        # when True, enable matplotlib tight layout
    hist_boxes = 10            # bin count used when rebinning histograms
    hist_lat_boxes = 25
    hm_hist_bins_count = 25    # Y-axis bin count for heatmaps
    hm_x_slots = 25            # max X-axis slots for heatmaps
    min_points_for_dev = 5     # minimum samples needed to plot a deviation band

    x_label_rotation = 35

    dev_range_x = 2.0          # deviation band half-width, in stddev multiples
    dev_perc = 95

    point_shape = 'o'          # marker for normal data points
    err_point_shape = '*'      # marker for outlier points

    avg_range = 20             # window size for moving average / moving deviation
    approx_average = True      # smooth the moving average with a curve fit

    curve_approx_level = 6     # degree passed to approximate_curve
    curve_approx_points = 100
    # moving-average window must be able to produce a deviation estimate
    assert avg_range >= min_points_for_dev

    # figure size in inches
    figsize = (8, 4)
    figsize_long = (8, 4)
    qd_chart_inches = (16, 9)

    # right margin: with legend reserved / without legend
    subplot_adjust_r = 0.75
    subplot_adjust_r_no_legend = 0.9
    title_font_size = 12

    extra_io_spine = True

    legend_for_eng = True
    # heatmap_interpolation = '1d'
    heatmap_interpolation = None
    heatmap_interpolation_points = 300
    outliers_q_nd = 3.0        # cut range passed to find_ouliers_ts
    outliers_hide_q_nd = 4.0   # outliers beyond this range are hidden entirely
    outliers_lat = (0.01, 0.9) # percentile bounds for latency outlier trimming

    violin_instead_of_box = True
    violin_point_count = 30000 # max synthetic points fed into violinplot

    heatmap_colorbar = False

    min_iops_vs_qd_jobs = 3

    # bin edges for queue-depth / io-time / block-size histograms
    qd_bins = [0, 1, 2, 4, 6, 8, 12, 16, 20, 26, 32, 40, 48, 56, 64, 96, 128]
    iotime_bins = list(range(0, 1030, 50))
    block_size_bins = [0, 2, 4, 8, 16, 32, 48, 64, 96, 128, 192, 256, 384, 512, 1024, 2048]
    large_blocks = 256
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300135
koder aka kdanilov108ac362017-01-19 20:17:16 +0200136
# Shared default profile instances, used as default argument values below.
DefColorProfile = ColorProfile()
DefStyleProfile = StyleProfile()
139
140
koder aka kdanilov108ac362017-01-19 20:17:16 +0200141# ---------------- STRUCTS -------------------------------------------------------------------------------------------
koder aka kdanilov39e449e2016-12-17 15:15:26 +0200142
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200143
# TODO: needs to be revised to use StatProps fields instead
class StoragePerfSummary:
    """Aggregate performance numbers for one storage under test.

    Integer peak counters start at zero; measured statistics start as None
    and are filled in later by the report code.

    NOTE(review): the ``name`` argument is accepted but never stored -
    preserved as-is to keep the constructor signature unchanged.
    """

    def __init__(self, name: str) -> None:
        # Peak counters, all initialized to zero.
        # direct_iops_w64_max: 64k blocks used instead of 4k to feed caches faster.
        for counter in ('direct_iops_r_max', 'direct_iops_w_max',
                        'direct_iops_w64_max',
                        'rws4k_10ms', 'rws4k_30ms', 'rws4k_100ms',
                        'bw_write_max', 'bw_read_max'):
            setattr(self, counter, 0)

        # Measured statistics, assigned later.
        for stat in ('bw', 'iops', 'lat', 'lat_50', 'lat_95'):
            setattr(self, stat, None)
164
165
class IOSummary:
    """Bandwidth/latency statistics of one fio job, aggregated over all test nodes."""

    def __init__(self,
                 qd: int,
                 block_size: int,
                 nodes_count: int,
                 bw: NormStatProps,
                 lat: HistoStatProps) -> None:
        self.qd = qd                    # queue depth the job ran with
        self.block_size = block_size    # IO block size of the job
        self.nodes_count = nodes_count  # how many nodes participated
        self.bw = bw                    # bandwidth statistics
        self.lat = lat                  # latency histogram statistics
180
181
182# -------------- AGGREGATION AND STAT FUNCTIONS ----------------------------------------------------------------------
koder aka kdanilov108ac362017-01-19 20:17:16 +0200183
# Memoization for make_iosum, keyed by (suite storage id, job storage id).
iosum_cache = {}  # type: Dict[Tuple[str, str], IOSummary]
185
186
def make_iosum(rstorage: ResultStorage, suite: SuiteConfig, job: FioJobConfig, nc: bool = False) -> IOSummary:
    """Build (and memoize) an IOSummary for the given suite/job pair.

    Pass nc=True ("no cache") to bypass the module-level cache on both
    read and write.
    """
    cache_key = (suite.storage_id, job.storage_id)
    if not nc:
        cached = iosum_cache.get(cache_key)
        if cached is not None:
            return cached

    lat_ts = get_aggregated(rstorage, suite, job, "lat")
    bw_ts = get_aggregated(rstorage, suite, job, "bw")

    summary = IOSummary(job.qd,
                        nodes_count=len(suite.nodes_ids),
                        block_size=job.bsize,
                        lat=calc_histo_stat_props(lat_ts, rebins_count=StyleProfile.hist_boxes),
                        bw=calc_norm_stat_props(bw_ts, StyleProfile.hist_boxes))

    if not nc:
        iosum_cache[cache_key] = summary

    return summary
koder aka kdanilov108ac362017-01-19 20:17:16 +0200205
koder aka kdanilov108ac362017-01-19 20:17:16 +0200206
def is_sensor_numarray(sensor: str, metric: str) -> bool:
    """Returns True if sensor provides one-dimension array of numeric values. One number per one measurement."""
    # Every sensor currently known to wally matches this description.
    return True
210
211
# Sensors whose metric is an instantaneous level (e.g. current queue depth)
# rather than a delta accumulated over the measurement interval.
LEVEL_SENSORS = {("block-io", "io_queue"),
                 ("system-cpu", "procs_blocked"),
                 ("system-cpu", "procs_queue")}


def is_level_sensor(sensor: str, metric: str) -> bool:
    """Returns True if sensor measure level of any kind, E.g. queue depth."""
    return (sensor, metric) in LEVEL_SENSORS


def is_delta_sensor(sensor: str, metric: str) -> bool:
    """Returns True if sensor provides deltas for cumulative value. E.g. io completed in given period"""
    # A sensor is delta-valued exactly when it is not level-valued.
    return (sensor, metric) not in LEVEL_SENSORS
225
kdanylov aka koder736e5c12017-05-07 17:27:14 +0300226
# Memoization for get_cluster_cpu_load, keyed by storage identity, roles and time range.
cpu_load_cache = {}  # type: Dict[Tuple[int, Tuple[str, ...], Tuple[int, int]], Dict[str, TimeSeries]]
228
229
def get_cluster_cpu_load(rstorage: ResultStorage, roles: List[str],
                         time_range: Tuple[int, int], nc: bool = False) -> Dict[str, TimeSeries]:
    """Sum per-metric CPU usage over all nodes that carry the given roles.

    Returns a dict mapping metric name -> summed TimeSeries, plus a
    synthetic 'total' entry holding the element-wise sum of all metrics.
    Results are memoized in cpu_load_cache unless nc ("no cache") is True.
    """
    key = (id(rstorage), tuple(roles), time_range)
    if not nc and key in cpu_load_cache:
        return cpu_load_cache[key]

    cpu_ts = {}
    cpu_metrics = "idle guest iowait sirq nice irq steal sys user".split()
    for name in cpu_metrics:
        cpu_ts[name] = summ_sensors(rstorage, roles, sensor='system-cpu', metric=name, time_range=time_range)

    it = iter(cpu_ts.values())
    # NOTE(review): only the later metrics are None-checked - this assumes the
    # first one ('idle', dicts preserve insertion order) is never None.
    # Confirm summ_sensors cannot return None for 'idle'.
    total_over_time = next(it).data.copy()  # type: numpy.ndarray
    for ts in it:
        if ts is not None:
            total_over_time += ts.data

    # Clone the 'idle' series to inherit times/units, then swap in the summed data.
    total = cpu_ts['idle'].copy(no_data=True)
    total.data = total_over_time
    cpu_ts['total'] = total

    if not nc:
        cpu_load_cache[key] = cpu_ts

    return cpu_ts
256
257
koder aka kdanilov108ac362017-01-19 20:17:16 +0200258# -------------- PLOT HELPERS FUNCTIONS ------------------------------------------------------------------------------
259
def get_emb_image(fig: Figure, format: str, **opts) -> bytes:
    """Render *fig* into bytes suitable for embedding into an HTML report.

    For svg output everything up to and including matplotlib's creation
    comment is stripped, leaving only the embeddable markup; other formats
    are returned as rendered.
    """
    bio = BytesIO()
    fig.savefig(bio, format=format, **opts)
    rendered = bio.getvalue()
    if format != 'svg':
        return rendered
    img_start = "<!-- Created with matplotlib (http://matplotlib.org/) -->"
    return rendered.decode("utf8").split(img_start, 1)[1].encode("utf8")
koder aka kdanilov108ac362017-01-19 20:17:16 +0200269
270
def provide_plot(func: Callable[..., None]) -> Callable[..., str]:
    """Decorator turning a figure-drawing function into a cached plot producer.

    The wrapped function receives (storage, path, *args, **kwargs).  If the
    plot file for *path* already exists in the result storage its path is
    returned without re-drawing; otherwise a new figure is created, handed to
    *func* to draw into, rendered via get_emb_image and saved.  Returns the
    file path of the stored image either way.
    """
    @wraps(func)
    def closure1(storage: ResultStorage,
                 path: DataSource,
                 *args, **kwargs) -> str:
        fpath = storage.check_plot_file(path)
        if not fpath:
            # image format is encoded in the last dot-separated piece of the tag
            format = path.tag.split(".")[-1]
            fig = plt.figure(figsize=StyleProfile.figsize)
            plt.style.use(StyleProfile.default_style)
            func(fig, *args, **kwargs)
            fpath = storage.put_plot_file(get_emb_image(fig, format=format, dpi=DefStyleProfile.dpi), path)
            logger.debug("Plot %s saved to %r", path, fpath)
            # close explicitly so long report runs don't accumulate figures
            plt.close(fig)
        return fpath
    return closure1
287
288
def apply_style(fig: Figure, title: str, style: StyleProfile, eng: bool = True,
                no_legend: bool = False) -> None:
    """Apply the common look-and-feel to every axes of *fig*.

    Enables/disables the grid, places a legend to the right of the plot
    (when appropriate), optionally turns on tight layout and sets the title.

    Fix: the right-margin values were previously read from the StyleProfile
    class, silently ignoring a caller-supplied *style* profile; they now come
    from the *style* argument (same values for the default profile).
    """
    for ax in fig.axes:
        ax.grid(style.grid)

    if (style.legend_for_eng or not eng) and not no_legend:
        # reserve room on the right for the legend box
        fig.subplots_adjust(right=style.subplot_adjust_r)
        legend_location = "center left"
        legend_bbox_to_anchor = (1.03, 0.81)
        for ax in fig.axes:
            ax.legend(loc=legend_location, bbox_to_anchor=legend_bbox_to_anchor)
    else:
        fig.subplots_adjust(right=style.subplot_adjust_r_no_legend)

    if style.tide_layout:
        fig.set_tight_layout(True)

    fig.suptitle(title, fontsize=style.title_font_size)
koder aka kdanilov108ac362017-01-19 20:17:16 +0200308
309
310# -------------- PLOT FUNCTIONS --------------------------------------------------------------------------------------
311
312
@provide_plot
def plot_hist(fig: Figure, title: str, units: str,
              prop: StatProps,
              colors: ColorProfile = DefColorProfile,
              style: StyleProfile = DefStyleProfile) -> None:
    """Plot a normalized histogram of *prop*; for normal-stat props also
    overlay the probability curve expected from a normal distribution."""

    ax = fig.add_subplot(111)

    # TODO: unit should came from ts
    # normalize bin populations so bar heights sum to 1
    normed_bins = prop.bins_populations / prop.bins_populations.sum()
    bar_width = prop.bins_edges[1] - prop.bins_edges[0]
    ax.bar(prop.bins_edges, normed_bins, color=colors.box_color, width=bar_width, label="Real data")

    ax.set(xlabel=units, ylabel="Value probability")

    dist_plotted = False
    if isinstance(prop, NormStatProps):
        nprop = cast(NormStatProps, prop)
        stats = scipy.stats.norm(nprop.average, nprop.deviation)

        # refine the grid 10x so the fitted curve looks smooth
        new_edges, step = numpy.linspace(prop.bins_edges[0], prop.bins_edges[-1],
                                         len(prop.bins_edges) * 10, retstep=True)

        # NOTE(review): the magic 11 multiplier presumably rescales the per-fine-bin
        # probabilities to be comparable with the 10x-wider histogram bars - confirm.
        ypoints = stats.cdf(new_edges) * 11
        # per-bin probability = difference of CDF at consecutive edges
        ypoints = [next - prev for (next, prev) in zip(ypoints[1:], ypoints[:-1])]
        # plot each probability at the center of its fine bin
        xpoints = (new_edges[1:] + new_edges[:-1]) / 2

        ax.plot(xpoints, ypoints, color=colors.primary_color, label="Expected from\nnormal\ndistribution")
        dist_plotted = True

    ax.set_xlim(left=prop.bins_edges[0])
    if prop.log_bins:
        ax.set_xscale('log')

    apply_style(fig, title, style, eng=True, no_legend=not dist_plotted)
koder aka kdanilov108ac362017-01-19 20:17:16 +0200348
349
@provide_plot
def plot_simple_over_time(fig: Figure,
                          tss: List[Tuple[str, numpy.ndarray]],
                          title: str,
                          ylabel: str,
                          xlabel: str = "time, s",
                          average: bool = False,
                          colors: ColorProfile = DefColorProfile,
                          style: StyleProfile = DefStyleProfile) -> None:
    """Draw one line per (name, values) pair, optionally replacing each
    series with its (possibly curve-fitted) moving average."""
    ax = fig.add_subplot(111)
    for label, values in tss:
        if average:
            smoothed = moving_average(values, style.avg_range)
            if style.approx_average:
                xs = numpy.arange(len(smoothed))
                smoothed = approximate_curve(xs, smoothed, xs, style.curve_approx_level)
            values = smoothed
        ax.plot(values, label=label)
    ax.set(xlabel=xlabel, ylabel=ylabel)
    apply_style(fig, title, style, eng=True)
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300370
371
@provide_plot
def plot_simple_bars(fig: Figure,
                     title: str,
                     names: List[str],
                     values: List[float],
                     errs: List[float] = None,
                     colors: ColorProfile = DefColorProfile,
                     style: StyleProfile = DefStyleProfile) -> None:
    """Horizontal bar chart: one bar per name, with optional error bars.

    Fix: *errs* defaults to None, but the axis-limit computation used to
    zip it with *values* unconditionally and crashed with a TypeError when
    no error bars were supplied; None is now handled.
    """
    ax = fig.add_subplot(111)
    ind = numpy.arange(len(names))
    width = 0.35
    ax.barh(ind, values, width, xerr=errs)

    ax.set_yticks(ind + width / 2)
    ax.set_yticklabels(names)

    # scale the X axis to fit the longest bar (plus its error bar, if any)
    if errs is None:
        max_x = max(values)
    else:
        max_x = max(val + err for val, err in zip(values, errs))
    ax.set_xlim(0, max_x * 1.1)

    apply_style(fig, title, style, no_legend=True)
    # reference line at x=1.0 (e.g. for normalized/relative values)
    ax.axvline(x=1.0, color='r', linestyle='--', linewidth=1, alpha=0.5)
    # leave room on the left for the name labels
    fig.subplots_adjust(left=0.2)
393
394
@provide_plot
def plot_hmap_from_2d(fig: Figure,
                      data2d: numpy.ndarray,
                      title: str, ylabel: str, xlabel: str = 'time, s', bins: numpy.ndarray = None,
                      colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    """Render a 2D array as a heatmap with a side histogram over the Y axis.

    Conversion of the 2D data into heatmap form and the actual drawing are
    delegated to cephlib's hmap_from_2d / plot_hmap_with_y_histo helpers.
    """
    fig.set_size_inches(*style.figsize_long)
    ioq1d, ranges = hmap_from_2d(data2d)
    ax, _ = plot_hmap_with_y_histo(fig, ioq1d, ranges, bins=bins, cmap=colors.hmap_cmap)
    ax.set(ylabel=ylabel, xlabel=xlabel)
    apply_style(fig, title, style, no_legend=True)
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300405
406
@provide_plot
def plot_v_over_time(fig: Figure,
                     title: str,
                     units: str,
                     ts: TimeSeries,
                     plot_avg_dev: bool = True,
                     plot_points: bool = True,
                     colors: ColorProfile = DefColorProfile,
                     style: StyleProfile = DefStyleProfile) -> None:
    """Scatter a time series over time, marking outliers, with an optional
    moving-average line and +/- stddev band."""

    min_time = min(ts.times)

    # convert timestamps to seconds since test start
    coef = float(unit_conversion_coef(ts.time_units, 's'))
    time_points = numpy.array([(val_time - min_time) * coef for val_time in ts.times])

    # two outlier masks: mild outliers are drawn specially, extreme
    # (beyond outliers_hide_q_nd) are dropped entirely
    outliers_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_q_nd)
    outliers_4q_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_hide_q_nd)
    normal_idxs = numpy.logical_not(outliers_idxs)
    outliers_idxs = outliers_idxs & numpy.logical_not(outliers_4q_idxs)
    # hidden_outliers_count = numpy.count_nonzero(outliers_4q_idxs)

    data = ts.data[normal_idxs]
    data_times = time_points[normal_idxs]
    outliers = ts.data[outliers_idxs]
    outliers_times = time_points[outliers_idxs]

    ax = fig.add_subplot(111)

    if plot_points:
        # dim the raw points if an average line will be drawn on top
        alpha = colors.noise_alpha if plot_avg_dev else 1.0
        ax.plot(data_times, data, style.point_shape,
                color=colors.primary_color, alpha=alpha, label="Data")
        ax.plot(outliers_times, outliers, style.err_point_shape,
                color=colors.err_color, label="Outliers")

    has_negative_dev = False
    plus_minus = "\xb1"

    if plot_avg_dev and len(data) < style.avg_range * 2:
        logger.warning("Array %r to small to plot average over %s points", title, style.avg_range)
    elif plot_avg_dev:
        avg_vals = moving_average(data, style.avg_range)
        dev_vals = moving_dev(data, style.avg_range)
        avg_times = moving_average(data_times, style.avg_range)

        if style.approx_average:
            avg_vals = approximate_curve(avg_times, avg_vals, avg_times, style.curve_approx_level)
            dev_vals = approximate_curve(avg_times, dev_vals, avg_times, style.curve_approx_level)

        ax.plot(avg_times, avg_vals, c=colors.suppl_color1, label="Average")

        low_vals_dev = avg_vals - dev_vals * style.dev_range_x
        hight_vals_dev = avg_vals + dev_vals * style.dev_range_x
        # label "N*stdev" as an int when dev_range_x is (almost) integral
        if style.dev_range_x - int(style.dev_range_x) < 0.01:
            ax.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                    label="{}{}*stdev".format(plus_minus, int(style.dev_range_x)))
        else:
            ax.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                    label="{}{}*stdev".format(plus_minus, style.dev_range_x))
        ax.plot(avg_times, hight_vals_dev, c=colors.suppl_color2)
        has_negative_dev = low_vals_dev.min() < 0

    ax.set_xlim(-5, max(time_points) + 5)
    ax.set_xlabel("Time, seconds from test begin")

    if plot_avg_dev:
        ax.set_ylabel("{}. Average and {}stddev over {} points".format(units, plus_minus, style.avg_range))
    else:
        ax.set_ylabel(units)

    # clip the Y axis at zero if the lower deviation band dipped below it
    if has_negative_dev:
        ax.set_ylim(bottom=0)

    apply_style(fig, title, style, eng=True)
koder aka kdanilov108ac362017-01-19 20:17:16 +0200482
483
@provide_plot
def plot_lat_over_time(fig: Figure,
                       title: str,
                       ts: TimeSeries,
                       ylabel: str,
                       samples: int = 5,
                       colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    """Show how the latency distribution evolves over the test run.

    The run is split into *samples* time windows; the per-window latency
    histograms are expanded back into synthetic sample arrays and drawn as
    violin plots (or box plots, per style.violin_instead_of_box).
    """

    min_time = min(ts.times)
    # times are rounded to whole seconds (assumes ts.times are in ms - TODO confirm)
    times = [int(tm - min_time + 500) // 1000 for tm in ts.times]
    ts_len = len(times)
    step = ts_len / samples
    # pick *samples* evenly spaced boundary points plus the final timestamp
    points = [times[int(i * step + 0.5)] for i in range(samples)]
    points.append(times[-1])
    bounds = list(zip(points[:-1], points[1:]))
    agg_data = []
    positions = []
    labels = []

    for begin, end in bounds:
        # merge all histograms of the window into one
        agg_hist = ts.data[begin:end].sum(axis=0)

        if style.violin_instead_of_box:
            # cut outliers
            idx1, idx2 = hist_outliers_perc(agg_hist, style.outliers_lat)
            agg_hist = agg_hist[idx1:idx2]
            curr_bins_vals = ts.histo_bins[idx1:idx2]

            # cap the synthetic sample count fed into violinplot
            correct_coef = style.violin_point_count / sum(agg_hist)
            if correct_coef > 1:
                correct_coef = 1
        else:
            curr_bins_vals = ts.histo_bins
            correct_coef = 1

        # expand the histogram back into a flat array of (scaled) samples
        vals = numpy.empty(shape=[numpy.sum(agg_hist)], dtype='float32')
        cidx = 0

        non_zero, = agg_hist.nonzero()
        for pos in non_zero:
            count = int(agg_hist[pos] * correct_coef + 0.5)

            if count != 0:
                vals[cidx: cidx + count] = curr_bins_vals[pos]
                cidx += count

        agg_data.append(vals[:cidx])
        positions.append((end + begin) / 2)
        labels.append(str((end + begin) // 2))

    ax = fig.add_subplot(111)
    if style.violin_instead_of_box:
        patches = ax.violinplot(agg_data,
                                positions=positions,
                                showmeans=True,
                                showmedians=True,
                                widths=step / 2)

        patches['cmeans'].set_color("blue")
        patches['cmedians'].set_color("green")
        if style.legend_for_eng:
            legend_location = "center left"
            legend_bbox_to_anchor = (1.03, 0.81)
            ax.legend([patches['cmeans'], patches['cmedians']], ["mean", "median"],
                      loc=legend_location, bbox_to_anchor=legend_bbox_to_anchor)
    else:
        ax.boxplot(agg_data, 0, '', positions=positions, labels=labels, widths=step / 4)

    ax.set_xlim(min(times), max(times))
    ax.set(ylabel=ylabel, xlabel="Time, seconds from test begin, sampled for ~{} seconds".format(int(step)))
    apply_style(fig, title, style, eng=True, no_legend=True)
    fig.subplots_adjust(right=style.subplot_adjust_r)
koder aka kdanilov108ac362017-01-19 20:17:16 +0200556
557
558@provide_plot
kdanylov aka koder4e4af682017-05-01 01:52:14 +0300559def plot_histo_heatmap(fig: Figure,
560 title: str,
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300561 ts: TimeSeries,
562 ylabel: str,
563 xlabel: str = "time, s",
kdanylov aka koder45183182017-04-30 23:55:40 +0300564 colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
koder aka kdanilov108ac362017-01-19 20:17:16 +0200565
kdanylov aka koder4e4af682017-05-01 01:52:14 +0300566 fig.set_size_inches(*style.figsize_long)
567
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300568 # only histogram-based ts can be plotted
koder aka kdanilova732a602017-02-01 20:29:56 +0200569 assert len(ts.data.shape) == 2
koder aka kdanilova732a602017-02-01 20:29:56 +0200570
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300571 # Find global outliers. As load is expected to be stable during one job
572 # outliers range can be detected globally
koder aka kdanilova732a602017-02-01 20:29:56 +0200573 total_hist = ts.data.sum(axis=0)
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300574 idx1, idx2 = hist_outliers_perc(total_hist,
575 bounds_perc=style.outliers_lat,
576 min_bins_left=style.hm_hist_bins_count)
koder aka kdanilova732a602017-02-01 20:29:56 +0200577
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300578 # merge outliers with most close non-outliers cell
579 orig_data = ts.data[:, idx1:idx2].copy()
580 if idx1 > 0:
581 orig_data[:, 0] += ts.data[:, :idx1].sum(axis=1)
koder aka kdanilova732a602017-02-01 20:29:56 +0200582
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300583 if idx2 < ts.data.shape[1]:
584 orig_data[:, -1] += ts.data[:, idx2:].sum(axis=1)
koder aka kdanilova732a602017-02-01 20:29:56 +0200585
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300586 bins_vals = ts.histo_bins[idx1:idx2]
587
588 # rebin over X axis
589 # aggregate some lines in ts.data to plot not more than style.hm_x_slots x bins
590 agg_idx = float(len(orig_data)) / style.hm_x_slots
591 if agg_idx >= 2:
592 data = numpy.zeros([style.hm_x_slots, orig_data.shape[1]], dtype=numpy.float32) # type: List[numpy.ndarray]
593 next = agg_idx
594 count = 0
595 data_idx = 0
596 for idx, arr in enumerate(orig_data):
597 if idx >= next:
598 data[data_idx] /= count
599 data_idx += 1
600 next += agg_idx
601 count = 0
602 data[data_idx] += arr
603 count += 1
604
605 if count > 1:
606 data[-1] /= count
607 else:
608 data = orig_data
609
610 # rebin over Y axis
611 # =================
koder aka kdanilova732a602017-02-01 20:29:56 +0200612
613 # don't using rebin_histogram here, as we need apply same bins for many arrays
614 step = (bins_vals[-1] - bins_vals[0]) / style.hm_hist_bins_count
615 new_bins_edges = numpy.arange(style.hm_hist_bins_count) * step + bins_vals[0]
616 bin_mapping = numpy.clip(numpy.searchsorted(new_bins_edges, bins_vals) - 1, 0, len(new_bins_edges) - 1)
617
618 # map origin bins ranges to heatmap bins, iterate over rows
619 cmap = []
620 for line in data:
621 curr_bins = [0] * style.hm_hist_bins_count
622 for idx, count in zip(bin_mapping, line):
623 curr_bins[idx] += count
624 cmap.append(curr_bins)
625 ncmap = numpy.array(cmap)
626
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300627 # plot data
628 # =========
koder aka kdanilova732a602017-02-01 20:29:56 +0200629
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300630 boxes = 3
631 gs = gridspec.GridSpec(1, boxes)
632 ax = fig.add_subplot(gs[0, :boxes - 1])
koder aka kdanilova732a602017-02-01 20:29:56 +0200633
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300634 labels = list(map(float2str, (new_bins_edges[:-1] + new_bins_edges[1:]) / 2)) + \
635 [float2str(new_bins_edges[-1]) + "+"]
636 seaborn.heatmap(ncmap[:,::-1].T, xticklabels=False, cmap="Blues", ax=ax)
637 ax.set_yticklabels(labels, rotation='horizontal')
638 ax.set_xticklabels([])
koder aka kdanilova732a602017-02-01 20:29:56 +0200639
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300640 # plot overall histogram
641 # =======================
koder aka kdanilova732a602017-02-01 20:29:56 +0200642
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300643 ax2 = fig.add_subplot(gs[0, boxes - 1])
644 ax2.set_yticklabels([])
645 ax2.set_xticklabels([])
koder aka kdanilova732a602017-02-01 20:29:56 +0200646
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300647 histo = ncmap.sum(axis=0).reshape((-1,))
648 ax2.set_ylim(top=histo.size, bottom=0)
kdanylov aka koder4e4af682017-05-01 01:52:14 +0300649 ax2.barh(numpy.arange(histo.size) + 0.5, width=histo)
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300650
kdanylov aka koder4e4af682017-05-01 01:52:14 +0300651 ax.set(ylabel=ylabel, xlabel=xlabel)
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300652
kdanylov aka koder4e4af682017-05-01 01:52:14 +0300653 apply_style(fig, title, style, eng=True, no_legend=True)
koder aka kdanilova732a602017-02-01 20:29:56 +0200654
koder aka kdanilov108ac362017-01-19 20:17:16 +0200655
@provide_plot
def io_chart(fig: Figure,
             title: str,
             legend: str,
             iosums: List[IOSummary],
             iops_log_spine: bool = False,
             lat_log_spine: bool = False,
             colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    """Draw the summary "IOPS/BW and latency vs queue depth" chart onto ``fig``.

    One bar per entry in ``iosums`` (expected sorted by QD). The primary left Y
    spine shows IOPS for small blocks or MiBps for large ones; a twin Y spine
    on the right carries median and 95-percentile latency lines; optionally an
    extra offset left spine shows the complementary BW/IOPS scale.

    :param fig: matplotlib figure to draw on (injected by @provide_plot)
    :param title: chart title, applied via apply_style
    :param legend: legend label for the IOPS/BW bars
    :param iosums: per-QD aggregated job results
    :param iops_log_spine: use log scale for the IO Y spine(s)
    :param lat_log_spine: use log scale for the latency Y spine
    :param colors: color profile for bars/error ranges
    :param style: layout and style profile
    """

    # -------------- MAGIC VALUES ---------------------
    # IOPS bar width
    width = 0.2

    # offset from center of bar to deviation/confidence range indicator
    err_x_offset = 0.03

    # extra space on top and bottom, comparing to maximal tight layout
    extra_y_space = 0.05

    # additional spine for BW/IOPS on left side of plot
    extra_io_spine_x_offset = -0.1

    # extra space on left and right sides
    extra_x_space = 0.5

    # legend location settings
    legend_location = "center left"
    legend_bbox_to_anchor = (1.1, 0.81)

    # -------------- END OF MAGIC VALUES ---------------------

    matplotlib.style.use(style.io_chart_style)

    block_size = iosums[0].block_size
    xpos = numpy.arange(1, len(iosums) + 1, dtype='uint')

    ax = fig.add_subplot(111)

    # conversion coefficients from the stored bw units to MiBps and to IOPS
    # (bsize is in KiB, hence the KiBps intermediate)
    coef_mb = float(unit_conversion_coef(iosums[0].bw.units, "MiBps"))
    coef_iops = float(unit_conversion_coef(iosums[0].bw.units, "KiBps")) / block_size

    # for small blocks IOPS is the interesting metric, for large ones - bandwidth
    iops_primary = block_size < style.large_blocks

    coef = coef_iops if iops_primary else coef_mb
    ax.set_ylabel("IOPS" if iops_primary else "BW (MiBps)")

    vals = [iosum.bw.average * coef for iosum in iosums]

    # set correct x limits for primary IO spine
    min_io = min(iosum.bw.average - iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    max_io = max(iosum.bw.average + iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    border = (max_io - min_io) * extra_y_space
    io_lims = (min_io - border, max_io + border)

    ax.set_ylim(io_lims[0] * coef, io_lims[-1] * coef)
    ax.bar(xpos - width / 2, vals, width=width, color=colors.box_color, label=legend)

    # plot deviation and confidence error ranges
    err1_legend = err2_legend = None
    for pos, iosum in zip(xpos, iosums):
        # deviation range slightly left of bar center, confidence slightly right
        dev_bar_pos = pos - err_x_offset
        err1_legend = ax.errorbar(dev_bar_pos,
                                  iosum.bw.average * coef,
                                  iosum.bw.deviation * style.dev_range_x * coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color1)  # 'magenta'

        conf_bar_pos = pos + err_x_offset
        err2_legend = ax.errorbar(conf_bar_pos,
                                  iosum.bw.average * coef,
                                  iosum.bw.confidence * coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color2)  # 'teal'

    if style.grid:
        ax.grid(True)

    handles1, labels1 = ax.get_legend_handles_labels()

    handles1 += [err1_legend, err2_legend]
    labels1 += ["{}% dev".format(style.dev_perc),
                "{}% conf".format(int(100 * iosums[0].bw.confidence_level))]

    # extra y spine for latency on right side
    ax2 = ax.twinx()

    # plot median and 95 perc latency
    lat_coef_ms = float(unit_conversion_coef(iosums[0].lat.units, "ms"))
    ax2.plot(xpos, [iosum.lat.perc_50 * lat_coef_ms for iosum in iosums], label="lat med")
    ax2.plot(xpos, [iosum.lat.perc_95 * lat_coef_ms for iosum in iosums], label="lat 95%")

    for grid_line in ax2.get_ygridlines():
        grid_line.set_linestyle(":")

    # extra y spine for BW/IOPS on left side
    if style.extra_io_spine:
        ax3 = ax.twinx()
        if iops_log_spine:
            ax3.set_yscale('log')

        # secondary spine shows the metric that is NOT on the primary spine
        ax3.set_ylabel("BW (MiBps)" if iops_primary else "IOPS")
        secondary_coef = coef_mb if iops_primary else coef_iops
        ax3.set_ylim(io_lims[0] * secondary_coef, io_lims[1] * secondary_coef)
        ax3.spines["left"].set_position(("axes", extra_io_spine_x_offset))
        ax3.spines["left"].set_visible(True)
        ax3.yaxis.set_label_position('left')
        ax3.yaxis.set_ticks_position('left')
    else:
        ax3 = None

    ax2.set_ylabel("Latency (ms)")

    # legend box
    handles2, labels2 = ax2.get_legend_handles_labels()
    ax.legend(handles1 + handles2, labels1 + labels2,
              loc=legend_location,
              bbox_to_anchor=legend_bbox_to_anchor)

    # limit and label x spine
    ax.set_xlim(extra_x_space, len(iosums) + extra_x_space)
    ax.set_xticks(xpos)
    ax.set_xticklabels(["{0} * {1} = {2}".format(iosum.qd, iosum.nodes_count, iosum.qd * iosum.nodes_count)
                        for iosum in iosums])
    ax.set_xlabel("IO queue depth * test node count = total parallel requests")

    # apply log scales for X spines, if set
    if iops_log_spine:
        ax.set_yscale('log')

    if lat_log_spine:
        ax2.set_yscale('log')

    # adjust central box size to fit legend
    apply_style(fig, title, style, eng=False, no_legend=True)

    # override some styles
    fig.set_size_inches(*style.qd_chart_inches)
    fig.subplots_adjust(right=StyleProfile.subplot_adjust_r)

    if style.extra_io_spine:
        ax3.grid(False)
797
koder aka kdanilov108ac362017-01-19 20:17:16 +0200798
799# -------------------- REPORT HELPERS --------------------------------------------------------------------------------
800
801
class HTMLBlock:
    """One chunk of rendered HTML for the report, plus an optional sort key."""

    # class-level defaults; data/order_attr are replaced per instance in __init__
    data = None  # type: str
    js_links = []  # type: List[str]
    css_links = []  # type: List[str]
    order_attr = None  # type: Any

    def __init__(self, data: str, order_attr: Any = None) -> None:
        self.data = data
        self.order_attr = order_attr

    def __eq__(self, o: Any) -> bool:
        # blocks are considered equal when their sort keys match
        return self.order_attr == o.order_attr  # type: ignore

    def __lt__(self, o: Any) -> bool:
        # order blocks strictly by their sort keys
        return self.order_attr < o.order_attr  # type: ignore
817
818
class Table:
    """Accumulates an HTML table: one header row plus any number of data rows."""

    def __init__(self, header: List[str]) -> None:
        self.header = header
        self.data = []  # type: List[List[str]]

    def add_line(self, values: List[str]) -> None:
        """Append a single row of cell values."""
        self.data.append(values)

    def html(self):
        """Render the collected rows as an HTML table string."""
        return html.table("", self.header, self.data)
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200829
830
class Menu1st:
    """Top-level menu categories of the generated HTML report."""
    engineering = "Engineering"
    summary = "Summary"
    per_job = "Per Job"
koder aka kdanilov108ac362017-01-19 20:17:16 +0200835
836
class Menu2ndEng:
    """Second-level menu items under the 'Engineering' category."""
    iops_time = "IOPS(time)"
    hist = "IOPS/lat overall histogram"
    lat_time = "Lat(time)"
841
842
class Menu2ndSumm:
    """Second-level menu items under the 'Summary' category."""
    io_lat_qd = "IO & Lat vs QD"
845
846
# display order of the top-level menu categories in the generated report
menu_1st_order = [Menu1st.summary, Menu1st.engineering, Menu1st.per_job]
koder aka kdanilov108ac362017-01-19 20:17:16 +0200848
849
850# -------------------- REPORTS --------------------------------------------------------------------------------------
851
852
class Reporter(metaclass=abc.ABCMeta):
    """Base class for suite-level report section generators."""
    # suite type names (e.g. 'fio') this reporter can process
    suite_types = set()  # type: Set[str]

    @abc.abstractmethod
    def get_divs(self, suite: SuiteConfig, storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        """Yield (1st-level menu, 2nd-level menu, html block) tuples for the suite."""
        pass
859
860
class JobReporter(metaclass=abc.ABCMeta):
    """Base class for per-job report section generators."""

    # suite type names (e.g. 'fio') this reporter can process.
    # Renamed from the misspelled 'suite_type' for consistency with Reporter;
    # all subclasses in this module already override 'suite_types'.
    suite_types = set()  # type: Set[str]

    # deprecated misspelled alias, kept so any external reader of the old
    # attribute name keeps working
    suite_type = suite_types

    @abc.abstractmethod
    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        """Yield (1st-level menu, 2nd-level menu, html block) tuples for one job."""
        pass
870
871
872# Main performance report
class PerformanceSummary(Reporter):
    """Aggregated summary for storage"""
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200875
876
877# Main performance report
class IO_QD(Reporter):
    """Creates graph, which show how IOPS and Latency depend on QD"""
    suite_types = {'fio'}

    def get_divs(self, suite: SuiteConfig, rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        """Group jobs of the suite by everything except QD, then for every group
        large enough render one IOPS/BW/latency-vs-QD chart."""
        # job params (QD stripped) -> list of (suite, job) with that profile
        ts_map = defaultdict(list)  # type: Dict[FioJobParams, List[Tuple[SuiteConfig, FioJobConfig]]]
        # job params (QD stripped) -> (short summary, long summary) strings
        str_summary = {}  # type: Dict[FioJobParams, Tuple[str, str]]
        for job in rstorage.iter_job(suite):
            fjob = cast(FioJobConfig, job)
            fjob_no_qd = cast(FioJobParams, fjob.params.copy(qd=None))
            str_summary[fjob_no_qd] = (fjob_no_qd.summary, fjob_no_qd.long_summary)
            ts_map[fjob_no_qd].append((suite, fjob))

        for tpl, suites_jobs in ts_map.items():
            # a QD chart only makes sense with enough distinct QD points
            if len(suites_jobs) >= StyleProfile.min_iops_vs_qd_jobs:

                iosums = [make_iosum(rstorage, suite, job) for suite, job in suites_jobs]
                iosums.sort(key=lambda x: x.qd)
                summary, summary_long = str_summary[tpl]

                yield Menu1st.summary, Menu2ndSumm.io_lat_qd, \
                    HTMLBlock(html.H2(html.center("IOPS, BW, Lat = func(QD). " + summary_long)))

                ds = DataSource(suite_id=suite.storage_id,
                                job_id=summary,
                                node_id=AGG_TAG,
                                sensor="fio",
                                dev=AGG_TAG,
                                metric="io_over_qd",
                                tag=io_chart_format)

                fpath = io_chart(rstorage, ds, title="", legend="IOPS/BW", iosums=iosums)  # type: str
                yield Menu1st.summary, Menu2ndSumm.io_lat_qd, HTMLBlock(html.center(html.img(fpath)))
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200911
912
913# Linearization report
class IOPS_Bsize(Reporter):
    """Creates graphs, which show how IOPS and Latency depend on block size"""
916
917
class StatInfo(JobReporter):
    """Statistic info for job results: renders a table with bandwidth, IOPS
    and latency aggregates (average, deviation, percentiles, ADF test)."""
    suite_types = {'fio'}

    def get_divs(self, suite: SuiteConfig, job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        fjob = cast(FioJobConfig, job)
        io_sum = make_iosum(rstorage, suite, fjob)

        res = html.H2(html.center("Test summary - " + job.params.long_summary))
        stat_data_headers = ["Name", "Average ~ Dev", "Conf interval", "Median", "Mode", "Kurt / Skew", "95%", "99%",
                             "ADF test"]

        bw_target_units = 'Bps'
        bw_coef = float(unit_conversion_coef(io_sum.bw.units, bw_target_units))

        # ADF stationarity test: adfuller returns
        # (statistic, pvalue, usedlag, nobs, critical_values, icbest)
        adf_v, *_1, stats, _2 = adfuller(io_sum.bw.data)

        # find the strictest significance level at which the bw series
        # passes the test (statistic below the critical value)
        for v in ("1%", "5%", "10%"):
            if adf_v <= stats[v]:
                ad_test = v
                break
        else:
            ad_test = "Failed"

        # NOTE(review): for BW/IOPS the "95%"/"99%" columns show the 5th/1st
        # percentile (95%/99% of samples are above the value) - confirm intended
        bw_data = ["Bandwidth",
                   "{}{} ~ {}{}".format(b2ssize(io_sum.bw.average * bw_coef), bw_target_units,
                                        b2ssize(io_sum.bw.deviation * bw_coef), bw_target_units),
                   b2ssize(io_sum.bw.confidence * bw_coef) + bw_target_units,
                   b2ssize(io_sum.bw.perc_50 * bw_coef) + bw_target_units,
                   "-",
                   "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
                   b2ssize(io_sum.bw.perc_5 * bw_coef) + bw_target_units,
                   b2ssize(io_sum.bw.perc_1 * bw_coef) + bw_target_units,
                   ad_test]

        # IOPS row is derived from the same bw series (bsize is in KiB)
        iops_coef = float(unit_conversion_coef(io_sum.bw.units, 'KiBps')) / fjob.bsize
        iops_data = ["IOPS",
                     "{}IOPS ~ {}IOPS".format(b2ssize_10(io_sum.bw.average * iops_coef),
                                              b2ssize_10(io_sum.bw.deviation * iops_coef)),
                     b2ssize_10(io_sum.bw.confidence * iops_coef) + "IOPS",
                     b2ssize_10(io_sum.bw.perc_50 * iops_coef) + "IOPS",
                     "-",
                     "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
                     b2ssize_10(io_sum.bw.perc_5 * iops_coef) + "IOPS",
                     b2ssize_10(io_sum.bw.perc_1 * iops_coef) + "IOPS",
                     ad_test]

        lat_target_unit = 's'
        lat_coef = unit_conversion_coef(io_sum.lat.units, lat_target_unit)
        # latency: only percentiles are available, no avg/dev/ADF
        lat_data = ["Latency",
                    "-",
                    "-",
                    b2ssize_10(io_sum.lat.perc_50 * lat_coef) + lat_target_unit,
                    "-",
                    "-",
                    b2ssize_10(io_sum.lat.perc_95 * lat_coef) + lat_target_unit,
                    b2ssize_10(io_sum.lat.perc_99 * lat_coef) + lat_target_unit,
                    '-']

        # sensor usage
        stat_data = [iops_data, bw_data, lat_data]
        res += html.center(html.table("Load stats info", stat_data_headers, stat_data))
        yield Menu1st.per_job, job.summary, HTMLBlock(res)
koder aka kdanilova732a602017-02-01 20:29:56 +0200984
koder aka kdanilova732a602017-02-01 20:29:56 +0200985
def avg_dev_div(vec: numpy.ndarray, denom: numpy.ndarray, avg_ranges: int = 10) -> Tuple[float, float]:
    """Return the overall ratio of ``vec`` to ``denom`` and the spread of that ratio.

    Both arrays are cut into ``avg_ranges`` equal chunks; for each chunk whose
    denominator weight is at least half of the mean chunk weight a per-chunk
    ratio is computed. The result is (sum(vec) / sum(denom), sample standard
    deviation of the per-chunk ratios).
    """
    chunk = min(vec.size, denom.size) // avg_ranges
    assert chunk >= 1

    # a chunk only participates when it carries >= 50% of an average chunk's weight
    threshold = denom.sum() / denom.size * chunk * 0.5

    ratios = []
    for start in range(0, avg_ranges * chunk, chunk):
        weight = denom[start: start + chunk].sum()
        if weight >= threshold:
            ratios.append(vec[start: start + chunk].sum() / weight)

    assert len(ratios) > 1
    return vec.sum() / denom.sum(), numpy.std(ratios, ddof=1)
koder aka kdanilova732a602017-02-01 20:29:56 +0200999
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001000
class Resources(JobReporter):
    """Resource consumption report: how much net/disk/CPU the cluster spent
    per unit of service (IO operation or byte) delivered to the client."""
    suite_types = {'fio'}

    def get_divs(self, suite: SuiteConfig, job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        fjob = cast(FioJobConfig, job)
        io_sum = make_iosum(rstorage, suite, fjob)

        # client-side service totals: bytes transfered and IO operations made
        tot_io_coef = float(unit_conversion_coef(io_sum.bw.units, "Bps"))
        io_transfered = io_sum.bw.data * tot_io_coef
        ops_done = io_transfered / (fjob.bsize * float(unit_conversion_coef("KiBps", "Bps")))

        io_made = "Client IOP made"
        data_tr = "Client data transfered"

        # row name -> (formatted total, avg per service unit, deviation of avg)
        records = {
            io_made: (b2ssize_10(ops_done.sum()) + "OP", None, None),
            data_tr: (b2ssize(io_transfered.sum()) + "B", None, None)
        }  # type: Dict[str, Tuple[str, Optional[float], Optional[float]]]

        test_send = "Test nodes net send"
        test_recv = "Test nodes net recv"
        test_net = "Test nodes net total"
        test_send_pkt = "Test nodes send pkt"
        test_recv_pkt = "Test nodes recv pkt"
        test_net_pkt = "Test nodes total pkt"

        test_write = "Test nodes disk write"
        test_read = "Test nodes disk read"
        test_write_iop = "Test nodes write IOP"
        test_read_iop = "Test nodes read IOP"
        test_iop = "Test nodes IOP"
        test_rw = "Test nodes disk IO"

        storage_send = "Storage nodes net send"
        storage_recv = "Storage nodes net recv"
        storage_send_pkt = "Storage nodes send pkt"
        storage_recv_pkt = "Storage nodes recv pkt"
        storage_net = "Storage nodes net total"
        storage_net_pkt = "Storage nodes total pkt"

        storage_write = "Storage nodes disk write"
        storage_read = "Storage nodes disk read"
        storage_write_iop = "Storage nodes write IOP"
        storage_read_iop = "Storage nodes read IOP"
        storage_iop = "Storage nodes IOP"
        storage_rw = "Storage nodes disk IO"

        storage_cpu = "Storage nodes CPU"
        storage_cpu_s = "Storage nodes CPU s/IOP"
        storage_cpu_s_b = "Storage nodes CPU s/B"

        # (row name, sensor, metric, formatter, node roles, units, normalizer series)
        all_metrics = [
            (test_send, 'net-io', 'send_bytes', b2ssize, ['testnode'], "B", io_transfered),
            (test_recv, 'net-io', 'recv_bytes', b2ssize, ['testnode'], "B", io_transfered),
            (test_send_pkt, 'net-io', 'send_packets', b2ssize_10, ['testnode'], "pkt", ops_done),
            (test_recv_pkt, 'net-io', 'recv_packets', b2ssize_10, ['testnode'], "pkt", ops_done),

            (test_write, 'block-io', 'sectors_written', b2ssize, ['testnode'], "B", io_transfered),
            (test_read, 'block-io', 'sectors_read', b2ssize, ['testnode'], "B", io_transfered),
            (test_write_iop, 'block-io', 'writes_completed', b2ssize_10, ['testnode'], "OP", ops_done),
            (test_read_iop, 'block-io', 'reads_completed', b2ssize_10, ['testnode'], "OP", ops_done),

            (storage_send, 'net-io', 'send_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),
            (storage_recv, 'net-io', 'recv_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),
            (storage_send_pkt, 'net-io', 'send_packets', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
            (storage_recv_pkt, 'net-io', 'recv_packets', b2ssize_10, STORAGE_ROLES, "OP", ops_done),

            (storage_write, 'block-io', 'sectors_written', b2ssize, STORAGE_ROLES, "B", io_transfered),
            (storage_read, 'block-io', 'sectors_read', b2ssize, STORAGE_ROLES, "B", io_transfered),
            (storage_write_iop, 'block-io', 'writes_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
            (storage_read_iop, 'block-io', 'reads_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
        ]

        all_agg = {}

        for vname, sensor, metric, ffunc, roles, units, service_provided_count in all_metrics:
            res_ts = summ_sensors(rstorage, roles, sensor=sensor, metric=metric, time_range=job.reliable_info_range_s)
            if res_ts is None:
                # sensor data is not collected for these roles - skip the row
                continue

            data = res_ts.data
            if units == "B":
                data = data * float(unit_conversion_coef(res_ts.units, "B"))

            records[vname] = (ffunc(data.sum()) + units, *avg_dev_div(data, service_provided_count))
            all_agg[vname] = data

        # cpu usage
        nodes_count = len(list(find_nodes_by_roles(rstorage, STORAGE_ROLES)))
        cpu_ts = get_cluster_cpu_load(rstorage, STORAGE_ROLES, job.reliable_info_range_s)

        # busy CPU seconds per sample, summed over all storage nodes
        cpus_used_sec = (1.0 - cpu_ts['idle'].data / cpu_ts['total'].data) * nodes_count
        used_s = b2ssize_10(cpus_used_sec.sum()) + 's'

        all_agg[storage_cpu] = cpus_used_sec
        records[storage_cpu_s] = (used_s, *avg_dev_div(cpus_used_sec, ops_done))
        records[storage_cpu_s_b] = (used_s, *avg_dev_div(cpus_used_sec, io_transfered))

        # derived rows: read + write / send + recv totals
        cums = [
            (test_iop, test_read_iop, test_write_iop, b2ssize_10, "OP", ops_done),
            (test_rw, test_read, test_write, b2ssize, "B", io_transfered),
            (test_net, test_send, test_recv, b2ssize, "B", io_transfered),
            (test_net_pkt, test_send_pkt, test_recv_pkt, b2ssize_10, "pkt", ops_done),

            (storage_iop, storage_read_iop, storage_write_iop, b2ssize_10, "OP", ops_done),
            (storage_rw, storage_read, storage_write, b2ssize, "B", io_transfered),
            (storage_net, storage_send, storage_recv, b2ssize, "B", io_transfered),
            (storage_net_pkt, storage_send_pkt, storage_recv_pkt, b2ssize_10, "pkt", ops_done),
        ]

        for vname, name1, name2, ffunc, units, service_provided_masked in cums:
            if name1 in all_agg and name2 in all_agg:
                agg = all_agg[name1] + all_agg[name2]
                records[vname] = (ffunc(agg.sum()) + units, *avg_dev_div(agg, service_provided_masked))

        # layout of the HTML table: plain strings are section headers, tuples are
        # (left row, right row) pairs ('IOP' column vs 'Bytes' column)
        table_structure = [
            "Service provided",
            (io_made, data_tr),
            "Test nodes total load",
            (test_send_pkt, test_send),
            (test_recv_pkt, test_recv),
            (test_net_pkt, test_net),
            (test_write_iop, test_write),
            (test_read_iop, test_read),
            (test_iop, test_rw),
            "Storage nodes resource consumed",
            (storage_send_pkt, storage_send),
            (storage_recv_pkt, storage_recv),
            (storage_net_pkt, storage_net),
            (storage_write_iop, storage_write),
            (storage_read_iop, storage_read),
            (storage_iop, storage_rw),
            (storage_cpu_s, storage_cpu_s_b),
        ]  # type: List[Union[str, Tuple[Optional[str], Optional[str]]]]

        yield Menu1st.per_job, job.summary, HTMLBlock(html.H2(html.center("Resources usage")))

        doc = xmlbuilder3.XMLBuilder("table",
                                     **{"class": "table table-bordered table-striped table-condensed table-hover",
                                        "style": "width: auto;"})

        with doc.thead:
            with doc.tr:
                [doc.th(header) for header in ["Resource", "Usage count", "To service"] * 2]

        cols = 6

        # long row names shortened to fit the table; 'Test nodes X' -> 'X'
        short_name = {
            name: (name if name in {io_made, data_tr} else " ".join(name.split()[2:]).capitalize())
            for name in records.keys()
        }

        short_name[storage_cpu_s] = "CPU (s/IOP)"
        short_name[storage_cpu_s_b] = "CPU (s/B)"

        with doc.tbody:
            with doc.tr:
                doc.td(colspan=str(cols // 2)).center.b("Operations")
                doc.td(colspan=str(cols // 2)).center.b("Bytes")

            for line in table_structure:
                with doc.tr:
                    if isinstance(line, str):
                        with doc.td(colspan=str(cols)):
                            doc.center.b(line)
                    else:
                        for name in line:
                            if name is None:
                                doc.td("-", colspan=str(cols // 2))
                                continue

                            amount_s, avg, dev = records[name]

                            if name in (storage_cpu_s, storage_cpu_s_b) and avg is not None:
                                dev_s = str(int(dev * 100 / avg)) + "%" if avg > 1E-9 else b2ssize_10(dev) + 's'
                                rel_val_s = "{}s ~ {}".format(b2ssize_10(avg), dev_s)
                            else:
                                if avg is None:
                                    rel_val_s = '-'
                                else:
                                    avg_s = int(avg) if avg > 10 else '{:.1f}'.format(avg)
                                    if avg > 1E-5:
                                        dev_s = str(int(dev * 100 / avg)) + "%"
                                    else:
                                        dev_s = int(dev) if dev > 10 else '{:.1f}'.format(dev)
                                    rel_val_s = "{} ~ {}".format(avg_s, dev_s)

                            doc.td(short_name[name], align="left")
                            doc.td(amount_s, align="right")

                            # color-code the per-service-unit cost: green is cheap, red is expensive
                            if avg is None or avg < 0.9:
                                doc.td(rel_val_s, align="right")
                            elif avg < 2.0:
                                doc.td(align="right").font(rel_val_s, color='green')
                            elif avg < 5.0:
                                doc.td(align="right").font(rel_val_s, color='orange')
                            else:
                                doc.td(align="right").font(rel_val_s, color='red')

        res = xmlbuilder3.tostr(doc).split("\n", 1)[1]
        yield Menu1st.per_job, job.summary, HTMLBlock(html.center(res))

        # bar charts of resource cost per service unit, grouped by metric kind
        iop_names = [test_write_iop, test_read_iop, test_iop,
                     storage_write_iop, storage_read_iop, storage_iop]

        bytes_names = [test_write, test_read, test_rw,
                       test_send, test_recv, test_net,
                       storage_write, storage_read, storage_rw,
                       storage_send, storage_recv, storage_net]

        net_pkt_names = [test_send_pkt, test_recv_pkt, test_net_pkt,
                         storage_send_pkt, storage_recv_pkt, storage_net_pkt]

        for tp, names in [('iop', iop_names), ("bytes", bytes_names), ('Net packets per IOP', net_pkt_names)]:
            vals = []
            devs = []
            avail_names = []
            for name in names:
                if name in records:
                    avail_names.append(name)
                    _, avg, dev = records[name]
                    vals.append(avg)
                    devs.append(dev)

            if not avail_names:
                # no sensor data at all for this metric group - nothing to plot
                continue

            # synchronously sort values and names, values is a key.
            # NOTE: must zip avail_names (not the full candidate list) - when a
            # metric is missing, using 'names' would pair labels with wrong values
            vals, sorted_names, devs = map(list, zip(*sorted(zip(vals, avail_names, devs))))

            ds = DataSource(suite_id=suite.storage_id,
                            job_id=job.storage_id,
                            node_id=AGG_TAG,
                            sensor='resources',
                            dev=AGG_TAG,
                            metric=tp.replace(' ', "_") + '2service_bar',
                            tag=default_format)

            fname = plot_simple_bars(rstorage, ds,
                                     "Resource consumption / service provided, " + tp,
                                     [name.replace(" nodes", "") for name in sorted_names],
                                     vals, devs)

            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
1246
1247
class BottleNeck(JobReporter):
    """Counts how often storage-node disk IO queues exceed a threshold during
    the reliable part of the job - a rough bottleneck indicator.

    Currently only logs the counts and yields an empty block (WIP).
    """
    suite_types = {'fio'}

    def get_divs(self, suite: SuiteConfig, job: JobConfig, rstorage: ResultStorage) -> \
            Iterator[Tuple[str, str, HTMLBlock]]:

        nodes = list(find_nodes_by_roles(rstorage, STORAGE_ROLES))

        sensor = 'block-io'
        metric = 'io_queue'
        # queue depth above which a sample counts as saturated
        bn_val = 16

        for node in nodes:
            bn = 0
            tot = 0
            for _, ds in rstorage.iter_sensors(node_id=node.node_id, sensor=sensor, metric=metric):
                # TODO: hardcoded device list - should come from node/storage config
                if ds.dev in ('sdb', 'sdc', 'sdd', 'sde'):
                    data = rstorage.load_sensor(ds)
                    # clip the sensor series to the job's reliable time range
                    p1 = job.reliable_info_range_s[0] * unit_conversion_coef('s', data.time_units)
                    p2 = job.reliable_info_range_s[1] * unit_conversion_coef('s', data.time_units)
                    idx1, idx2 = numpy.searchsorted(data.times, (p1, p2))
                    bn += (data.data[idx1: idx2] > bn_val).sum()
                    tot += idx2 - idx1
            # was a debug print(); route through logging instead of stdout
            logging.getLogger(__name__).debug("%s: %s of %s samples over queue depth %s",
                                              node, bn, tot, bn_val)

        yield Menu1st.per_job, job.summary, HTMLBlock("")
koder aka kdanilova732a602017-02-01 20:29:56 +02001275
1276
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001277# CPU load
class CPULoadPlot(JobReporter):
    """Plot cluster-wide CPU time split by category for storage and test nodes."""

    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        # one plot per node group
        for node_kind, node_roles in (('storage', STORAGE_ROLES), ('test', ['testnode'])):
            load = get_cluster_cpu_load(rstorage, node_roles, job.reliable_info_range_s)

            # express each interesting CPU category as a percentage of total time
            total = load['total'].data
            curves = []
            for cat_name, cat_ts in load.items():
                if cat_name in {'user', 'sys', 'irq', 'idle'}:
                    curves.append((cat_name, cat_ts.data * 100 / total))

            src = load['idle'].source(job_id=job.storage_id,
                                      suite_id=suite.storage_id,
                                      metric='allcpu',
                                      tag=node_kind + '.plt.' + default_format)
            img_path = plot_simple_over_time(rstorage,
                                             src,
                                             tss=curves,
                                             average=True,
                                             ylabel="CPU time %",
                                             title="{} nodes CPU usage".format(node_kind.capitalize()))

            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(img_path))
1300
1301
1302# IO time and QD
class QDIOTimeHeatmap(JobReporter):
    """Heatmaps of block-device queue depth, average write block size and IO
    time for storage / journal / test devices over the job's reliable range."""

    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        # TODO: fix this hardcode, need to track what devices are actually used on test and storage nodes
        # use saved storage info in nodes

        journal_devs = None
        storage_devs = None
        test_nodes_devs = ['rbd0']

        # Collect journal/storage device sets from node params.  All storage
        # nodes are expected to share an identical device layout, hence the asserts.
        for node in find_nodes_by_roles(rstorage, STORAGE_ROLES):
            cjd = set(node.params['ceph_journal_devs'])
            if journal_devs is None:
                journal_devs = cjd
            else:
                assert journal_devs == cjd, "{!r} != {!r}".format(journal_devs, cjd)

            csd = set(node.params['ceph_storage_devs'])
            if storage_devs is None:
                storage_devs = csd
            else:
                assert storage_devs == csd, "{!r} != {!r}".format(storage_devs, csd)

        # reliable_info_range is divided by 1000 here, while the plots below use
        # reliable_info_range_s elsewhere - presumably ms -> s conversion; confirm
        trange = (job.reliable_info_range[0] // 1000, job.reliable_info_range[1] // 1000)

        for name, devs, roles in [('storage', storage_devs, STORAGE_ROLES),
                                  ('journal', journal_devs, STORAGE_ROLES),
                                  ('test', test_nodes_devs, ['testnode'])]:

            yield Menu1st.per_job, job.summary, \
                HTMLBlock(html.H2(html.center("{} IO heatmaps".format(name.capitalize()))))

            # QD heatmap
            ioq2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                       metric='io_queue', time_range=trange)

            ds = DataSource(suite.storage_id, job.storage_id, AGG_TAG, 'block-io', name, tag="hmap." + default_format)

            fname = plot_hmap_from_2d(rstorage,
                                      ds(metric='io_queue'),
                                      ioq2d,
                                      ylabel="IO QD",
                                      title=name.capitalize() + " devs QD",
                                      xlabel='Time',
                                      bins=StyleProfile.qd_bins)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))

            # Block size heatmap: average write size = sectors_written / writes_completed
            wc2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                      metric='writes_completed', time_range=trange)
            wc2d[wc2d < 1E-3] = 1  # clamp (near-)zero counts to avoid division by zero below
            sw2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                      metric='sectors_written', time_range=trange)
            # NOTE(review): the /1024 implies a particular sector-size/unit assumption - confirm
            data2d = sw2d / wc2d / 1024
            fname = plot_hmap_from_2d(rstorage,
                                      ds(metric='wr_block_size'),
                                      data2d,
                                      ylabel="IO bsize, KiB",
                                      title=name.capitalize() + " write block size",
                                      xlabel='Time',
                                      bins=StyleProfile.block_size_bins)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))

            # iotime heatmap
            wtime2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                         metric='io_time', time_range=trange)
            fname = plot_hmap_from_2d(rstorage,
                                      ds(metric='io_time'),
                                      wtime2d,
                                      ylabel="IO time (ms) per second",
                                      title=name.capitalize() + " iotime",
                                      xlabel='Time',
                                      bins=StyleProfile.iotime_bins)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
1380
1381
koder aka kdanilov108ac362017-01-19 20:17:16 +02001382# IOPS/latency over test time for each job
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001383class LoadToolResults(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001384 """IOPS/latency during test"""
koder aka kdanilova732a602017-02-01 20:29:56 +02001385 suite_types = {'fio'}
koder aka kdanilov108ac362017-01-19 20:17:16 +02001386
koder aka kdanilova732a602017-02-01 20:29:56 +02001387 def get_divs(self,
1388 suite: SuiteConfig,
1389 job: JobConfig,
1390 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilov108ac362017-01-19 20:17:16 +02001391
koder aka kdanilova732a602017-02-01 20:29:56 +02001392 fjob = cast(FioJobConfig, job)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001393
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001394 yield Menu1st.per_job, job.summary, HTMLBlock(html.H2(html.center("Load tool results")))
1395
koder aka kdanilova732a602017-02-01 20:29:56 +02001396 agg_io = get_aggregated(rstorage, suite, fjob, "bw")
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001397 if fjob.bsize >= DefStyleProfile.large_blocks:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001398 title = "Fio measured Bandwidth over time"
koder aka kdanilova732a602017-02-01 20:29:56 +02001399 units = "MiBps"
kdanylov aka koder45183182017-04-30 23:55:40 +03001400 agg_io.data //= int(unit_conversion_coef(units, agg_io.units))
koder aka kdanilova732a602017-02-01 20:29:56 +02001401 else:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001402 title = "Fio measured IOPS over time"
kdanylov aka koder45183182017-04-30 23:55:40 +03001403 agg_io.data //= (int(unit_conversion_coef("KiBps", agg_io.units)) * fjob.bsize)
koder aka kdanilova732a602017-02-01 20:29:56 +02001404 units = "IOPS"
koder aka kdanilov108ac362017-01-19 20:17:16 +02001405
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001406 fpath = plot_v_over_time(rstorage, agg_io.source(tag='ts.' + default_format), title, units, agg_io) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001407 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001408
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001409 agg_lat = get_aggregated(rstorage, suite, fjob, "lat").copy()
1410 TARGET_UNITS = 'ms'
1411 coef = unit_conversion_coef(agg_lat.units, TARGET_UNITS)
1412 agg_lat.histo_bins = agg_lat.histo_bins.copy() * float(coef)
1413 agg_lat.units = TARGET_UNITS
koder aka kdanilov108ac362017-01-19 20:17:16 +02001414
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001415 fpath = plot_lat_over_time(rstorage, agg_lat.source(tag='ts.' + default_format), "Latency",
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001416 agg_lat, ylabel="Latency, " + agg_lat.units) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001417 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001418
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001419 fpath = plot_histo_heatmap(rstorage,
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001420 agg_lat.source(tag='hmap.' + default_format),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001421 "Latency heatmap",
1422 agg_lat,
1423 ylabel="Latency, " + agg_lat.units,
1424 xlabel='Test time') # type: str
koder aka kdanilov108ac362017-01-19 20:17:16 +02001425
koder aka kdanilova732a602017-02-01 20:29:56 +02001426 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001427
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001428 fjob = cast(FioJobConfig, job)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001429
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001430 # agg_lat = get_aggregated(rstorage, suite, fjob, "lat")
1431 # # bins_edges = numpy.array(get_lat_vals(agg_lat.data.shape[1]), dtype='float32') / 1000 # convert us to ms
1432 # lat_stat_prop = calc_histo_stat_props(agg_lat, bins_edges=None, rebins_count=StyleProfile.hist_lat_boxes)
1433 #
1434 # long_summary = cast(FioJobParams, fjob.params).long_summary
1435 #
1436 # title = "Latency distribution"
1437 # units = "ms"
1438 #
1439 # fpath = plot_hist(rstorage, agg_lat.source(tag='hist.svg'), title, units, lat_stat_prop) # type: str
1440 # yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001441
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001442 agg_io = get_aggregated(rstorage, suite, fjob, "bw")
koder aka kdanilov108ac362017-01-19 20:17:16 +02001443
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001444 if fjob.bsize >= DefStyleProfile.large_blocks:
1445 title = "BW distribution"
1446 units = "MiBps"
1447 agg_io.data //= int(unit_conversion_coef(units, agg_io.units))
1448 else:
1449 title = "IOPS distribution"
1450 agg_io.data //= (int(unit_conversion_coef("KiBps", agg_io.units)) * fjob.bsize)
1451 units = "IOPS"
1452
1453 io_stat_prop = calc_norm_stat_props(agg_io, bins_count=StyleProfile.hist_boxes)
1454 fpath = plot_hist(rstorage, agg_io.source(tag='hist.' + default_format),
1455 title, units, io_stat_prop) # type: str
1456 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001457
1458
1459# Cluster load over test time
koder aka kdanilova732a602017-02-01 20:29:56 +02001460class ClusterLoad(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001461 """IOPS/latency during test"""
1462
koder aka kdanilova732a602017-02-01 20:29:56 +02001463 # TODO: units should came from sensor
koder aka kdanilov108ac362017-01-19 20:17:16 +02001464 storage_sensors = [
kdanylov aka koder45183182017-04-30 23:55:40 +03001465 ('block-io', 'reads_completed', "Read", 'iop'),
1466 ('block-io', 'writes_completed', "Write", 'iop'),
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001467 ('block-io', 'sectors_read', "Read", 'MiB'),
1468 ('block-io', 'sectors_written', "Write", 'MiB'),
koder aka kdanilov108ac362017-01-19 20:17:16 +02001469 ]
1470
koder aka kdanilova732a602017-02-01 20:29:56 +02001471 def get_divs(self,
1472 suite: SuiteConfig,
1473 job: JobConfig,
1474 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilova732a602017-02-01 20:29:56 +02001475 yield Menu1st.per_job, job.summary, HTMLBlock(html.H2(html.center("Cluster load")))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001476
kdanylov aka koder45183182017-04-30 23:55:40 +03001477 for sensor, metric, op, units in self.storage_sensors:
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001478 ts = summ_sensors(rstorage, STORAGE_ROLES, sensor, metric, job.reliable_info_range_s)
koder aka kdanilova732a602017-02-01 20:29:56 +02001479 ds = DataSource(suite_id=suite.storage_id,
1480 job_id=job.storage_id,
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001481 node_id="storage",
koder aka kdanilova732a602017-02-01 20:29:56 +02001482 sensor=sensor,
1483 dev=AGG_TAG,
1484 metric=metric,
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001485 tag="ts." + default_format)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001486
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001487 data = ts.data if units != 'MiB' else ts.data * float(unit_conversion_coef(ts.units, 'MiB'))
koder aka kdanilova732a602017-02-01 20:29:56 +02001488 ts = TimeSeries(name="",
kdanylov aka koder45183182017-04-30 23:55:40 +03001489 times=numpy.arange(*job.reliable_info_range_s),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001490 data=data,
koder aka kdanilova732a602017-02-01 20:29:56 +02001491 raw=None,
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001492 units=units if ts.units is None else ts.units,
1493 time_units=ts.time_units,
1494 source=ds,
1495 histo_bins=ts.histo_bins)
kdanylov aka koder0e0cfcb2017-03-27 22:19:09 +03001496
kdanylov aka koder45183182017-04-30 23:55:40 +03001497 sensor_title = "{} {}".format(op, units)
kdanylov aka koder736e5c12017-05-07 17:27:14 +03001498 fpath = plot_v_over_time(rstorage, ds, sensor_title, units, ts=ts) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001499 yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001500
1501
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001502
1503# Node load over test time
class NodeLoad(Reporter):
    """Per-node load over test time (stub - not implemented yet)."""
1506
1507
1508# Ceph cluster summary
class CephClusterSummary(Reporter):
    """Ceph cluster summary report (stub - not implemented yet)."""
1511
1512
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001513# TODO: Ceph operation breakout report
1514# TODO: Resource consumption for different type of test
1515
1516
koder aka kdanilov108ac362017-01-19 20:17:16 +02001517# ------------------------------------------ REPORT STAGES -----------------------------------------------------------
1518
1519
class HtmlReportStage(Stage):
    """Render all collected test results into a static HTML report."""

    priority = StepOrder.REPORT

    def run(self, ctx: TestRun) -> None:
        """Run every job/suite reporter and write index.html + main.css."""
        rstorage = ResultStorage(ctx.storage)

        job_reporters = [StatInfo(), Resources(), LoadToolResults(), ClusterLoad(), CPULoadPlot(),
                         QDIOTimeHeatmap()]  # type: List[JobReporter]
        reporters = [IO_QD()]  # type: List[Reporter]

        root_dir = os.path.dirname(os.path.dirname(wally.__file__))
        doc_templ_path = os.path.join(root_dir, "report_templates/index.html")
        css_file_src = os.path.join(root_dir, "report_templates/main.css")

        # context managers - the previous open(...).read() leaked the file handles
        with open(doc_templ_path, "rt") as fd:
            report_template = fd.read()
        with open(css_file_src, "rt") as fd:
            css_file = fd.read()

        menu_block = []  # type: List[str]
        content_block = []  # type: List[str]
        link_idx = 0

        # matplotlib.rcParams.update(ctx.config.reporting.matplotlib_params.raw())
        # ColorProfile.__dict__.update(ctx.config.reporting.colors.raw())
        # StyleProfile.__dict__.update(ctx.config.reporting.style.raw())

        # first-level menu name -> second-level name -> rendered html blocks
        items = defaultdict(lambda: defaultdict(list))  # type: Dict[str, Dict[str, List[HTMLBlock]]]
        DEBUG = False
        # TODO: filter reporters
        for suite in rstorage.iter_suite(FioTest.name):
            all_jobs = list(rstorage.iter_job(suite))
            all_jobs.sort(key=lambda job: job.params)
            for job in all_jobs:
                # NOTE(review): removed the leftover debug filter
                # (if 'rwd16384_qd1' == job.summary) which limited the report to one job
                try:
                    for reporter in job_reporters:
                        logger.debug("Start reporter %s on job %s suite %s",
                                     reporter.__class__.__name__, job.summary, suite.test_type)
                        # 'html_block' instead of 'html' - the old name shadowed the html module
                        for block, item, html_block in reporter.get_divs(suite, job, rstorage):
                            items[block][item].append(html_block)
                        if DEBUG:
                            break
                except Exception:
                    logger.exception("Failed to generate report for %s", job)

            for reporter in reporters:
                try:
                    logger.debug("Start reporter %s on suite %s", reporter.__class__.__name__, suite.test_type)
                    for block, item, html_block in reporter.get_divs(suite, rstorage):
                        items[block][item].append(html_block)
                except Exception:
                    logger.exception("Failed to generate report")

            if DEBUG:
                break

        logger.debug("Generating result html")

        # build the collapsible two-level menu and matching content divs
        for idx_1st, menu_1st in enumerate(sorted(items, key=lambda x: menu_1st_order.index(x))):
            menu_block.append(
                '<a href="#item{}" class="nav-group" data-toggle="collapse" data-parent="#MainMenu">{}</a>'
                .format(idx_1st, menu_1st)
            )
            menu_block.append('<div class="collapse" id="item{}">'.format(idx_1st))
            for menu_2nd in sorted(items[menu_1st]):
                menu_block.append('    <a href="#content{}" class="nav-group-item">{}</a>'
                                  .format(link_idx, menu_2nd))
                content_block.append('<div id="content{}">'.format(link_idx))
                content_block.extend("    " + x.data for x in items[menu_1st][menu_2nd])
                content_block.append('</div>')
                link_idx += 1
            menu_block.append('</div>')

        report = report_template.replace("{{{menu}}}", ("\n" + " " * 16).join(menu_block))
        report = report.replace("{{{content}}}", ("\n" + " " * 16).join(content_block))
        report_path = rstorage.put_report(report, "index.html")
        rstorage.put_report(css_file, "main.css")
        logger.info("Report is stored into %r", report_path)