import os
import abc
import logging
import warnings
from io import BytesIO
from functools import wraps
from collections import defaultdict
from typing import Dict, Any, Iterator, Tuple, cast, List, Callable, Set

import numpy
import scipy.stats

# import matplotlib
# matplotlib.use('GTKAgg')

from matplotlib.figure import Figure
import matplotlib.pyplot as plt
from matplotlib import gridspec

from cephlib.common import float2str
from cephlib.plot import plot_hmap_with_y_histo, hmap_from_2d

import wally

from . import html
from .stage import Stage, StepOrder
from .test_run_class import TestRun
from .hlstorage import ResultStorage
from .node_interfaces import NodeInfo
from .utils import b2ssize, b2ssize_10, STORAGE_ROLES, unit_conversion_coef
from .statistic import (calc_norm_stat_props, calc_histo_stat_props, moving_average, moving_dev,
                        hist_outliers_perc, find_ouliers_ts, approximate_curve)
from .result_classes import (StatProps, DataSource, TimeSeries, NormStatProps, HistoStatProps, SuiteConfig)
from .suits.io.fio import FioTest, FioJobConfig
from .suits.io.fio_job import FioJobParams
from .suits.job import JobConfig
from .data_selectors import get_aggregated, AGG_TAG, summ_sensors, find_sensors_to_2d, find_nodes_by_roles


with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    import seaborn


logger = logging.getLogger("wally")


# ---------------- CONSTS ---------------------------------------------------------------------------------------------


DEBUG = False
LARGE_BLOCKS = 256


# ---------------- PROFILES ------------------------------------------------------------------------------------------


# these are the default values; the real values are loaded from the config

class ColorProfile:
    primary_color = 'b'
    suppl_color1 = 'teal'
    suppl_color2 = 'magenta'
    suppl_color3 = 'orange'
    box_color = 'y'
    err_color = 'red'

    noise_alpha = 0.3
    subinfo_alpha = 0.7

    imshow_colormap = None  # type: str


default_format = 'svg'


class StyleProfile:
    dpi = 80
    grid = True
    tide_layout = False
    hist_boxes = 10
    hist_lat_boxes = 25
    hm_hist_bins_count = 25
    hm_x_slots = 25
    min_points_for_dev = 5

    dev_range_x = 2.0
    dev_perc = 95

    point_shape = 'o'
    err_point_shape = '*'

    avg_range = 20
    approx_average = True

    curve_approx_level = 6
    curve_approx_points = 100
    assert avg_range >= min_points_for_dev

    # figure size in inches
    figsize = (8, 4)
    figsize_long = (5.5, 3)

    subplot_adjust_r = 0.75
    subplot_adjust_r_no_leg = 0.9
    title_font_size = 10

    extra_io_spine = True

    legend_for_eng = True
    # heatmap_interpolation = '1d'
    heatmap_interpolation = None
    heatmap_interpolation_points = 300
    outliers_q_nd = 3.0
    outliers_hide_q_nd = 4.0
    outliers_lat = (0.01, 0.9)

    violin_instead_of_box = True
    violin_point_count = 30000

    heatmap_colorbar = False

    min_iops_vs_qd_jobs = 3

    qd_bins = [0, 1, 2, 4, 6, 8, 12, 16, 20, 26, 32, 40, 48, 56, 64, 96, 128]
    iotime_bins = list(range(0, 1030, 50))
    block_size_bins = [0, 2, 4, 8, 16, 32, 48, 64, 96, 128, 192, 256, 384, 512, 1024, 2048]


DefColorProfile = ColorProfile()
DefStyleProfile = StyleProfile()
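
# The profiles above are plain attribute holders and, per the note above, the real values are
# expected to come from the config. A minimal, illustrative sketch of overriding a few defaults
# before building a report; it is kept as a comment so importing this module has no side
# effects, and `cfg` with its keys is hypothetical -- the actual config plumbing lives
# elsewhere in wally:
#
#     cfg = {"hist_boxes": 20, "dpi": 120, "primary_color": "g"}
#     for name, val in cfg.items():
#         if hasattr(StyleProfile, name):
#             setattr(StyleProfile, name, val)
#         elif hasattr(ColorProfile, name):
#             setattr(ColorProfile, name, val)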


# ---------------- STRUCTS -------------------------------------------------------------------------------------------


# TODO: needs to be revised to use StatProps fields instead
class StoragePerfSummary:
    def __init__(self, name: str) -> None:
        self.direct_iops_r_max = 0  # type: int
        self.direct_iops_w_max = 0  # type: int

        # 64k is used instead of 4k to feed caches faster
        self.direct_iops_w64_max = 0  # type: int

        self.rws4k_10ms = 0  # type: int
        self.rws4k_30ms = 0  # type: int
        self.rws4k_100ms = 0  # type: int
        self.bw_write_max = 0  # type: int
        self.bw_read_max = 0  # type: int

        self.bw = None  # type: float
        self.iops = None  # type: float
        self.lat = None  # type: float
        self.lat_50 = None  # type: float
        self.lat_95 = None  # type: float


class IOSummary:
    def __init__(self,
                 qd: int,
                 block_size: int,
                 nodes_count: int,
                 bw: NormStatProps,
                 lat: HistoStatProps) -> None:

        self.qd = qd
        self.nodes_count = nodes_count
        self.block_size = block_size

        self.bw = bw
        self.lat = lat


# -------------- AGGREGATION AND STAT FUNCTIONS ----------------------------------------------------------------------

def make_iosum(rstorage: ResultStorage, suite: SuiteConfig, job: FioJobConfig) -> IOSummary:
    lat = get_aggregated(rstorage, suite, job, "lat")
    io = get_aggregated(rstorage, suite, job, "bw")

    return IOSummary(job.qd,
                     nodes_count=len(suite.nodes_ids),
                     block_size=job.bsize,
                     lat=calc_histo_stat_props(lat, rebins_count=StyleProfile.hist_boxes),
                     bw=calc_norm_stat_props(io, StyleProfile.hist_boxes))


def is_sensor_numarray(sensor: str, metric: str) -> bool:
    """Returns True if the sensor provides a one-dimensional array of numeric values, one number per measurement."""
    return True


LEVEL_SENSORS = {("block-io", "io_queue"),
                 ("system-cpu", "procs_blocked"),
                 ("system-cpu", "procs_queue")}


def is_level_sensor(sensor: str, metric: str) -> bool:
    """Returns True if the sensor measures a level of some kind, e.g. queue depth."""
    return (sensor, metric) in LEVEL_SENSORS


def is_delta_sensor(sensor: str, metric: str) -> bool:
    """Returns True if the sensor provides deltas of a cumulative value, e.g. IO completed in a given period."""
    return not is_level_sensor(sensor, metric)

# -------------- PLOT HELPER FUNCTIONS -------------------------------------------------------------------------------

def get_emb_image(fig: Figure, format: str, **opts) -> bytes:
    bio = BytesIO()
    if format == 'svg':
        fig.savefig(bio, format='svg', **opts)
        img_start = "<!-- Created with matplotlib (http://matplotlib.org/) -->"
        return bio.getvalue().decode("utf8").split(img_start, 1)[1].encode("utf8")
    else:
        fig.savefig(bio, format=format, **opts)
        return bio.getvalue()
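
# A minimal usage sketch for get_emb_image (commented out so the module stays side-effect free
# on import): render a trivial plot and embed the stripped SVG body into an ad-hoc HTML
# fragment. Only names defined above are used; the wrapping <div> is not part of wally:
#
#     fig = plt.figure(figsize=StyleProfile.figsize)
#     fig.add_subplot(111).plot([1, 2, 3], [1, 4, 9])
#     svg_body = get_emb_image(fig, format='svg', dpi=StyleProfile.dpi)
#     html_fragment = "<div>" + svg_body.decode("utf8") + "</div>"
#     plt.close(fig)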


def provide_plot(func: Callable[..., None]) -> Callable[..., str]:
    @wraps(func)
    def closure1(storage: ResultStorage,
                 path: DataSource,
                 *args, **kwargs) -> str:
        fpath = storage.check_plot_file(path)
        if not fpath:
            format = path.tag.split(".")[-1]
            fig = plt.figure(figsize=StyleProfile.figsize)
            func(fig, *args, **kwargs)
            fpath = storage.put_plot_file(get_emb_image(fig, format=format, dpi=DefStyleProfile.dpi), path)
            logger.debug("Plot %s saved to %r", path, fpath)
            plt.close(fig)
        return fpath
    return closure1
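
# How the decorator changes the call signature, as a commented sketch: a plotter defined as
# `def plot_xxx(fig, ...)` is invoked as `plot_xxx(rstorage, data_source, ...)` -- the wrapper
# creates the figure, calls the plotter, stores the rendered image under `data_source` and
# returns the storage path (or the cached path if the image already exists). `rstorage` is
# assumed to exist at the call site; the DataSource field values below are placeholders:
#
#     ds = DataSource(suite_id="...", job_id="...", node_id=AGG_TAG, sensor="fio",
#                     dev=AGG_TAG, metric="example", tag="svg")
#     fpath = plot_hist(rstorage, ds, "Histogram title", "ms", some_stat_props)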


def apply_style(fig: Figure, title: str, style: StyleProfile, eng: bool = True, no_legend: bool = False) -> None:

    for ax in fig.axes:
        ax.grid(style.grid)

    if (style.legend_for_eng or not eng) and not no_legend:
        fig.subplots_adjust(right=StyleProfile.subplot_adjust_r)
        legend_location = "center left"
        legend_bbox_to_anchor = (1.03, 0.81)
        for ax in fig.axes:
            ax.legend(loc=legend_location, bbox_to_anchor=legend_bbox_to_anchor)
    else:
        fig.subplots_adjust(right=StyleProfile.subplot_adjust_r_no_leg)

    if style.tide_layout:
        fig.set_tight_layout(True)

    fig.suptitle(title, fontsize=style.title_font_size)


# -------------- PLOT FUNCTIONS --------------------------------------------------------------------------------------


@provide_plot
def plot_hist(fig: Figure, title: str, units: str,
              prop: StatProps,
              colors: ColorProfile = DefColorProfile,
              style: StyleProfile = DefStyleProfile) -> None:

    ax = fig.add_subplot(111)

    # TODO: units should come from the ts
    normed_bins = prop.bins_populations / prop.bins_populations.sum()
    bar_width = prop.bins_edges[1] - prop.bins_edges[0]
    ax.bar(prop.bins_edges, normed_bins, color=colors.box_color, width=bar_width, label="Real data")

    ax.set(xlabel=units, ylabel="Value probability")

    dist_plotted = False
    if isinstance(prop, NormStatProps):
        nprop = cast(NormStatProps, prop)
        stats = scipy.stats.norm(nprop.average, nprop.deviation)

        new_edges, step = numpy.linspace(prop.bins_edges[0], prop.bins_edges[-1],
                                         len(prop.bins_edges) * 10, retstep=True)

        ypoints = stats.cdf(new_edges) * 11
        ypoints = [next - prev for (next, prev) in zip(ypoints[1:], ypoints[:-1])]
        xpoints = (new_edges[1:] + new_edges[:-1]) / 2

        ax.plot(xpoints, ypoints, color=colors.primary_color, label="Expected from\nnormal\ndistribution")
        dist_plotted = True

    ax.set_xlim(left=prop.bins_edges[0])
    if prop.log_bins:
        ax.set_xscale('log')

    apply_style(fig, title, style, eng=True, no_legend=not dist_plotted)


@provide_plot
def plot_simple_over_time(fig: Figure,
                          tss: List[Tuple[str, numpy.ndarray]],
                          title: str,
                          ylabel: str,
                          xlabel: str = "time, s",
                          average: bool = False,
                          colors: ColorProfile = DefColorProfile,
                          style: StyleProfile = DefStyleProfile) -> None:
    ax = fig.add_subplot(111)
    for name, arr in tss:
        if average:
            avg_vals = moving_average(arr, style.avg_range)
            if style.approx_average:
                time_points = numpy.arange(len(avg_vals))
                avg_vals = approximate_curve(time_points, avg_vals, time_points, style.curve_approx_level)
            arr = avg_vals
        ax.plot(arr, label=name)
    ax.set(xlabel=xlabel, ylabel=ylabel)
    apply_style(fig, title, style, eng=True)


@provide_plot
def plot_hmap_from_2d(fig: Figure,
                      data2d: numpy.ndarray,
                      title: str, ylabel: str, xlabel: str = 'time, s', bins: numpy.ndarray = None,
                      colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:
    fig.set_size_inches(*style.figsize_long)
    ioq1d, ranges = hmap_from_2d(data2d)
    ax, _ = plot_hmap_with_y_histo(fig, ioq1d, ranges, bins=bins)
    ax.set(ylabel=ylabel, xlabel=xlabel)
    apply_style(fig, title, style, no_legend=True)


@provide_plot
def plot_v_over_time(fig: Figure,
                     title: str,
                     units: str,
                     ts: TimeSeries,
                     plot_avg_dev: bool = True,
                     plot_points: bool = True,
                     colors: ColorProfile = DefColorProfile,
                     style: StyleProfile = DefStyleProfile) -> None:

    min_time = min(ts.times)

    # convert time to seconds
    coef = float(unit_conversion_coef(ts.time_units, 's'))
    time_points = numpy.array([(val_time - min_time) * coef for val_time in ts.times])

    outliers_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_q_nd)
    outliers_4q_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_hide_q_nd)
    normal_idxs = numpy.logical_not(outliers_idxs)
    outliers_idxs = outliers_idxs & numpy.logical_not(outliers_4q_idxs)
    # hidden_outliers_count = numpy.count_nonzero(outliers_4q_idxs)

    data = ts.data[normal_idxs]
    data_times = time_points[normal_idxs]
    outliers = ts.data[outliers_idxs]
    outliers_times = time_points[outliers_idxs]

    ax = fig.add_subplot(111)

    if plot_points:
        alpha = colors.noise_alpha if plot_avg_dev else 1.0
        ax.plot(data_times, data, style.point_shape,
                color=colors.primary_color, alpha=alpha, label="Data")
        ax.plot(outliers_times, outliers, style.err_point_shape,
                color=colors.err_color, label="Outliers")

    has_negative_dev = False
    plus_minus = "\xb1"

    if plot_avg_dev and len(data) < style.avg_range * 2:
        logger.warning("Array %r is too small to plot average over %s points", title, style.avg_range)
    elif plot_avg_dev:
        avg_vals = moving_average(data, style.avg_range)
        dev_vals = moving_dev(data, style.avg_range)
        avg_times = moving_average(data_times, style.avg_range)

        if style.approx_average:
            avg_vals = approximate_curve(avg_times, avg_vals, avg_times, style.curve_approx_level)
            dev_vals = approximate_curve(avg_times, dev_vals, avg_times, style.curve_approx_level)

        ax.plot(avg_times, avg_vals, c=colors.suppl_color1, label="Average")

        low_vals_dev = avg_vals - dev_vals * style.dev_range_x
        hight_vals_dev = avg_vals + dev_vals * style.dev_range_x
        if style.dev_range_x - int(style.dev_range_x) < 0.01:
            ax.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                    label="{}{}*stdev".format(plus_minus, int(style.dev_range_x)))
        else:
            ax.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                    label="{}{}*stdev".format(plus_minus, style.dev_range_x))
        ax.plot(avg_times, hight_vals_dev, c=colors.suppl_color2)
        has_negative_dev = low_vals_dev.min() < 0

    ax.set_xlim(-5, max(time_points) + 5)
    ax.set_xlabel("Time, seconds from test begin")

    if plot_avg_dev:
        ax.set_ylabel("{}. Average and {}stddev over {} points".format(units, plus_minus, style.avg_range))
    else:
        ax.set_ylabel(units)

    if has_negative_dev:
        ax.set_ylim(bottom=0)

    apply_style(fig, title, style, eng=True)


@provide_plot
def plot_lat_over_time(fig: Figure,
                       title: str,
                       ts: TimeSeries,
                       ylabel: str,
                       samples: int = 5,
                       colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:

    min_time = min(ts.times)
    times = [int(tm - min_time + 500) // 1000 for tm in ts.times]
    ts_len = len(times)
    step = ts_len / samples
    points = [times[int(i * step + 0.5)] for i in range(samples)]
    points.append(times[-1])
    bounds = list(zip(points[:-1], points[1:]))
    agg_data = []
    positions = []
    labels = []

    for begin, end in bounds:
        agg_hist = ts.data[begin:end].sum(axis=0)

        if style.violin_instead_of_box:
            # cut outliers
            idx1, idx2 = hist_outliers_perc(agg_hist, style.outliers_lat)
            agg_hist = agg_hist[idx1:idx2]
            curr_bins_vals = ts.histo_bins[idx1:idx2]

            correct_coef = style.violin_point_count / sum(agg_hist)
            if correct_coef > 1:
                correct_coef = 1
        else:
            curr_bins_vals = ts.histo_bins
            correct_coef = 1

        vals = numpy.empty(shape=[numpy.sum(agg_hist)], dtype='float32')
        cidx = 0

        non_zero, = agg_hist.nonzero()
        for pos in non_zero:
            count = int(agg_hist[pos] * correct_coef + 0.5)

            if count != 0:
                vals[cidx: cidx + count] = curr_bins_vals[pos]
                cidx += count

        agg_data.append(vals[:cidx])
        positions.append((end + begin) / 2)
        labels.append(str((end + begin) // 2))

    ax = fig.add_subplot(111)
    if style.violin_instead_of_box:
        patches = ax.violinplot(agg_data,
                                positions=positions,
                                showmeans=True,
                                showmedians=True,
                                widths=step / 2)

        patches['cmeans'].set_color("blue")
        patches['cmedians'].set_color("green")
        if style.legend_for_eng:
            legend_location = "center left"
            legend_bbox_to_anchor = (1.03, 0.81)
            plt.legend([patches['cmeans'], patches['cmedians']], ["mean", "median"],
                       loc=legend_location, bbox_to_anchor=legend_bbox_to_anchor)
    else:
        ax.boxplot(agg_data, 0, '', positions=positions, labels=labels, widths=step / 4)

    ax.set_xlim(min(times), max(times))
    ax.set(ylabel=ylabel, xlabel="Time, seconds from test begin, sampled for ~{} seconds".format(int(step)))

    apply_style(fig, title, style, eng=True, no_legend=True)


@provide_plot
def plot_histo_heatmap(fig: Figure,
                       title: str,
                       ts: TimeSeries,
                       ylabel: str,
                       xlabel: str = "time, s",
                       colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:

    fig.set_size_inches(*style.figsize_long)

    # only histogram-based ts can be plotted
    assert len(ts.data.shape) == 2

    # Find global outliers. As load is expected to be stable during one job
    # outliers range can be detected globally
    total_hist = ts.data.sum(axis=0)
    idx1, idx2 = hist_outliers_perc(total_hist,
                                    bounds_perc=style.outliers_lat,
                                    min_bins_left=style.hm_hist_bins_count)

    # merge outliers into the closest non-outlier cells
    orig_data = ts.data[:, idx1:idx2].copy()
    if idx1 > 0:
        orig_data[:, 0] += ts.data[:, :idx1].sum(axis=1)

    if idx2 < ts.data.shape[1]:
        orig_data[:, -1] += ts.data[:, idx2:].sum(axis=1)

    bins_vals = ts.histo_bins[idx1:idx2]

    # rebin over X axis
    # aggregate some lines in ts.data to plot not more than style.hm_x_slots x bins
    agg_idx = float(len(orig_data)) / style.hm_x_slots
    if agg_idx >= 2:
        data = numpy.zeros([style.hm_x_slots, orig_data.shape[1]], dtype=numpy.float32)  # type: List[numpy.ndarray]
        next = agg_idx
        count = 0
        data_idx = 0
        for idx, arr in enumerate(orig_data):
            if idx >= next:
                data[data_idx] /= count
                data_idx += 1
                next += agg_idx
                count = 0
            data[data_idx] += arr
            count += 1

        if count > 1:
            data[-1] /= count
    else:
        data = orig_data

    # rebin over Y axis
    # =================

    # don't use rebin_histogram here, as we need to apply the same bins to many arrays
    step = (bins_vals[-1] - bins_vals[0]) / style.hm_hist_bins_count
    new_bins_edges = numpy.arange(style.hm_hist_bins_count) * step + bins_vals[0]
    bin_mapping = numpy.clip(numpy.searchsorted(new_bins_edges, bins_vals) - 1, 0, len(new_bins_edges) - 1)

    # map original bin ranges to heatmap bins, iterating over rows
    cmap = []
    for line in data:
        curr_bins = [0] * style.hm_hist_bins_count
        for idx, count in zip(bin_mapping, line):
            curr_bins[idx] += count
        cmap.append(curr_bins)
    ncmap = numpy.array(cmap)

    # plot data
    # =========

    boxes = 3
    gs = gridspec.GridSpec(1, boxes)
    ax = fig.add_subplot(gs[0, :boxes - 1])

    labels = list(map(float2str, (new_bins_edges[:-1] + new_bins_edges[1:]) / 2)) + \
        [float2str(new_bins_edges[-1]) + "+"]
    seaborn.heatmap(ncmap[:,::-1].T, xticklabels=False, cmap="Blues", ax=ax)
    ax.set_yticklabels(labels, rotation='horizontal')
    ax.set_xticklabels([])

    # plot overall histogram
    # =======================

    ax2 = fig.add_subplot(gs[0, boxes - 1])
    ax2.set_yticklabels([])
    ax2.set_xticklabels([])

    histo = ncmap.sum(axis=0).reshape((-1,))
    ax2.set_ylim(top=histo.size, bottom=0)
    ax2.barh(numpy.arange(histo.size) + 0.5, width=histo)

    ax.set(ylabel=ylabel, xlabel=xlabel)

    apply_style(fig, title, style, eng=True, no_legend=True)


@provide_plot
def io_chart(fig: Figure,
             title: str,
             legend: str,
             iosums: List[IOSummary],
             iops_log_spine: bool = False,
             lat_log_spine: bool = False,
             colors: ColorProfile = DefColorProfile, style: StyleProfile = DefStyleProfile) -> None:

    # -------------- MAGIC VALUES ---------------------
    # IOPS bar width
    width = 0.35

    # offset from center of bar to deviation/confidence range indicator
    err_x_offset = 0.05

    # extra space on top and bottom, compared to the maximal tight layout
    extra_y_space = 0.05

    # additional spine for BW/IOPS on left side of plot
    extra_io_spine_x_offset = -0.1

    # extra space on left and right sides
    extra_x_space = 0.5

    # legend location settings
    legend_location = "center left"
    legend_bbox_to_anchor = (1.1, 0.81)

    # plot box size adjust (only plot, not spines and legend)
    plot_box_adjust = {'right': 0.66}
    # -------------- END OF MAGIC VALUES ---------------------

    block_size = iosums[0].block_size
    lc = len(iosums)
    xt = list(range(1, lc + 1))

    # x coordinate of middle of the bars
    xpos = [i - width / 2 for i in xt]

    # import matplotlib.gridspec as gridspec
    # gs = gridspec.GridSpec(1, 3, width_ratios=[1, 4, 1])
    # p1 = plt.subplot(gs[1])

    logger.warning("Check coef usage!")
    ax = fig.add_subplot(111)

    # plot IOPS/BW bars
    if block_size >= LARGE_BLOCKS:
        iops_primary = False
        coef = float(unit_conversion_coef(iosums[0].bw.units, "MiBps"))
        ax.set_ylabel("BW (MiBps)")
    else:
        iops_primary = True
        coef = float(unit_conversion_coef(iosums[0].bw.units, "MiBps")) / block_size
        ax.set_ylabel("IOPS")

    vals = [iosum.bw.average * coef for iosum in iosums]

    ax.bar(xpos, vals, width=width, color=colors.box_color, label=legend)

    # set correct y limits for primary IO spine
    min_io = min(iosum.bw.average - iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    max_io = max(iosum.bw.average + iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    border = (max_io - min_io) * extra_y_space
    io_lims = (min_io - border, max_io + border)

    ax.set_ylim(io_lims[0] * coef, io_lims[-1] * coef)

    # plot deviation and confidence error ranges
    err1_legend = err2_legend = None
    for pos, iosum in zip(xpos, iosums):
        err1_legend = ax.errorbar(pos + width / 2 - err_x_offset,
                                  iosum.bw.average * coef,
                                  iosum.bw.deviation * style.dev_range_x * coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color1)  # 'magenta'
        err2_legend = ax.errorbar(pos + width / 2 + err_x_offset,
                                  iosum.bw.average * coef,
                                  iosum.bw.confidence * coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color2)  # 'teal'

    if style.grid:
        ax.grid(True)

    handles1, labels1 = ax.get_legend_handles_labels()

    handles1 += [err1_legend, err2_legend]
    labels1 += ["{}% dev".format(style.dev_perc),
                "{}% conf".format(int(100 * iosums[0].bw.confidence_level))]

    # extra y spine for latency on right side
    ax2 = ax.twinx()

    # plot median and 95 perc latency
    ax2.plot(xt, [iosum.lat.perc_50 for iosum in iosums], label="lat med")
    ax2.plot(xt, [iosum.lat.perc_95 for iosum in iosums], label="lat 95%")

    # limit and label x spine
    plt.xlim(extra_x_space, lc + extra_x_space)
    plt.xticks(xt, ["{0} * {1}".format(iosum.qd, iosum.nodes_count) for iosum in iosums])
    ax.set_xlabel("QD * Test node count")

    # apply log scale to the Y spines, if set
    if iops_log_spine:
        ax.set_yscale('log')

    if lat_log_spine:
        ax2.set_yscale('log')

    # extra y spine for BW/IOPS on left side
    if style.extra_io_spine:
        ax3 = ax.twinx()
        if iops_log_spine:
            ax3.set_yscale('log')

        if iops_primary:
            ax3.set_ylabel("BW (MiBps)")
            ax3.set_ylim(io_lims[0] * coef, io_lims[1] * coef)
        else:
            ax3.set_ylabel("IOPS")
            ax3.set_ylim(io_lims[0] * coef, io_lims[1] * coef)

        ax3.spines["left"].set_position(("axes", extra_io_spine_x_offset))
        ax3.spines["left"].set_visible(True)
        ax3.yaxis.set_label_position('left')
        ax3.yaxis.set_ticks_position('left')

    ax2.set_ylabel("Latency (ms)")

    # legend box
    handles2, labels2 = ax2.get_legend_handles_labels()
    plt.legend(handles1 + handles2, labels1 + labels2,
               loc=legend_location,
               bbox_to_anchor=legend_bbox_to_anchor)

    # adjust central box size to fit legend
    # plt.subplots_adjust(**plot_box_adjust)
    apply_style(fig, title, style, eng=False, no_legend=True)


# -------------------- REPORT HELPERS --------------------------------------------------------------------------------


class HTMLBlock:
    data = None  # type: str
    js_links = []  # type: List[str]
    css_links = []  # type: List[str]
    order_attr = None  # type: Any

    def __init__(self, data: str, order_attr: Any = None) -> None:
        self.data = data
        self.order_attr = order_attr

    def __eq__(self, o: Any) -> bool:
        return o.order_attr == self.order_attr  # type: ignore

    def __lt__(self, o: Any) -> bool:
        return o.order_attr > self.order_attr  # type: ignore


class Table:
    def __init__(self, header: List[str]) -> None:
        self.header = header
        self.data = []

    def add_line(self, values: List[str]) -> None:
        self.data.append(values)

    def html(self):
        return html.table("", self.header, self.data)
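
# A small illustrative sketch for the helpers above (commented out; the cell values are made
# up). HTMLBlock carries a rendered HTML fragment plus an optional sort key, while Table is a
# thin wrapper around html.table():
#
#     tbl = Table(["Metric", "Value"])
#     tbl.add_line(["IOPS", "1234"])
#     tbl.add_line(["Latency, ms", "5.6"])
#     block = HTMLBlock(tbl.html(), order_attr="summary")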


class Menu1st:
    engineering = "Engineering"
    summary = "Summary"
    per_job = "Per Job"


class Menu2ndEng:
    iops_time = "IOPS(time)"
    hist = "IOPS/lat overall histogram"
    lat_time = "Lat(time)"


class Menu2ndSumm:
    io_lat_qd = "IO & Lat vs QD"


menu_1st_order = [Menu1st.summary, Menu1st.engineering, Menu1st.per_job]


# -------------------- REPORTS --------------------------------------------------------------------------------------


class Reporter(metaclass=abc.ABCMeta):
    suite_types = set()  # type: Set[str]

    @abc.abstractmethod
    def get_divs(self, suite: SuiteConfig, storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        pass


class JobReporter(metaclass=abc.ABCMeta):
    suite_type = set()  # type: Set[str]

    @abc.abstractmethod
    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        pass


# Main performance report
class PerformanceSummary(Reporter):
    """Aggregated summary for the storage"""


# Main performance report
class IO_QD(Reporter):
    """Creates a graph which shows how IOPS and latency depend on QD"""
    suite_types = {'fio'}

    def get_divs(self, suite: SuiteConfig, rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        ts_map = defaultdict(list)  # type: Dict[FioJobParams, List[Tuple[SuiteConfig, FioJobConfig]]]
        str_summary = {}  # type: Dict[FioJobParams, List[IOSummary]]
        for job in rstorage.iter_job(suite):
            fjob = cast(FioJobConfig, job)
            fjob_no_qd = cast(FioJobParams, fjob.params.copy(qd=None))
            str_summary[fjob_no_qd] = (fjob_no_qd.summary, fjob_no_qd.long_summary)
            ts_map[fjob_no_qd].append((suite, fjob))

        for tpl, suites_jobs in ts_map.items():
            if len(suites_jobs) > StyleProfile.min_iops_vs_qd_jobs:
                iosums = [make_iosum(rstorage, suite, job) for suite, job in suites_jobs]
                iosums.sort(key=lambda x: x.qd)
                summary, summary_long = str_summary[tpl]
                ds = DataSource(suite_id=suite.storage_id,
                                job_id=summary,
                                node_id=AGG_TAG,
                                sensor="fio",
                                dev=AGG_TAG,
                                metric="io_over_qd",
                                tag="svg")

                title = "IOPS, BW, Lat vs. QD.\n" + summary_long
                fpath = io_chart(rstorage, ds, title=title, legend="IOPS/BW", iosums=iosums)  # type: str
                yield Menu1st.summary, Menu2ndSumm.io_lat_qd, HTMLBlock(html.img(fpath))


# Linearization report
class IOPS_Bsize(Reporter):
    """Creates graphs which show how IOPS and latency depend on block size"""
835
836
837# IOPS/latency distribution
koder aka kdanilova732a602017-02-01 20:29:56 +0200838class StatInfo(JobReporter):
839 """Statistic info for job results"""
840 suite_types = {'fio'}
841
842 def get_divs(self, suite: SuiteConfig, job: JobConfig,
843 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
844
845 fjob = cast(FioJobConfig, job)
846 io_sum = make_iosum(rstorage, suite, fjob)
847
848 summary_data = [
849 ["Summary", job.params.long_summary],
850 ]
851
852 res = html.H2(html.center("Test summary"))
853 res += html.table("Test info", None, summary_data)
854 stat_data_headers = ["Name", "Average ~ Dev", "Conf interval", "Mediana", "Mode", "Kurt / Skew", "95%", "99%"]

        bw_target_units = 'Bps'
        bw_coef = float(unit_conversion_coef(io_sum.bw.units, bw_target_units))

        bw_data = ["Bandwidth",
                   "{}{} ~ {}{}".format(b2ssize(io_sum.bw.average * bw_coef), bw_target_units,
                                        b2ssize(io_sum.bw.deviation * bw_coef), bw_target_units),
                   b2ssize(io_sum.bw.confidence * bw_coef) + bw_target_units,
                   b2ssize(io_sum.bw.perc_50 * bw_coef) + bw_target_units,
                   "-",
                   "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
                   b2ssize(io_sum.bw.perc_5 * bw_coef) + bw_target_units,
                   b2ssize(io_sum.bw.perc_1 * bw_coef) + bw_target_units]

        iops_coef = float(unit_conversion_coef(io_sum.bw.units, 'KiBps')) / fjob.bsize
        iops_data = ["IOPS",
                     "{}IOPS ~ {}IOPS".format(b2ssize_10(io_sum.bw.average * iops_coef),
                                              b2ssize_10(io_sum.bw.deviation * iops_coef)),
                     b2ssize_10(io_sum.bw.confidence * iops_coef) + "IOPS",
                     b2ssize_10(io_sum.bw.perc_50 * iops_coef) + "IOPS",
                     "-",
                     "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
                     b2ssize_10(io_sum.bw.perc_5 * iops_coef) + "IOPS",
                     b2ssize_10(io_sum.bw.perc_1 * iops_coef) + "IOPS"]

        lat_target_unit = 's'
        lat_coef = unit_conversion_coef(io_sum.lat.units, lat_target_unit)
        # latency
        lat_data = ["Latency",
                    "-",
                    "-",
                    b2ssize_10(io_sum.lat.perc_50 * lat_coef) + lat_target_unit,
                    "-",
                    "-",
                    b2ssize_10(io_sum.lat.perc_95 * lat_coef) + lat_target_unit,
                    b2ssize_10(io_sum.lat.perc_99 * lat_coef) + lat_target_unit]

        # sensor usage
        stat_data = [iops_data, bw_data, lat_data]
        res += html.table("Load stats info", stat_data_headers, stat_data)

        resource_headers = ["Resource", "Usage count", "Proportional to work done"]

        tot_io_coef = float(unit_conversion_coef(io_sum.bw.units, "KiBps"))
        tot_ops_coef = tot_io_coef / fjob.bsize

        io_transfered = io_sum.bw.data.sum() * tot_io_coef
        resource_data = [
            ["IO made", b2ssize_10(io_transfered * tot_ops_coef) + "OP", "-"],
            ["Data transfered", b2ssize(io_transfered) + "B", "-"]
        ]

        storage = rstorage.storage
        nodes = storage.load_list(NodeInfo, 'all_nodes')  # type: List[NodeInfo]

        ops_done = io_transfered * tot_ops_coef

        all_metrics = [
            ("Test nodes net send", 'net-io', 'send_bytes', b2ssize, ['testnode'], "B", io_transfered),
            ("Test nodes net recv", 'net-io', 'recv_bytes', b2ssize, ['testnode'], "B", io_transfered),

            ("Test nodes disk write", 'block-io', 'sectors_written', b2ssize, ['testnode'], "B", io_transfered),
            ("Test nodes disk read", 'block-io', 'sectors_read', b2ssize, ['testnode'], "B", io_transfered),
            ("Test nodes writes", 'block-io', 'writes_completed', b2ssize_10, ['testnode'], "OP", ops_done),
            ("Test nodes reads", 'block-io', 'reads_completed', b2ssize_10, ['testnode'], "OP", ops_done),

            ("Storage nodes net send", 'net-io', 'send_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),
            ("Storage nodes net recv", 'net-io', 'recv_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),

            ("Storage nodes disk write", 'block-io', 'sectors_written', b2ssize, STORAGE_ROLES, "B", io_transfered),
            ("Storage nodes disk read", 'block-io', 'sectors_read', b2ssize, STORAGE_ROLES, "B", io_transfered),
            ("Storage nodes writes", 'block-io', 'writes_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
            ("Storage nodes reads", 'block-io', 'reads_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
        ]

        all_agg = {}

        for descr, sensor, metric, ffunc, roles, units, denom in all_metrics:
            if not nodes:
                continue

            res_ts = summ_sensors(rstorage, roles, sensor=sensor, metric=metric, time_range=job.reliable_info_range_s)
            if res_ts is None:
                continue

            agg = res_ts.data.sum()
            resource_data.append([descr, ffunc(agg) + units, "{:.1f}".format(agg / denom)])
            all_agg[descr] = agg

        cums = [
            ("Test nodes writes", "Test nodes reads", "Total test ops", b2ssize_10, "OP", ops_done),
            ("Storage nodes writes", "Storage nodes reads", "Total storage ops", b2ssize_10, "OP", ops_done),
            ("Storage nodes disk write", "Storage nodes disk read", "Total storage IO size", b2ssize,
             "B", io_transfered),
            ("Test nodes disk write", "Test nodes disk read", "Total test nodes IO size", b2ssize, "B", io_transfered),
        ]

        for name1, name2, descr, ffunc, units, denom in cums:
            if name1 in all_agg and name2 in all_agg:
                agg = all_agg[name1] + all_agg[name2]
                resource_data.append([descr, ffunc(agg) + units, "{:.1f}".format(agg / denom)])

        res += html.table("Resources usage", resource_headers, resource_data)

        yield Menu1st.per_job, job.summary, HTMLBlock(res)


# CPU load
class CPULoadPlot(JobReporter):
    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        # plot CPU time
        for rt, roles in [('storage', STORAGE_ROLES), ('test', ['testnode'])]:
            cpu_ts = {}
            cpu_metrics = "idle guest iowait irq nice sirq steal sys user".split()
            for name in cpu_metrics:
                cpu_ts[name] = summ_sensors(rstorage, roles, sensor='system-cpu', metric=name,
                                            time_range=job.reliable_info_range_s)

            it = iter(cpu_ts.values())
            total_over_time = next(it).data.copy()
            for ts in it:
                total_over_time += ts.data

            fname = plot_simple_over_time(rstorage,
                                          cpu_ts['idle'].source(job_id=job.storage_id,
                                                                suite_id=suite.storage_id,
                                                                metric='allcpu', tag=rt + '.plt.' + default_format),
                                          tss=[(name, ts.data * 100 / total_over_time) for name, ts in cpu_ts.items()],
                                          average=True,
                                          ylabel="CPU time %",
                                          title="{} nodes CPU usage".format(rt.capitalize()))

            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))


994# IO time and QD
995class QDIOTimeHeatmap(JobReporter):
996 def get_divs(self,
997 suite: SuiteConfig,
998 job: JobConfig,
999 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
1000
1001 # TODO: fix this hardcode, need to track what devices are actually used on test and storage nodes
1002 # use saved storage info in nodes
1003
1004 journal_devs = None
1005 storage_devs = None
1006 test_nodes_devs = ['rbd0']
1007
1008 for node in find_nodes_by_roles(rstorage, STORAGE_ROLES):
1009 cjd = set(node.params['ceph_journal_devs'])
1010 if journal_devs is None:
1011 journal_devs = cjd
1012 else:
1013 assert journal_devs == cjd, "{!r} != {!r}".format(journal_devs, cjd)
1014
1015 csd = set(node.params['ceph_storage_devs'])
1016 if storage_devs is None:
1017 storage_devs = csd
1018 else:
1019 assert storage_devs == csd, "{!r} != {!r}".format(storage_devs, csd)
1020
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001021 trange = (job.reliable_info_range[0] // 1000, job.reliable_info_range[1] // 1000)
1022
1023 for name, devs, roles in [('storage', storage_devs, STORAGE_ROLES),
1024 ('journal', journal_devs, STORAGE_ROLES),
1025 ('test', test_nodes_devs, ['testnode'])]:
1026 # QD heatmap
1027 ioq2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
1028 metric='io_queue', time_range=trange)
1029 fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
1030 job.storage_id,
1031 AGG_TAG,
1032 'block-io',
1033 name,
1034 metric='io_queue',
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001035 tag="hmap." + default_format),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001036 ioq2d, ylabel="IO QD", title=name.capitalize() + " devs QD",
1037 bins=StyleProfile.qd_bins,
1038 xlabel='Time') # type: str
1039 yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
1040
1041 # Block size heatmap
1042 wc2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
1043 metric='writes_completed', time_range=trange)
1044 wc2d[wc2d < 1E-3] = 1
1045 sw2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
1046 metric='sectors_written', time_range=trange)
1047 data2d = sw2d / wc2d / 1024
1048 fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
1049 job.storage_id,
1050 AGG_TAG,
1051 'block-io',
1052 name,
1053 metric='wr_block_size',
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001054 tag="hmap." + default_format),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001055 data2d, ylabel="IO bsize, KiB", title=name.capitalize() + " write block size",
1056 xlabel='Time',
1057 bins=StyleProfile.block_size_bins) # type: str
1058 yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
1059
1060 # iotime heatmap
1061 wtime2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
1062 metric='io_time', time_range=trange)
1063 fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
1064 job.storage_id,
1065 AGG_TAG,
1066 'block-io',
1067 name,
1068 metric='io_time',
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001069 tag="hmap." + default_format),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001070 wtime2d, ylabel="IO time (ms) per second",
1071 title=name.capitalize() + " iotime",
1072 xlabel='Time',
1073 bins=StyleProfile.iotime_bins) # type: str
1074 yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
1075
1076
# IOPS/latency distribution
class IOHist(JobReporter):
    """IOPS/latency distribution histogram"""
koder aka kdanilova732a602017-02-01 20:29:56 +02001080 suite_types = {'fio'}
koder aka kdanilov108ac362017-01-19 20:17:16 +02001081
koder aka kdanilova732a602017-02-01 20:29:56 +02001082 def get_divs(self,
1083 suite: SuiteConfig,
1084 job: JobConfig,
1085 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilov108ac362017-01-19 20:17:16 +02001086
koder aka kdanilova732a602017-02-01 20:29:56 +02001087 fjob = cast(FioJobConfig, job)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001088
koder aka kdanilova732a602017-02-01 20:29:56 +02001089 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.H2(html.center("Load histograms")))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001090
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001091 # agg_lat = get_aggregated(rstorage, suite, fjob, "lat")
1092 # # bins_edges = numpy.array(get_lat_vals(agg_lat.data.shape[1]), dtype='float32') / 1000 # convert us to ms
1093 # lat_stat_prop = calc_histo_stat_props(agg_lat, bins_edges=None, rebins_count=StyleProfile.hist_lat_boxes)
1094 #
1095 # long_summary = cast(FioJobParams, fjob.params).long_summary
1096 #
1097 # title = "Latency distribution"
1098 # units = "ms"
1099 #
1100 # fpath = plot_hist(rstorage, agg_lat.source(tag='hist.svg'), title, units, lat_stat_prop) # type: str
1101 # yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilova732a602017-02-01 20:29:56 +02001102
1103 agg_io = get_aggregated(rstorage, suite, fjob, "bw")
1104
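        # for large block sizes report bandwidth (MiBps); otherwise convert bandwidth to IOPS by dividing by the block size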
1105 if fjob.bsize >= LARGE_BLOCKS:
1106 title = "BW distribution"
1107 units = "MiBps"
kdanylov aka koder45183182017-04-30 23:55:40 +03001108 agg_io.data //= int(unit_conversion_coef(units, agg_io.units))
koder aka kdanilova732a602017-02-01 20:29:56 +02001109 else:
1110 title = "IOPS distribution"
kdanylov aka koder45183182017-04-30 23:55:40 +03001111 agg_io.data //= (int(unit_conversion_coef("KiBps", agg_io.units)) * fjob.bsize)
koder aka kdanilova732a602017-02-01 20:29:56 +02001112 units = "IOPS"
1113
1114 io_stat_prop = calc_norm_stat_props(agg_io, bins_count=StyleProfile.hist_boxes)
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001115 fpath = plot_hist(rstorage, agg_io.source(tag='hist.' + default_format),
1116 title, units, io_stat_prop) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001117 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001118
1119
koder aka kdanilov108ac362017-01-19 20:17:16 +02001120# IOPS/latency over test time for each job
koder aka kdanilova732a602017-02-01 20:29:56 +02001121class IOTime(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001122 """IOPS/latency during test"""
koder aka kdanilova732a602017-02-01 20:29:56 +02001123 suite_types = {'fio'}
koder aka kdanilov108ac362017-01-19 20:17:16 +02001124
koder aka kdanilova732a602017-02-01 20:29:56 +02001125 def get_divs(self,
1126 suite: SuiteConfig,
1127 job: JobConfig,
1128 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilov108ac362017-01-19 20:17:16 +02001129
koder aka kdanilova732a602017-02-01 20:29:56 +02001130 fjob = cast(FioJobConfig, job)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001131
koder aka kdanilova732a602017-02-01 20:29:56 +02001132 agg_io = get_aggregated(rstorage, suite, fjob, "bw")
1133 if fjob.bsize >= LARGE_BLOCKS:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001134 title = "Fio measured bandwidth over time"
koder aka kdanilova732a602017-02-01 20:29:56 +02001135 units = "MiBps"
kdanylov aka koder45183182017-04-30 23:55:40 +03001136 agg_io.data //= int(unit_conversion_coef(units, agg_io.units))
koder aka kdanilova732a602017-02-01 20:29:56 +02001137 else:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001138 title = "Fio measured IOPS over time"
kdanylov aka koder45183182017-04-30 23:55:40 +03001139 agg_io.data //= (int(unit_conversion_coef("KiBps", agg_io.units)) * fjob.bsize)
koder aka kdanilova732a602017-02-01 20:29:56 +02001140 units = "IOPS"
koder aka kdanilov108ac362017-01-19 20:17:16 +02001141
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001142 fpath = plot_v_over_time(rstorage, agg_io.source(tag='ts.' + default_format), title, units, agg_io) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001143 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001144
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001145 agg_lat = get_aggregated(rstorage, suite, fjob, "lat").copy()
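        # rescale the latency histogram bin edges into milliseconds for plotting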
1146 TARGET_UNITS = 'ms'
1147 coef = unit_conversion_coef(agg_lat.units, TARGET_UNITS)
1148 agg_lat.histo_bins = agg_lat.histo_bins.copy() * float(coef)
1149 agg_lat.units = TARGET_UNITS
koder aka kdanilov108ac362017-01-19 20:17:16 +02001150
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001151 fpath = plot_lat_over_time(rstorage, agg_lat.source(tag='ts.' + default_format), "Latency",
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001152 agg_lat, ylabel="Latency, " + agg_lat.units) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001153 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001154
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001155 fpath = plot_histo_heatmap(rstorage,
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001156 agg_lat.source(tag='hmap.' + default_format),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001157 "Latency heatmap",
1158 agg_lat,
1159 ylabel="Latency, " + agg_lat.units,
1160 xlabel='Test time') # type: str
koder aka kdanilov108ac362017-01-19 20:17:16 +02001161
koder aka kdanilova732a602017-02-01 20:29:56 +02001162 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001163
1164
1165class ResourceUsage:
1166 def __init__(self, io_r_ops: int, io_w_ops: int, io_r_kb: int, io_w_kb: int) -> None:
1167 self.io_w_ops = io_w_ops
1168 self.io_r_ops = io_r_ops
1169 self.io_w_kb = io_w_kb
1170 self.io_r_kb = io_r_kb
1171
1172 self.cpu_used_user = None # type: int
1173 self.cpu_used_sys = None # type: int
1174 self.cpu_wait_io = None # type: int
1175
1176 self.net_send_packets = None # type: int
1177 self.net_recv_packets = None # type: int
1178 self.net_send_kb = None # type: int
1179 self.net_recv_kb = None # type: int
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001180
1181
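# A minimal sketch (not wired into any reporter) of how a ResourceUsage record could be filled
# from the block-io sensors, reusing the summ_sensors()/unit_conversion_coef() call pattern used
# by ClusterLoad below; the .data.sum() aggregation and the helper name are assumptions of this
# sketch, not existing wally code.
def make_io_resource_usage(rstorage: ResultStorage, job: JobConfig, roles: List[str]) -> ResourceUsage:
    def summed(metric: str, units: str = None) -> int:
        # sum the per-interval sensor values over the reliable part of the job run
        ts = summ_sensors(rstorage, roles, 'block-io', metric, job.reliable_info_range_s)
        data = ts.data if units is None else ts.data * float(unit_conversion_coef(ts.units, units))
        return int(data.sum())

    return ResourceUsage(io_r_ops=summed('reads_completed'),
                         io_w_ops=summed('writes_completed'),
                         io_r_kb=summed('sectors_read', 'KiB'),
                         io_w_kb=summed('sectors_written', 'KiB'))
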
1182# Cluster load over test time
koder aka kdanilova732a602017-02-01 20:29:56 +02001183class ClusterLoad(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001184 """Cluster load (block-io on test nodes) over test time"""
1185
koder aka kdanilova732a602017-02-01 20:29:56 +02001186 # TODO: units should come from the sensor itself
koder aka kdanilov108ac362017-01-19 20:17:16 +02001187 storage_sensors = [
kdanylov aka koder45183182017-04-30 23:55:40 +03001188 ('block-io', 'reads_completed', "Read", 'iop'),
1189 ('block-io', 'writes_completed', "Write", 'iop'),
1190 ('block-io', 'sectors_read', "Read", 'KiB'),
1191 ('block-io', 'sectors_written', "Write", 'KiB'),
koder aka kdanilov108ac362017-01-19 20:17:16 +02001192 ]
1193
koder aka kdanilova732a602017-02-01 20:29:56 +02001194 def get_divs(self,
1195 suite: SuiteConfig,
1196 job: JobConfig,
1197 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilova732a602017-02-01 20:29:56 +02001198 yield Menu1st.per_job, job.summary, HTMLBlock(html.H2(html.center("Cluster load")))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001199
kdanylov aka koder45183182017-04-30 23:55:40 +03001200 for sensor, metric, op, units in self.storage_sensors:
1201 ts = summ_sensors(rstorage, ['testnode'], sensor, metric, job.reliable_info_range_s)
koder aka kdanilova732a602017-02-01 20:29:56 +02001202 ds = DataSource(suite_id=suite.storage_id,
1203 job_id=job.storage_id,
1204 node_id="test_nodes",
1205 sensor=sensor,
1206 dev=AGG_TAG,
1207 metric=metric,
kdanylov aka koder4e4af682017-05-01 01:52:14 +03001208 tag="ts." + default_format)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001209
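            # sector counters are rescaled to KiB; IOP counters are plotted as-is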
kdanylov aka koder45183182017-04-30 23:55:40 +03001210 data = ts.data if units != 'KiB' else ts.data * float(unit_conversion_coef(ts.units, 'KiB'))
koder aka kdanilova732a602017-02-01 20:29:56 +02001211 ts = TimeSeries(name="",
kdanylov aka koder45183182017-04-30 23:55:40 +03001212 times=numpy.arange(*job.reliable_info_range_s),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001213 data=data,
koder aka kdanilova732a602017-02-01 20:29:56 +02001214 raw=None,
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001215 units=units if ts.units is None else ts.units,
1216 time_units=ts.time_units,
1217 source=ds,
1218 histo_bins=ts.histo_bins)
kdanylov aka koder0e0cfcb2017-03-27 22:19:09 +03001219
kdanylov aka koder45183182017-04-30 23:55:40 +03001220 sensor_title = "{} {}".format(op, units)
koder aka kdanilova732a602017-02-01 20:29:56 +02001221 fpath = plot_v_over_time(rstorage, ds, sensor_title, sensor_title, ts=ts) # type: str
1222 yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001223
1224
1225# Resources consumption report
1226class ResourceConsumption(Reporter):
1227 """Resources consumption report, only text"""
1228
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001229
1230# Node load over test time
1231class NodeLoad(Reporter):
1232 """IOPS/latency during test"""
1233
1234
1235# Ceph cluster summary
1236class CephClusterSummary(Reporter):
1237 """IOPS/latency during test"""
1238
1239
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001240# TODO: Ceph operation breakout report
1241# TODO: Resource consumption for different type of test
1242
1243
koder aka kdanilov108ac362017-01-19 20:17:16 +02001244# ------------------------------------------ REPORT STAGES -----------------------------------------------------------
1245
1246
1247class HtmlReportStage(Stage):
1248 priority = StepOrder.REPORT
1249
1250 def run(self, ctx: TestRun) -> None:
1251 rstorage = ResultStorage(ctx.storage)
koder aka kdanilova732a602017-02-01 20:29:56 +02001252
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001253 job_reporters = [StatInfo(), IOTime(), IOHist(), ClusterLoad(), CPULoadPlot(),
1254 QDIOTimeHeatmap()] # type: List[JobReporter]
1255 reporters = []  # type: List[Reporter]
koder aka kdanilova732a602017-02-01 20:29:56 +02001256
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001257 # reporters = [IO_QD()] # type: List[Reporter]
koder aka kdanilova732a602017-02-01 20:29:56 +02001258 # job_reporters = [ClusterLoad()]
koder aka kdanilov108ac362017-01-19 20:17:16 +02001259
1260 root_dir = os.path.dirname(os.path.dirname(wally.__file__))
1261 doc_templ_path = os.path.join(root_dir, "report_templates/index.html")
1262 report_template = open(doc_templ_path, "rt").read()
1263 css_file_src = os.path.join(root_dir, "report_templates/main.css")
1264 css_file = open(css_file_src, "rt").read()
1265
1266 menu_block = []
1267 content_block = []
1268 link_idx = 0
1269
koder aka kdanilova732a602017-02-01 20:29:56 +02001270 # matplotlib.rcParams.update(ctx.config.reporting.matplotlib_params.raw())
1271 # ColorProfile.__dict__.update(ctx.config.reporting.colors.raw())
1272 # StyleProfile.__dict__.update(ctx.config.reporting.style.raw())
koder aka kdanilov108ac362017-01-19 20:17:16 +02001273
koder aka kdanilova732a602017-02-01 20:29:56 +02001274 items = defaultdict(lambda: defaultdict(list)) # type: Dict[str, Dict[str, List[HTMLBlock]]]
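        # when True, only the first job of the first suite is rendered - speeds up report-layout debugging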
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001275 DEBUG = False
koder aka kdanilova732a602017-02-01 20:29:56 +02001276 # TODO: filter reporters
koder aka kdanilov108ac362017-01-19 20:17:16 +02001277 for suite in rstorage.iter_suite(FioTest.name):
koder aka kdanilova732a602017-02-01 20:29:56 +02001278 all_jobs = list(rstorage.iter_job(suite))
1279 all_jobs.sort(key=lambda job: job.params)
1280 for job in all_jobs:
1281 for reporter in job_reporters:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001282 logger.debug("Start reporter %s on job %s suite %s",
1283 reporter.__class__.__name__, job.summary, suite.test_type)
koder aka kdanilova732a602017-02-01 20:29:56 +02001284 for block, item, html_block in reporter.get_divs(suite, job, rstorage):
1285 items[block][item].append(html_block)
1286 if DEBUG:
1287 break
1288
koder aka kdanilov108ac362017-01-19 20:17:16 +02001289 for reporter in reporters:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001290 logger.debug("Start reporter %s on suite %s", reporter.__class__.__name__, suite.test_type)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001291 for block, item, html_block in reporter.get_divs(suite, rstorage):
1292 items[block][item].append(html_block)
1293
koder aka kdanilova732a602017-02-01 20:29:56 +02001294 if DEBUG:
1295 break
1296
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001297 logger.debug("Generating result html")
1298
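        # build the two-level collapsible menu and the matching content divs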
koder aka kdanilov108ac362017-01-19 20:17:16 +02001299 for idx_1st, menu_1st in enumerate(sorted(items, key=lambda x: menu_1st_order.index(x))):
1300 menu_block.append(
1301 '<a href="#item{}" class="nav-group" data-toggle="collapse" data-parent="#MainMenu">{}</a>'
1302 .format(idx_1st, menu_1st)
1303 )
1304 menu_block.append('<div class="collapse" id="item{}">'.format(idx_1st))
1305 for menu_2nd in sorted(items[menu_1st]):
1306 menu_block.append(' <a href="#content{}" class="nav-group-item">{}</a>'
1307 .format(link_idx, menu_2nd))
1308 content_block.append('<div id="content{}">'.format(link_idx))
koder aka kdanilova732a602017-02-01 20:29:56 +02001309 content_block.extend(" " + x.data for x in items[menu_1st][menu_2nd])
koder aka kdanilov108ac362017-01-19 20:17:16 +02001310 content_block.append('</div>')
1311 link_idx += 1
1312 menu_block.append('</div>')
1313
1314 report = report_template.replace("{{{menu}}}", ("\n" + " " * 16).join(menu_block))
1315 report = report.replace("{{{content}}}", ("\n" + " " * 16).join(content_block))
1316 report_path = rstorage.put_report(report, "index.html")
1317 rstorage.put_report(css_file, "main.css")
1318 logger.info("Report is stored into %r", report_path)