blob: b8a771339aaf9850909d34ad0e15a2e183aa0998 [file] [log] [blame]
koder aka kdanilov108ac362017-01-19 20:17:16 +02001import os
koder aka kdanilov7f59d562016-12-26 01:34:23 +02002import abc
koder aka kdanilova047e1b2015-04-21 23:16:59 +03003import logging
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03004import warnings
koder aka kdanilov108ac362017-01-19 20:17:16 +02005from io import BytesIO
6from functools import wraps
koder aka kdanilov108ac362017-01-19 20:17:16 +02007from collections import defaultdict
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03008from typing import Dict, Any, Iterator, Tuple, cast, List, Callable, Set
koder aka kdanilovcff7b2e2015-04-18 20:48:15 +03009
koder aka kdanilovffaf48d2016-12-27 02:25:29 +020010import numpy
koder aka kdanilov108ac362017-01-19 20:17:16 +020011import scipy.stats
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030012
13import matplotlib
14# matplotlib.use('GTKAgg')
15
koder aka kdanilova732a602017-02-01 20:29:56 +020016import matplotlib.pyplot as plt
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030017from matplotlib import gridspec
18
19from cephlib.common import float2str
20from cephlib.plot import plot_hmap_with_y_histo, hmap_from_2d
koder aka kdanilovbe8f89f2015-04-28 14:51:51 +030021
koder aka kdanilov108ac362017-01-19 20:17:16 +020022import wally
koder aka kdanilovffaf48d2016-12-27 02:25:29 +020023
koder aka kdanilov108ac362017-01-19 20:17:16 +020024from . import html
koder aka kdanilov39e449e2016-12-17 15:15:26 +020025from .stage import Stage, StepOrder
26from .test_run_class import TestRun
koder aka kdanilov108ac362017-01-19 20:17:16 +020027from .hlstorage import ResultStorage
28from .node_interfaces import NodeInfo
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030029from .utils import b2ssize, b2ssize_10, STORAGE_ROLES, unit_conversion_coef
koder aka kdanilova732a602017-02-01 20:29:56 +020030from .statistic import (calc_norm_stat_props, calc_histo_stat_props, moving_average, moving_dev,
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030031 hist_outliers_perc, find_ouliers_ts, approximate_curve)
32from .result_classes import (StatProps, DataSource, TimeSeries, NormStatProps, HistoStatProps, SuiteConfig)
koder aka kdanilov108ac362017-01-19 20:17:16 +020033from .suits.io.fio import FioTest, FioJobConfig
koder aka kdanilova732a602017-02-01 20:29:56 +020034from .suits.io.fio_job import FioJobParams
35from .suits.job import JobConfig
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +030036from .data_selectors import get_aggregated, AGG_TAG, summ_sensors, find_sensors_to_2d, find_nodes_by_roles
37
38
39with warnings.catch_warnings():
40 warnings.simplefilter("ignore")
41 import seaborn
koder aka kdanilovcff7b2e2015-04-18 20:48:15 +030042
koder aka kdanilov4a510ee2015-04-21 18:50:42 +030043
# Shared logger for report generation; log records go to the "wally" logger.
logger = logging.getLogger("wally")
koder aka kdanilova047e1b2015-04-21 23:16:59 +030045
46
koder aka kdanilov108ac362017-01-19 20:17:16 +020047# ---------------- CONSTS ---------------------------------------------------------------------------------------------
koder aka kdanilov39e449e2016-12-17 15:15:26 +020048
koder aka kdanilov7f59d562016-12-26 01:34:23 +020049
DEBUG = False
# Jobs with block size >= this are charted as bandwidth instead of IOPS
# (see io_chart); presumably the unit is KiB - TODO confirm.
LARGE_BLOCKS = 256
MiB2KiB = 1024  # MiB -> KiB conversion multiplier
MS2S = 1000     # milliseconds per second
koder aka kdanilov39e449e2016-12-17 15:15:26 +020054
koder aka kdanilov39e449e2016-12-17 15:15:26 +020055
koder aka kdanilov108ac362017-01-19 20:17:16 +020056# ---------------- PROFILES ------------------------------------------------------------------------------------------
57
58
# These are the default values; real values are loaded from the config


class ColorProfile:
    # matplotlib color names used across all report plots
    primary_color = 'b'
    suppl_color1 = 'teal'
    suppl_color2 = 'magenta'
    suppl_color3 = 'orange'
    box_color = 'y'
    err_color = 'red'

    # alpha for raw data points when an average curve is drawn over them
    noise_alpha = 0.3
    # alpha for deviation/confidence error bars (see io_chart)
    subinfo_alpha = 0.7

    imshow_colormap = None  # type: str
73
koder aka kdanilov108ac362017-01-19 20:17:16 +020074
class StyleProfile:
    # Default plot styling knobs; real values are loaded from the config.
    grid = True
    tide_layout = True
    # rebin count for histogram stats (make_iosum, plot_hist)
    hist_boxes = 10
    hist_lat_boxes = 25
    # Y-axis bins count of the latency heatmap
    hm_hist_bins_count = 25
    # max number of time slots along the heatmap X axis
    hm_x_slots = 25
    min_points_for_dev = 5

    # deviation band half-width, in stdev units
    dev_range_x = 2.0
    dev_perc = 95

    point_shape = 'o'      # marker for normal data points
    err_point_shape = '*'  # marker for outliers

    # window size for moving average/deviation
    avg_range = 20
    approx_average = True

    # parameters passed to approximate_curve for smoothing
    curve_approx_level = 6
    curve_approx_points = 100
    # class-level sanity check: averaging window must cover enough points
    assert avg_range >= min_points_for_dev

    # figure size in inches
    figsize = (10, 6)

    extra_io_spine = True

    legend_for_eng = True
    # heatmap_interpolation = '1d'
    heatmap_interpolation = None
    heatmap_interpolation_points = 300
    # outlier cut thresholds (stdev units) for time-series plots
    outliers_q_nd = 3.0
    outliers_hide_q_nd = 4.0
    # percentile bounds used to trim latency histogram outliers
    outliers_lat = (0.01, 0.9)

    violin_instead_of_box = True
    # max synthesized points fed into one violin plot
    violin_point_count = 30000

    heatmap_colorbar = False

    min_iops_vs_qd_jobs = 3

    # metric -> (unit label, scale coefficient, human-readable name)
    units = {
        'bw': ("MiBps", MiB2KiB, "bandwith"),
        'iops': ("IOPS", 1, "iops"),
        'lat': ("ms", 1, "latency")
    }

    qd_bins = [0, 1, 2, 4, 6, 8, 12, 16, 20, 26, 32, 40, 48, 56, 64, 96, 128]
    iotime_bins = list(range(0, 1030, 50))
    block_size_bins = [0, 2, 4, 8, 16, 32, 48, 64, 96, 128, 192, 256, 384, 512, 1024, 2048]
126
koder aka kdanilov108ac362017-01-19 20:17:16 +0200127
128# ---------------- STRUCTS -------------------------------------------------------------------------------------------
koder aka kdanilov39e449e2016-12-17 15:15:26 +0200129
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200130
# TODO: need to be revised, have to use StatProps fields instead
class StoragePerfSummary:
    """Plain container for peak performance numbers of one storage entity."""

    def __init__(self, name: str) -> None:
        # BUG FIX: ``name`` was accepted but silently dropped; keep it so the
        # summary can be identified later
        self.name = name  # type: str

        self.direct_iops_r_max = 0  # type: int
        self.direct_iops_w_max = 0  # type: int

        # 64 used instead of 4k to faster feed caches
        self.direct_iops_w64_max = 0  # type: int

        # presumably: random 4k write IOPS under 10/30/100 ms latency caps -
        # TODO confirm against the code that fills these in
        self.rws4k_10ms = 0  # type: int
        self.rws4k_30ms = 0  # type: int
        self.rws4k_100ms = 0  # type: int
        self.bw_write_max = 0  # type: int
        self.bw_read_max = 0  # type: int

        self.bw = None  # type: float
        self.iops = None  # type: float
        self.lat = None  # type: float
        self.lat_50 = None  # type: float
        self.lat_95 = None  # type: float
151
152
class IOSummary:
    """Aggregated results of one fio job: bandwidth and latency statistics."""

    def __init__(self,
                 qd: int,
                 block_size: int,
                 nodes_count: int,
                 bw: NormStatProps,
                 lat: HistoStatProps) -> None:
        # job parameters the statistics were collected under
        self.qd = qd
        self.block_size = block_size
        self.nodes_count = nodes_count

        # aggregated measurements
        self.bw = bw
        self.lat = lat
167
168
169# -------------- AGGREGATION AND STAT FUNCTIONS ----------------------------------------------------------------------
koder aka kdanilov108ac362017-01-19 20:17:16 +0200170
def make_iosum(rstorage: ResultStorage, suite: SuiteConfig, job: FioJobConfig) -> IOSummary:
    """Build an IOSummary for one fio job from its aggregated 'bw'/'lat' series."""
    lat_ts = get_aggregated(rstorage, suite, job, "lat")
    bw_ts = get_aggregated(rstorage, suite, job, "bw")
    return IOSummary(job.qd,
                     block_size=job.bsize,
                     nodes_count=len(suite.nodes_ids),
                     lat=calc_histo_stat_props(lat_ts, rebins_count=StyleProfile.hist_boxes),
                     bw=calc_norm_stat_props(bw_ts, StyleProfile.hist_boxes))
180
koder aka kdanilov108ac362017-01-19 20:17:16 +0200181
def is_sensor_numarray(sensor: str, metric: str) -> bool:
    """Whether (sensor, metric) yields a flat 1D numeric array, one value per measurement.

    Currently every known sensor does, so this always returns True.
    """
    return True
185
186
# (sensor, metric) pairs that report an instantaneous level (e.g. current
# queue depth) instead of a per-interval delta of a cumulative counter.
LEVEL_SENSORS = {
    ("block-io", "io_queue"),
    ("system-cpu", "procs_blocked"),
    ("system-cpu", "procs_queue"),
}


def is_level_sensor(sensor: str, metric: str) -> bool:
    """True when (sensor, metric) measures a level of some kind, e.g. queue depth."""
    return (sensor, metric) in LEVEL_SENSORS


def is_delta_sensor(sensor: str, metric: str) -> bool:
    """True when (sensor, metric) reports deltas of a cumulative value,
    e.g. IO operations completed during the interval."""
    return not is_level_sensor(sensor, metric)
200
koder aka kdanilov108ac362017-01-19 20:17:16 +0200201# -------------- PLOT HELPERS FUNCTIONS ------------------------------------------------------------------------------
202
def get_emb_data_svg(plt: Any, format: str = 'svg') -> bytes:
    """Render the current figure into memory and return the raw image bytes.

    :param plt: matplotlib.pyplot-like object providing ``savefig``
    :param format: 'png', 'jpg' or 'svg'
    :returns: image payload; for svg the matplotlib banner comment and
              everything before it is stripped so the result embeds into HTML
    :raises ValueError: for any unsupported format
    """
    bio = BytesIO()
    if format in ('png', 'jpg'):
        plt.savefig(bio, format=format)
        return bio.getvalue()
    elif format == 'svg':
        plt.savefig(bio, format='svg')
        # keep only the <svg> element - drop the xml prolog and banner comment
        img_start = "<!-- Created with matplotlib (http://matplotlib.org/) -->"
        return bio.getvalue().decode("utf8").split(img_start, 1)[1].encode("utf8")
    # BUG FIX: unknown formats previously fell off the end and returned None,
    # violating the declared "-> bytes" contract; fail loudly instead
    raise ValueError("Unsupported plot format {!r}".format(format))
koder aka kdanilov108ac362017-01-19 20:17:16 +0200212
213
def provide_plot(func: Callable[..., None]) -> Callable[..., str]:
    """Decorator for plot functions: adds result caching and figure lifecycle.

    The wrapped function gains two leading parameters: (storage, path).
    If a plot file for ``path`` already exists in ``storage``, its path is
    returned without calling ``func`` at all; otherwise a fresh figure is
    prepared, ``func`` renders into it, and the image is saved to storage.
    """
    @wraps(func)
    def closure1(storage: ResultStorage,
                 path: DataSource,
                 *args, **kwargs) -> str:
        fpath = storage.check_plot_file(path)
        if not fpath:
            # image format is encoded as the last dot-separated component of
            # the tag, e.g. "....svg" or "....png"
            format = path.tag.split(".")[-1]

            plt.figure(figsize=StyleProfile.figsize)
            # leave room on the right for the legend box
            plt.subplots_adjust(right=0.66)

            func(*args, **kwargs)
            fpath = storage.put_plot_file(get_emb_data_svg(plt, format=format), path)
            logger.debug("Plot %s saved to %r", path, fpath)
            # release figure memory - report generation creates many plots
            plt.clf()
            plt.close('all')
        return fpath
    return closure1
233
234
def apply_style(style: StyleProfile, eng: bool = True, no_legend: bool = False) -> None:
    """Apply the shared grid/legend settings to the current figure."""
    if style.grid:
        plt.grid(True)

    want_legend = not no_legend and (style.legend_for_eng or not eng)
    if want_legend:
        # place the legend outside the axes, to the right of the plot box
        plt.legend(loc="center left", bbox_to_anchor=(1.03, 0.81))
243
244
245# -------------- PLOT FUNCTIONS --------------------------------------------------------------------------------------
246
247
@provide_plot
def plot_hist(title: str, units: str,
              prop: StatProps,
              colors: Any = ColorProfile,
              style: Any = StyleProfile) -> None:
    """Plot the normalized histogram from ``prop``; for normally-distributed
    stats overlay the expected normal curve for visual comparison."""

    # TODO: units should come from ts
    normed_bins = prop.bins_populations / prop.bins_populations.sum()
    bar_width = prop.bins_edges[1] - prop.bins_edges[0]
    plt.bar(prop.bins_edges, normed_bins, color=colors.box_color, width=bar_width, label="Real data")

    plt.xlabel(units)
    plt.ylabel("Value probability")
    plt.title(title)

    dist_plotted = False
    if isinstance(prop, NormStatProps):
        nprop = cast(NormStatProps, prop)
        stats = scipy.stats.norm(nprop.average, nprop.deviation)

        # resample the x range 10x finer than the original bins
        new_edges, step = numpy.linspace(prop.bins_edges[0], prop.bins_edges[-1],
                                         len(prop.bins_edges) * 10, retstep=True)

        # NOTE(review): the magic "* 11" scale looks suspicious - presumably it
        # compensates for the 10x resampling above; confirm against the output
        ypoints = stats.cdf(new_edges) * 11
        # differentiate the CDF to get per-interval probabilities
        ypoints = [next - prev for (next, prev) in zip(ypoints[1:], ypoints[:-1])]
        xpoints = (new_edges[1:] + new_edges[:-1]) / 2

        plt.plot(xpoints, ypoints, color=colors.primary_color, label="Expected from\nnormal\ndistribution")
        dist_plotted = True

    plt.gca().set_xlim(left=prop.bins_edges[0])
    if prop.log_bins:
        plt.xscale('log')

    apply_style(style, eng=True, no_legend=not dist_plotted)
283
284
@provide_plot
def plot_simple_over_time(tss: List[Tuple[str, numpy.ndarray]],
                          title: str,
                          ylabel: str,
                          xlabel: str = "time, s",
                          average: bool = False,
                          colors: Any = ColorProfile,
                          style: Any = StyleProfile) -> None:
    """Plot one line per (name, values) pair, optionally smoothed with a moving average."""
    fig, ax = plt.subplots(figsize=(12, 6))
    for name, values in tss:
        if average:
            smoothed = moving_average(values, style.avg_range)
            if style.approx_average:
                # fit an approximation curve through the averaged points
                xs = numpy.arange(len(smoothed))
                smoothed = approximate_curve(xs, smoothed, xs, style.curve_approx_level)
            values = smoothed
        ax.plot(values, label=name)
    ax.set_title(title)
    ax.set_ylabel(ylabel)
    ax.set_xlabel(xlabel)
    apply_style(style, eng=True)
306
307
@provide_plot
def plot_hmap_from_2d(data2d: numpy.ndarray,
                      title: str, ylabel: str, xlabel: str = 'time, s', bins: numpy.ndarray = None,
                      colors: Any = ColorProfile, style: Any = StyleProfile) -> None:
    """Draw a heatmap with an attached Y histogram from a 2D data array."""
    flattened, ranges = hmap_from_2d(data2d)
    heat_ax, _ = plot_hmap_with_y_histo(flattened, ranges, bins=bins)
    heat_ax.set_title(title)
    heat_ax.set_xlabel(xlabel)
    heat_ax.set_ylabel(ylabel)
317
318
@provide_plot
def plot_v_over_time(title: str,
                     units: str,
                     ts: TimeSeries,
                     plot_avg_dev: bool = True,
                     plot_points: bool = True,
                     colors: Any = ColorProfile, style: Any = StyleProfile) -> None:
    """Scatter-plot a time series with outliers marked, optionally overlaying
    a moving average curve with a +/- stdev band."""

    min_time = min(ts.times)

    # convert timestamps into seconds relative to the test start
    coef = float(unit_conversion_coef(ts.time_units, 's'))
    time_points = numpy.array([(val_time - min_time) * coef for val_time in ts.times])

    # split data into normal points, visible outliers, and extreme outliers
    # which are hidden from the plot entirely
    outliers_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_q_nd)
    outliers_4q_idxs = find_ouliers_ts(ts.data, cut_range=style.outliers_hide_q_nd)
    normal_idxs = numpy.logical_not(outliers_idxs)
    outliers_idxs = outliers_idxs & numpy.logical_not(outliers_4q_idxs)
    # hidden_outliers_count = numpy.count_nonzero(outliers_4q_idxs)

    data = ts.data[normal_idxs]
    data_times = time_points[normal_idxs]
    outliers = ts.data[outliers_idxs]
    outliers_times = time_points[outliers_idxs]

    if plot_points:
        # dim raw points when the average curve will be drawn on top of them
        alpha = colors.noise_alpha if plot_avg_dev else 1.0
        plt.plot(data_times, data, style.point_shape,
                 color=colors.primary_color, alpha=alpha, label="Data")
        plt.plot(outliers_times, outliers, style.err_point_shape,
                 color=colors.err_color, label="Outliers")

    has_negative_dev = False
    plus_minus = "\xb1"

    if plot_avg_dev and len(data) < style.avg_range * 2:
        # not enough points for a meaningful moving average - skip it
        logger.warning("Array %r to small to plot average over %s points", title, style.avg_range)
    elif plot_avg_dev:
        avg_vals = moving_average(data, style.avg_range)
        dev_vals = moving_dev(data, style.avg_range)
        avg_times = moving_average(data_times, style.avg_range)

        if style.approx_average:
            avg_vals = approximate_curve(avg_times, avg_vals, avg_times, style.curve_approx_level)
            dev_vals = approximate_curve(avg_times, dev_vals, avg_times, style.curve_approx_level)

        plt.plot(avg_times, avg_vals, c=colors.suppl_color1, label="Average")

        # +/- dev_range_x * stdev band around the average; drop the fractional
        # part from the legend label when dev_range_x is effectively integral
        low_vals_dev = avg_vals - dev_vals * style.dev_range_x
        hight_vals_dev = avg_vals + dev_vals * style.dev_range_x
        if style.dev_range_x - int(style.dev_range_x) < 0.01:
            plt.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                     label="{}{}*stdev".format(plus_minus, int(style.dev_range_x)))
        else:
            plt.plot(avg_times, low_vals_dev, c=colors.suppl_color2,
                     label="{}{}*stdev".format(plus_minus, style.dev_range_x))
        plt.plot(avg_times, hight_vals_dev, c=colors.suppl_color2)
        has_negative_dev = low_vals_dev.min() < 0

    plt.xlim(-5, max(time_points) + 5)
    plt.xlabel("Time, seconds from test begin")

    if plot_avg_dev:
        plt.ylabel("{}. Average and {}stddev over {} points".format(units, plus_minus, style.avg_range))
    else:
        plt.ylabel(units)

    plt.title(title)

    # don't let a below-zero deviation band drag the Y axis negative
    if has_negative_dev:
        plt.gca().set_ylim(bottom=0)

    apply_style(style, eng=True)
392
393
@provide_plot
def plot_lat_over_time(title: str, ts: TimeSeries,
                       ylabel: str,
                       samples: int = 5,
                       colors: Any = ColorProfile,
                       style: Any = StyleProfile) -> None:
    """Show how the latency distribution evolves over time: one violin (or box)
    plot per time interval, built from the 2D latency histogram series ``ts``."""

    min_time = min(ts.times)
    # seconds relative to test start; assumes ts.times are in ms (+500 rounds
    # to the nearest second) - TODO confirm time units
    times = [int(tm - min_time + 500) // 1000 for tm in ts.times]
    ts_len = len(times)
    # split the whole run into ``samples`` roughly equal intervals
    step = ts_len / samples
    points = [times[int(i * step + 0.5)] for i in range(samples)]
    points.append(times[-1])
    bounds = list(zip(points[:-1], points[1:]))
    agg_data = []
    positions = []
    labels = []

    for begin, end in bounds:
        # latency histogram aggregated over the whole interval
        agg_hist = ts.data[begin:end].sum(axis=0)

        if style.violin_instead_of_box:
            # cut outliers
            idx1, idx2 = hist_outliers_perc(agg_hist, style.outliers_lat)
            agg_hist = agg_hist[idx1:idx2]
            curr_bins_vals = ts.histo_bins[idx1:idx2]

            # scale down counts so the synthesized point cloud stays below
            # violin_point_count points
            correct_coef = style.violin_point_count / sum(agg_hist)
            if correct_coef > 1:
                correct_coef = 1
        else:
            curr_bins_vals = ts.histo_bins
            correct_coef = 1

        # expand the histogram back into individual samples, since
        # violinplot/boxplot expect raw values, not (bin, count) pairs
        vals = numpy.empty(shape=[numpy.sum(agg_hist)], dtype='float32')
        cidx = 0

        non_zero, = agg_hist.nonzero()
        for pos in non_zero:
            count = int(agg_hist[pos] * correct_coef + 0.5)

            if count != 0:
                vals[cidx: cidx + count] = curr_bins_vals[pos]
                cidx += count

        agg_data.append(vals[:cidx])
        positions.append((end + begin) / 2)
        labels.append(str((end + begin) // 2))

    if style.violin_instead_of_box:
        patches = plt.violinplot(agg_data,
                                 positions=positions,
                                 showmeans=True,
                                 showmedians=True,
                                 widths=step / 2)

        patches['cmeans'].set_color("blue")
        patches['cmedians'].set_color("green")
        if style.legend_for_eng:
            legend_location = "center left"
            legend_bbox_to_anchor = (1.03, 0.81)
            plt.legend([patches['cmeans'], patches['cmedians']], ["mean", "median"],
                       loc=legend_location, bbox_to_anchor=legend_bbox_to_anchor)
    else:
        plt.boxplot(agg_data, 0, '', positions=positions, labels=labels, widths=step / 4)

    plt.xlim(min(times), max(times))
    plt.ylabel(ylabel)
    plt.xlabel("Time, seconds from test begin, sampled for ~{} seconds".format(int(step)))
    plt.title(title)
    apply_style(style, eng=True, no_legend=True)
465
466
@provide_plot
def plot_histo_heatmap(title: str,
                       ts: TimeSeries,
                       ylabel: str,
                       xlabel: str = "time, s",
                       colors: Any = ColorProfile,
                       style: Any = StyleProfile) -> None:
    """Render a 2D histogram time series as a heatmap (time on X, histogram
    bins on Y) with the overall histogram attached on the right side."""

    # only histogram-based ts can be plotted
    assert len(ts.data.shape) == 2

    # Find global outliers. As load is expected to be stable during one job
    # outliers range can be detected globally
    total_hist = ts.data.sum(axis=0)
    idx1, idx2 = hist_outliers_perc(total_hist,
                                    bounds_perc=style.outliers_lat,
                                    min_bins_left=style.hm_hist_bins_count)

    # merge outliers with most close non-outliers cell
    orig_data = ts.data[:, idx1:idx2].copy()
    if idx1 > 0:
        orig_data[:, 0] += ts.data[:, :idx1].sum(axis=1)

    if idx2 < ts.data.shape[1]:
        orig_data[:, -1] += ts.data[:, idx2:].sum(axis=1)

    bins_vals = ts.histo_bins[idx1:idx2]

    # rebin over X axis
    # aggregate some lines in ts.data to plot not more than style.hm_x_slots x bins
    agg_idx = float(len(orig_data)) / style.hm_x_slots
    if agg_idx >= 2:
        data = numpy.zeros([style.hm_x_slots, orig_data.shape[1]], dtype=numpy.float32)  # type: List[numpy.ndarray]
        # NOTE(review): ``next`` shadows the builtin here - rename on next rework
        next = agg_idx
        count = 0
        data_idx = 0
        for idx, arr in enumerate(orig_data):
            if idx >= next:
                # average the accumulated rows and move to the next slot
                data[data_idx] /= count
                data_idx += 1
                next += agg_idx
                count = 0
            data[data_idx] += arr
            count += 1

        if count > 1:
            data[-1] /= count
    else:
        data = orig_data

    # rebin over Y axis
    # =================

    # don't using rebin_histogram here, as we need apply same bins for many arrays
    step = (bins_vals[-1] - bins_vals[0]) / style.hm_hist_bins_count
    new_bins_edges = numpy.arange(style.hm_hist_bins_count) * step + bins_vals[0]
    bin_mapping = numpy.clip(numpy.searchsorted(new_bins_edges, bins_vals) - 1, 0, len(new_bins_edges) - 1)

    # map origin bins ranges to heatmap bins, iterate over rows
    cmap = []
    for line in data:
        curr_bins = [0] * style.hm_hist_bins_count
        for idx, count in zip(bin_mapping, line):
            curr_bins[idx] += count
        cmap.append(curr_bins)
    ncmap = numpy.array(cmap)

    # plot data
    # =========

    fig = plt.figure(figsize=(12, 6))
    boxes = 3
    gs = gridspec.GridSpec(1, boxes)
    ax = fig.add_subplot(gs[0, :boxes - 1])

    # tick labels: bin centers, the last bin is open-ended ("N+")
    labels = list(map(float2str, (new_bins_edges[:-1] + new_bins_edges[1:]) / 2)) + \
             [float2str(new_bins_edges[-1]) + "+"]
    seaborn.heatmap(ncmap[:,::-1].T, xticklabels=False, cmap="Blues", ax=ax)
    ax.set_yticklabels(labels, rotation='horizontal')
    ax.set_xticklabels([])

    # plot overall histogram
    # =======================

    ax2 = fig.add_subplot(gs[0, boxes - 1])
    ax2.set_yticklabels([])
    ax2.set_xticklabels([])

    histo = ncmap.sum(axis=0).reshape((-1,))
    ax2.set_ylim(top=histo.size, bottom=0)
    # NOTE(review): the ``axes=`` kwarg of pyplot.barh is non-standard for
    # recent matplotlib - confirm it still routes to ax2
    plt.barh(numpy.arange(histo.size) + 0.5, width=histo, axes=ax2)

    # Set labels
    # ==========

    ax.set_title(title)
    ax.set_ylabel(ylabel)
    ax.set_xlabel(xlabel)
565
koder aka kdanilova732a602017-02-01 20:29:56 +0200566
koder aka kdanilov108ac362017-01-19 20:17:16 +0200567
@provide_plot
def io_chart(title: str,
             legend: str,
             iosums: List[IOSummary],
             iops_log_spine: bool = False,
             lat_log_spine: bool = False,
             colors: Any = ColorProfile,
             style: Any = StyleProfile) -> None:
    """Bar chart of IOPS/BW per queue depth with latency lines on a twin axis.

    One bar per IOSummary, with deviation and confidence error bars;
    median/95-percentile latency is drawn against the right spine, and
    optionally a third spine shows the complementary unit (IOPS <-> BW).
    """

    # -------------- MAGIC VALUES ---------------------
    # IOPS bar width
    width = 0.35

    # offset from center of bar to deviation/confidence range indicator
    err_x_offset = 0.05

    # extra space on top and bottom, comparing to maximal tight layout
    extra_y_space = 0.05

    # additional spine for BW/IOPS on left side of plot
    extra_io_spine_x_offset = -0.1

    # extra space on left and right sides
    extra_x_space = 0.5

    # legend location settings
    legend_location = "center left"
    legend_bbox_to_anchor = (1.1, 0.81)

    # plot box size adjust (only plot, not spines and legend)
    plot_box_adjust = {'right': 0.66}
    # -------------- END OF MAGIC VALUES ---------------------

    block_size = iosums[0].block_size
    lc = len(iosums)
    xt = list(range(1, lc + 1))

    # x coordinate of middle of the bars
    xpos = [i - width / 2 for i in xt]

    # import matplotlib.gridspec as gridspec
    # gs = gridspec.GridSpec(1, 3, width_ratios=[1, 4, 1])
    # p1 = plt.subplot(gs[1])

    fig, p1 = plt.subplots(figsize=StyleProfile.figsize)

    # plot IOPS/BW bars; large blocks are reported as bandwidth, small as IOPS
    if block_size >= LARGE_BLOCKS:
        iops_primary = False
        coef = MiB2KiB
        p1.set_ylabel("BW (MiBps)")
    else:
        iops_primary = True
        coef = block_size
        p1.set_ylabel("IOPS")

    p1.bar(xpos, [iosum.bw.average / coef for iosum in iosums], width=width, color=colors.box_color, label=legend)

    # set correct x limits for primary IO spine
    min_io = min(iosum.bw.average - iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    max_io = max(iosum.bw.average + iosum.bw.deviation * style.dev_range_x for iosum in iosums)
    border = (max_io - min_io) * extra_y_space
    io_lims = (min_io - border, max_io + border)

    p1.set_ylim(io_lims[0] / coef, io_lims[-1] / coef)

    # plot deviation and confidence error ranges
    err1_legend = err2_legend = None
    for pos, iosum in zip(xpos, iosums):
        err1_legend = p1.errorbar(pos + width / 2 - err_x_offset,
                                  iosum.bw.average / coef,
                                  iosum.bw.deviation * style.dev_range_x / coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color1)  # 'magenta'
        err2_legend = p1.errorbar(pos + width / 2 + err_x_offset,
                                  iosum.bw.average / coef,
                                  iosum.bw.confidence / coef,
                                  alpha=colors.subinfo_alpha,
                                  color=colors.suppl_color2)  # 'teal'

    if style.grid:
        p1.grid(True)

    handles1, labels1 = p1.get_legend_handles_labels()

    handles1 += [err1_legend, err2_legend]
    labels1 += ["{}% dev".format(style.dev_perc),
                "{}% conf".format(int(100 * iosums[0].bw.confidence_level))]

    # extra y spine for latency on right side
    p2 = p1.twinx()

    # plot median and 95 perc latency
    p2.plot(xt, [iosum.lat.perc_50 for iosum in iosums], label="lat med")
    p2.plot(xt, [iosum.lat.perc_95 for iosum in iosums], label="lat 95%")

    # limit and label x spine
    plt.xlim(extra_x_space, lc + extra_x_space)
    plt.xticks(xt, ["{0} * {1}".format(iosum.qd, iosum.nodes_count) for iosum in iosums])
    p1.set_xlabel("QD * Test node count")

    # apply log scales for X spines, if set
    if iops_log_spine:
        p1.set_yscale('log')

    if lat_log_spine:
        p2.set_yscale('log')

    # extra y spine for BW/IOPS on left side
    if style.extra_io_spine:
        p3 = p1.twinx()
        if iops_log_spine:
            p3.set_yscale('log')

        # third spine shows whichever unit is NOT on the primary spine
        if iops_primary:
            p3.set_ylabel("BW (MiBps)")
            p3.set_ylim(io_lims[0] / MiB2KiB, io_lims[1] / MiB2KiB)
        else:
            p3.set_ylabel("IOPS")
            p3.set_ylim(io_lims[0] / block_size, io_lims[1] / block_size)

        p3.spines["left"].set_position(("axes", extra_io_spine_x_offset))
        p3.spines["left"].set_visible(True)
        p3.yaxis.set_label_position('left')
        p3.yaxis.set_ticks_position('left')

    p2.set_ylabel("Latency (ms)")

    plt.title(title)

    # legend box
    handles2, labels2 = p2.get_legend_handles_labels()
    plt.legend(handles1 + handles2, labels1 + labels2,
               loc=legend_location,
               bbox_to_anchor=legend_bbox_to_anchor)

    # adjust central box size to fit legend
    plt.subplots_adjust(**plot_box_adjust)
    apply_style(style, eng=False, no_legend=True)
707
708
709# -------------------- REPORT HELPERS --------------------------------------------------------------------------------
710
711
class HTMLBlock:
    """A rendered piece of the HTML report plus the resources it needs.

    Instances are compared/ordered solely by ``order_attr`` so report
    fragments can be sorted into their final position.
    """
    data = None  # type: str
    js_links = []  # type: List[str]
    css_links = []  # type: List[str]
    order_attr = None  # type: Any

    def __init__(self, data: str, order_attr: Any = None) -> None:
        self.data = data
        self.order_attr = order_attr

    def __eq__(self, o: object) -> bool:
        # BUG FIX: previously any non-HTMLBlock operand raised AttributeError;
        # defer to the other operand instead, per the data model protocol
        if not isinstance(o, HTMLBlock):
            return NotImplemented
        return o.order_attr == self.order_attr

    def __lt__(self, o: object) -> bool:
        if not isinstance(o, HTMLBlock):
            return NotImplemented
        # equivalent to the original "o.order_attr > self.order_attr"
        return self.order_attr < o.order_attr
727
728
class Table:
    """Accumulates rows and renders them as a captionless HTML table."""

    def __init__(self, header: List[str]) -> None:
        self.header = header  # column captions
        self.data = []        # rows, appended via add_line

    def add_line(self, values: List[str]) -> None:
        """Append one row of cell values."""
        self.data.append(values)

    def html(self):
        """Render accumulated header + rows via the html helper module."""
        caption = ""
        return html.table(caption, self.header, self.data)
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200739
740
koder aka kdanilov108ac362017-01-19 20:17:16 +0200741class Menu1st:
742 engineering = "Engineering"
743 summary = "Summary"
koder aka kdanilova732a602017-02-01 20:29:56 +0200744 per_job = "Per Job"
koder aka kdanilov108ac362017-01-19 20:17:16 +0200745
746
class Menu2ndEng:
    # second-level menu captions under the "Engineering" section
    iops_time = "IOPS(time)"
    hist = "IOPS/lat overall histogram"
    lat_time = "Lat(time)"
751
752
class Menu2ndSumm:
    # second-level menu captions under the "Summary" section
    io_lat_qd = "IO & Lat vs QD"
755
756
koder aka kdanilova732a602017-02-01 20:29:56 +0200757menu_1st_order = [Menu1st.summary, Menu1st.engineering, Menu1st.per_job]
koder aka kdanilov108ac362017-01-19 20:17:16 +0200758
759
760# -------------------- REPORTS --------------------------------------------------------------------------------------
761
762
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200763class Reporter(metaclass=abc.ABCMeta):
koder aka kdanilova732a602017-02-01 20:29:56 +0200764 suite_types = set() # type: Set[str]
765
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200766 @abc.abstractmethod
koder aka kdanilova732a602017-02-01 20:29:56 +0200767 def get_divs(self, suite: SuiteConfig, storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
768 pass
769
770
class JobReporter(metaclass=abc.ABCMeta):
    """Base class for per-job report generators."""
    # suite type names (e.g. 'fio') this reporter can handle.
    # CONSISTENCY FIX: was misspelled 'suite_type', while Reporter and every
    # JobReporter subclass in this module use 'suite_types'
    suite_types = set()  # type: Set[str]
    # deprecated alias under the old name, kept for backward compatibility
    suite_type = suite_types

    @abc.abstractmethod
    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 storage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
        """Yield (1st-level menu, 2nd-level menu, html block) tuples for one job."""
        pass
780
781
782# Main performance report
class PerformanceSummary(Reporter):
    """Aggregated summary for storage (placeholder - not implemented yet)"""
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200785
786
787# Main performance report
koder aka kdanilov108ac362017-01-19 20:17:16 +0200788class IO_QD(Reporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200789 """Creates graph, which show how IOPS and Latency depend on QD"""
koder aka kdanilova732a602017-02-01 20:29:56 +0200790 suite_types = {'fio'}
791
792 def get_divs(self, suite: SuiteConfig, rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
793 ts_map = defaultdict(list) # type: Dict[FioJobParams, List[Tuple[SuiteConfig, FioJobConfig]]]
794 str_summary = {} # type: Dict[FioJobParams, List[IOSummary]]
koder aka kdanilov108ac362017-01-19 20:17:16 +0200795 for job in rstorage.iter_job(suite):
796 fjob = cast(FioJobConfig, job)
koder aka kdanilova732a602017-02-01 20:29:56 +0200797 fjob_no_qd = cast(FioJobParams, fjob.params.copy(qd=None))
798 str_summary[fjob_no_qd] = (fjob_no_qd.summary, fjob_no_qd.long_summary)
799 ts_map[fjob_no_qd].append((suite, fjob))
koder aka kdanilov108ac362017-01-19 20:17:16 +0200800
koder aka kdanilova732a602017-02-01 20:29:56 +0200801 for tpl, suites_jobs in ts_map.items():
802 if len(suites_jobs) > StyleProfile.min_iops_vs_qd_jobs:
803 iosums = [make_iosum(rstorage, suite, job) for suite, job in suites_jobs]
804 iosums.sort(key=lambda x: x.qd)
805 summary, summary_long = str_summary[tpl]
806 ds = DataSource(suite_id=suite.storage_id,
807 job_id=summary,
808 node_id=AGG_TAG,
809 sensor="fio",
810 dev=AGG_TAG,
811 metric="io_over_qd",
812 tag="svg")
koder aka kdanilov108ac362017-01-19 20:17:16 +0200813
koder aka kdanilova732a602017-02-01 20:29:56 +0200814 title = "IOPS, BW, Lat vs. QD.\n" + summary_long
815 fpath = io_chart(rstorage, ds, title=title, legend="IOPS/BW", iosums=iosums) # type: str
816 yield Menu1st.summary, Menu2ndSumm.io_lat_qd, HTMLBlock(html.img(fpath))
koder aka kdanilov7f59d562016-12-26 01:34:23 +0200817
818
819# Linearization report
class IOPS_Bsize(Reporter):
    """Creates graphs, which show how IOPS and Latency depend on block size (placeholder - not implemented yet)"""
822
823
824# IOPS/latency distribution
koder aka kdanilova732a602017-02-01 20:29:56 +0200825class StatInfo(JobReporter):
826 """Statistic info for job results"""
827 suite_types = {'fio'}
828
829 def get_divs(self, suite: SuiteConfig, job: JobConfig,
830 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
831
832 fjob = cast(FioJobConfig, job)
833 io_sum = make_iosum(rstorage, suite, fjob)
834
835 summary_data = [
836 ["Summary", job.params.long_summary],
837 ]
838
839 res = html.H2(html.center("Test summary"))
840 res += html.table("Test info", None, summary_data)
841 stat_data_headers = ["Name", "Average ~ Dev", "Conf interval", "Mediana", "Mode", "Kurt / Skew", "95%", "99%"]
842
843 KB = 1024
844 bw_data = ["Bandwidth",
845 "{}Bps ~ {}Bps".format(b2ssize(io_sum.bw.average * KB), b2ssize(io_sum.bw.deviation * KB)),
846 b2ssize(io_sum.bw.confidence * KB) + "Bps",
847 b2ssize(io_sum.bw.perc_50 * KB) + "Bps",
848 "-",
849 "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
850 b2ssize(io_sum.bw.perc_5 * KB) + "Bps",
851 b2ssize(io_sum.bw.perc_1 * KB) + "Bps"]
852
853 iops_data = ["IOPS",
854 "{}IOPS ~ {}IOPS".format(b2ssize_10(io_sum.bw.average / fjob.bsize),
855 b2ssize_10(io_sum.bw.deviation / fjob.bsize)),
856 b2ssize_10(io_sum.bw.confidence / fjob.bsize) + "IOPS",
857 b2ssize_10(io_sum.bw.perc_50 / fjob.bsize) + "IOPS",
858 "-",
859 "{:.2f} / {:.2f}".format(io_sum.bw.kurt, io_sum.bw.skew),
860 b2ssize_10(io_sum.bw.perc_5 / fjob.bsize) + "IOPS",
861 b2ssize_10(io_sum.bw.perc_1 / fjob.bsize) + "IOPS"]
862
863 MICRO = 1000000
864 # latency
865 lat_data = ["Latency",
866 "-",
867 "-",
868 b2ssize_10(io_sum.bw.perc_50 / MICRO) + "s",
869 "-",
870 "-",
871 b2ssize_10(io_sum.bw.perc_95 / MICRO) + "s",
872 b2ssize_10(io_sum.bw.perc_99 / MICRO) + "s"]
873
874 # sensor usage
875 stat_data = [iops_data, bw_data, lat_data]
876 res += html.table("Load stats info", stat_data_headers, stat_data)
877
878 resource_headers = ["Resource", "Usage count", "Proportional to work done"]
879
880 io_transfered = io_sum.bw.data.sum() * KB
881 resource_data = [
882 ["IO made", b2ssize_10(io_transfered / KB / fjob.bsize) + "OP", "-"],
883 ["Data transfered", b2ssize(io_transfered) + "B", "-"]
884 ]
885
koder aka kdanilova732a602017-02-01 20:29:56 +0200886 storage = rstorage.storage
887 nodes = storage.load_list(NodeInfo, 'all_nodes') # type: List[NodeInfo]
888
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300889 trange = (job.reliable_info_range[0] // 1000, job.reliable_info_range[1] // 1000)
koder aka kdanilova732a602017-02-01 20:29:56 +0200890 ops_done = io_transfered / fjob.bsize / KB
891
892 all_metrics = [
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300893 ("Test nodes net send", 'net-io', 'send_bytes', b2ssize, ['testnode'], "B", io_transfered),
894 ("Test nodes net recv", 'net-io', 'recv_bytes', b2ssize, ['testnode'], "B", io_transfered),
koder aka kdanilova732a602017-02-01 20:29:56 +0200895
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300896 ("Test nodes disk write", 'block-io', 'sectors_written', b2ssize, ['testnode'], "B", io_transfered),
897 ("Test nodes disk read", 'block-io', 'sectors_read', b2ssize, ['testnode'], "B", io_transfered),
898 ("Test nodes writes", 'block-io', 'writes_completed', b2ssize_10, ['testnode'], "OP", ops_done),
899 ("Test nodes reads", 'block-io', 'reads_completed', b2ssize_10, ['testnode'], "OP", ops_done),
koder aka kdanilova732a602017-02-01 20:29:56 +0200900
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300901 ("Storage nodes net send", 'net-io', 'send_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),
902 ("Storage nodes net recv", 'net-io', 'recv_bytes', b2ssize, STORAGE_ROLES, "B", io_transfered),
koder aka kdanilova732a602017-02-01 20:29:56 +0200903
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300904 ("Storage nodes disk write", 'block-io', 'sectors_written', b2ssize, STORAGE_ROLES, "B", io_transfered),
905 ("Storage nodes disk read", 'block-io', 'sectors_read', b2ssize, STORAGE_ROLES, "B", io_transfered),
906 ("Storage nodes writes", 'block-io', 'writes_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
907 ("Storage nodes reads", 'block-io', 'reads_completed', b2ssize_10, STORAGE_ROLES, "OP", ops_done),
koder aka kdanilova732a602017-02-01 20:29:56 +0200908 ]
909
910 all_agg = {}
911
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300912 for descr, sensor, metric, ffunc, roles, units, denom in all_metrics:
koder aka kdanilova732a602017-02-01 20:29:56 +0200913 if not nodes:
914 continue
915
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300916 res_ts = summ_sensors(rstorage, roles, sensor=sensor, metric=metric, time_range=trange)
917 if res_ts is None:
koder aka kdanilova732a602017-02-01 20:29:56 +0200918 continue
919
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300920 agg = res_ts.data.sum()
koder aka kdanilova732a602017-02-01 20:29:56 +0200921 resource_data.append([descr, ffunc(agg) + units, "{:.1f}".format(agg / denom)])
922 all_agg[descr] = agg
923
koder aka kdanilova732a602017-02-01 20:29:56 +0200924 cums = [
925 ("Test nodes writes", "Test nodes reads", "Total test ops", b2ssize_10, "OP", ops_done),
926 ("Storage nodes writes", "Storage nodes reads", "Total storage ops", b2ssize_10, "OP", ops_done),
927 ("Storage nodes disk write", "Storage nodes disk read", "Total storage IO size", b2ssize,
928 "B", io_transfered),
929 ("Test nodes disk write", "Test nodes disk read", "Total test nodes IO size", b2ssize, "B", io_transfered),
930 ]
931
932 for name1, name2, descr, ffunc, units, denom in cums:
933 if name1 in all_agg and name2 in all_agg:
934 agg = all_agg[name1] + all_agg[name2]
935 resource_data.append([descr, ffunc(agg) + units, "{:.1f}".format(agg / denom)])
936
937 res += html.table("Resources usage", resource_headers, resource_data)
938
939 yield Menu1st.per_job, job.summary, HTMLBlock(res)
940
941
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +0300942# CPU load
class CPULoadPlot(JobReporter):
    """Plot CPU time breakdown (in %) over the test time for storage and test nodes."""
    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        # job.reliable_info_range is in ms, sensor data is indexed in seconds
        trange = (job.reliable_info_range[0] // 1000, job.reliable_info_range[1] // 1000)

        # plot CPU time
        for rt, roles in [('storage', STORAGE_ROLES), ('test', ['testnode'])]:
            cpu_ts = {}
            cpu_metrics = "idle guest iowait irq nice sirq steal sys user".split()
            for name in cpu_metrics:
                # NOTE(review): summ_sensors can return None when no data is found
                # (see its other call sites in this file); that would fail below -
                # confirm CPU sensors are always collected
                cpu_ts[name] = summ_sensors(rstorage, roles, sensor='system-cpu', metric=name, time_range=trange)

            # total CPU time per time slot, used to normalize each metric to %
            it = iter(cpu_ts.values())
            total_over_time = next(it).data.copy()
            for ts in it:
                total_over_time += ts.data

            fname = plot_simple_over_time(rstorage, cpu_ts['idle'].source(metric='allcpu', tag=rt + '.plt.svg'),
                                          tss=[(name, ts.data * 100 / total_over_time) for name, ts in cpu_ts.items()],
                                          average=True,
                                          ylabel="CPU time %",
                                          title="{} nodes CPU usage".format(rt.capitalize()))

            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
970
971
972# IO time and QD
class QDIOTimeHeatmap(JobReporter):
    """Per-device-group heatmaps over test time: IO queue depth, write block
    size and IO time per second, for storage/journal/test node devices."""
    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        # TODO: fix this hardcode, need to track what devices are actually used on test and storage nodes
        # use saved storage info in nodes

        journal_devs = None
        storage_devs = None
        test_nodes_devs = ['rbd0']

        # collect ceph journal/storage device names from node params; all
        # storage nodes are expected to have the same layout (asserted below)
        for node in find_nodes_by_roles(rstorage, STORAGE_ROLES):
            cjd = set(node.params['ceph_journal_devs'])
            if journal_devs is None:
                journal_devs = cjd
            else:
                assert journal_devs == cjd, "{!r} != {!r}".format(journal_devs, cjd)

            csd = set(node.params['ceph_storage_devs'])
            if storage_devs is None:
                storage_devs = csd
            else:
                assert storage_devs == csd, "{!r} != {!r}".format(storage_devs, csd)

        # NOTE(review): storage_nodes_devs is never used below - dead code?
        storage_nodes_devs = list(journal_devs) + list(storage_devs)
        # reliable_info_range is in ms, sensor data is indexed in seconds
        trange = (job.reliable_info_range[0] // 1000, job.reliable_info_range[1] // 1000)

        for name, devs, roles in [('storage', storage_devs, STORAGE_ROLES),
                                  ('journal', journal_devs, STORAGE_ROLES),
                                  ('test', test_nodes_devs, ['testnode'])]:
            # QD heatmap
            ioq2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                       metric='io_queue', time_range=trange)
            fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
                                                           job.storage_id,
                                                           AGG_TAG,
                                                           'block-io',
                                                           name,
                                                           metric='io_queue',
                                                           tag="hmap.svg"),
                                      ioq2d, ylabel="IO QD", title=name.capitalize() + " devs QD",
                                      bins=StyleProfile.qd_bins,
                                      xlabel='Time')  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))

            # Block size heatmap
            wc2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                      metric='writes_completed', time_range=trange)
            # avoid division by zero in time slots with no completed writes
            wc2d[wc2d < 1E-3] = 1
            sw2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                      metric='sectors_written', time_range=trange)
            # average write size per op; label says KiB - assumes sensor units
            # make sectors/write/1024 come out in KiB, TODO confirm
            data2d = sw2d / wc2d / 1024
            fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
                                                           job.storage_id,
                                                           AGG_TAG,
                                                           'block-io',
                                                           name,
                                                           metric='wr_block_size',
                                                           tag="hmap.svg"),
                                      data2d, ylabel="IO bsize, KiB", title=name.capitalize() + " write block size",
                                      xlabel='Time',
                                      bins=StyleProfile.block_size_bins)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))

            # iotime heatmap
            wtime2d = find_sensors_to_2d(rstorage, roles, sensor='block-io', devs=devs,
                                         metric='io_time', time_range=trange)
            fname = plot_hmap_from_2d(rstorage, DataSource(suite.storage_id,
                                                           job.storage_id,
                                                           AGG_TAG,
                                                           'block-io',
                                                           name,
                                                           metric='io_time',
                                                           tag="hmap.svg"),
                                      wtime2d, ylabel="IO time (ms) per second",
                                      title=name.capitalize() + " iotime",
                                      xlabel='Time',
                                      bins=StyleProfile.iotime_bins)  # type: str
            yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fname))
1054
1055
koder aka kdanilova732a602017-02-01 20:29:56 +02001056# IOPS/latency distribution
class IOHist(JobReporter):
    """Histogram of the measured bandwidth / IOPS distribution for one job."""
    suite_types = {'fio'}

    def get_divs(self,
                 suite: SuiteConfig,
                 job: JobConfig,
                 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:

        fio_job = cast(FioJobConfig, job)

        yield Menu1st.per_job, fio_job.summary, HTMLBlock(html.H2(html.center("Load histograms")))

        # latency histogram plotting is currently disabled
        # (it used get_aggregated(..., "lat") + calc_histo_stat_props + plot_hist)

        bw_ts = get_aggregated(rstorage, suite, fio_job, "bw")

        # large-block jobs are reported as bandwidth, small-block as IOPS
        if fio_job.bsize >= LARGE_BLOCKS:
            title, units = "BW distribution", "MiBps"
            bw_ts.data //= MiB2KiB
        else:
            title, units = "IOPS distribution", "IOPS"
            bw_ts.data //= fio_job.bsize

        stat_props = calc_norm_stat_props(bw_ts, bins_count=StyleProfile.hist_boxes)
        fpath = plot_hist(rstorage, bw_ts.source(tag='hist.svg'), title, units, stat_props)  # type: str
        yield Menu1st.per_job, fio_job.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001096
1097
koder aka kdanilov108ac362017-01-19 20:17:16 +02001098# IOPS/latency over test time for each job
koder aka kdanilova732a602017-02-01 20:29:56 +02001099class IOTime(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001100 """IOPS/latency during test"""
koder aka kdanilova732a602017-02-01 20:29:56 +02001101 suite_types = {'fio'}
koder aka kdanilov108ac362017-01-19 20:17:16 +02001102
koder aka kdanilova732a602017-02-01 20:29:56 +02001103 def get_divs(self,
1104 suite: SuiteConfig,
1105 job: JobConfig,
1106 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilov108ac362017-01-19 20:17:16 +02001107
koder aka kdanilova732a602017-02-01 20:29:56 +02001108 fjob = cast(FioJobConfig, job)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001109
koder aka kdanilova732a602017-02-01 20:29:56 +02001110 agg_io = get_aggregated(rstorage, suite, fjob, "bw")
1111 if fjob.bsize >= LARGE_BLOCKS:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001112 title = "Fio measured Bandwidth over time"
koder aka kdanilova732a602017-02-01 20:29:56 +02001113 units = "MiBps"
1114 agg_io.data //= MiB2KiB
1115 else:
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001116 title = "Fio measured IOPS over time"
koder aka kdanilova732a602017-02-01 20:29:56 +02001117 agg_io.data //= fjob.bsize
1118 units = "IOPS"
koder aka kdanilov108ac362017-01-19 20:17:16 +02001119
koder aka kdanilova732a602017-02-01 20:29:56 +02001120 fpath = plot_v_over_time(rstorage, agg_io.source(tag='ts.svg'), title, units, agg_io) # type: str
1121 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001122
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001123 agg_lat = get_aggregated(rstorage, suite, fjob, "lat").copy()
1124 TARGET_UNITS = 'ms'
1125 coef = unit_conversion_coef(agg_lat.units, TARGET_UNITS)
1126 agg_lat.histo_bins = agg_lat.histo_bins.copy() * float(coef)
1127 agg_lat.units = TARGET_UNITS
koder aka kdanilov108ac362017-01-19 20:17:16 +02001128
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001129 fpath = plot_lat_over_time(rstorage, agg_lat.source(tag='ts.svg'), "Latency",
1130 agg_lat, ylabel="Latency, " + agg_lat.units) # type: str
koder aka kdanilova732a602017-02-01 20:29:56 +02001131 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001132
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001133 fpath = plot_histo_heatmap(rstorage,
1134 agg_lat.source(tag='hmap.svg'),
1135 "Latency heatmap",
1136 agg_lat,
1137 ylabel="Latency, " + agg_lat.units,
1138 xlabel='Test time') # type: str
koder aka kdanilov108ac362017-01-19 20:17:16 +02001139
koder aka kdanilova732a602017-02-01 20:29:56 +02001140 yield Menu1st.per_job, fjob.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001141
1142
class ResourceUsage:
    """Aggregated IO / CPU / network usage counters for one test run."""

    def __init__(self, io_r_ops: int, io_w_ops: int, io_r_kb: int, io_w_kb: int) -> None:
        # block-device operation counts and transferred kilobytes
        self.io_r_ops = io_r_ops
        self.io_w_ops = io_w_ops
        self.io_r_kb = io_r_kb
        self.io_w_kb = io_w_kb

        # CPU usage counters, None until filled in by the caller
        self.cpu_used_user = None  # type: int
        self.cpu_used_sys = None  # type: int
        self.cpu_wait_io = None  # type: int

        # network usage counters, None until filled in by the caller
        self.net_send_packets = None  # type: int
        self.net_recv_packets = None  # type: int
        self.net_send_kb = None  # type: int
        self.net_recv_kb = None  # type: int
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001158
1159
1160# Cluster load over test time
koder aka kdanilova732a602017-02-01 20:29:56 +02001161class ClusterLoad(JobReporter):
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001162 """IOPS/latency during test"""
1163
koder aka kdanilova732a602017-02-01 20:29:56 +02001164 # TODO: units should came from sensor
koder aka kdanilov108ac362017-01-19 20:17:16 +02001165 storage_sensors = [
koder aka kdanilova732a602017-02-01 20:29:56 +02001166 ('block-io', 'reads_completed', "Read ops", 'iops'),
1167 ('block-io', 'writes_completed', "Write ops", 'iops'),
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001168 ('block-io', 'sectors_read', "Read kb", 'KB'),
1169 ('block-io', 'sectors_written', "Write kb", 'KB'),
koder aka kdanilov108ac362017-01-19 20:17:16 +02001170 ]
1171
koder aka kdanilova732a602017-02-01 20:29:56 +02001172 def get_divs(self,
1173 suite: SuiteConfig,
1174 job: JobConfig,
1175 rstorage: ResultStorage) -> Iterator[Tuple[str, str, HTMLBlock]]:
koder aka kdanilova732a602017-02-01 20:29:56 +02001176 yield Menu1st.per_job, job.summary, HTMLBlock(html.H2(html.center("Cluster load")))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001177
koder aka kdanilova732a602017-02-01 20:29:56 +02001178 # convert ms to s
1179 time_range = (job.reliable_info_range[0] // MS2S, job.reliable_info_range[1] // MS2S)
koder aka kdanilov108ac362017-01-19 20:17:16 +02001180
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001181 for sensor, metric, sensor_title, units in self.storage_sensors:
1182 ts = summ_sensors(rstorage, ['testnode'], sensor, metric, time_range)
koder aka kdanilova732a602017-02-01 20:29:56 +02001183 ds = DataSource(suite_id=suite.storage_id,
1184 job_id=job.storage_id,
1185 node_id="test_nodes",
1186 sensor=sensor,
1187 dev=AGG_TAG,
1188 metric=metric,
1189 tag="ts.svg")
koder aka kdanilov108ac362017-01-19 20:17:16 +02001190
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001191 data = ts.data if units != 'KB' else ts.data * float(unit_conversion_coef(ts.units, 'KB'))
1192
koder aka kdanilova732a602017-02-01 20:29:56 +02001193 ts = TimeSeries(name="",
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001194 times=numpy.arange(*time_range),
1195 data=data,
koder aka kdanilova732a602017-02-01 20:29:56 +02001196 raw=None,
kdanylov aka kodercdfcdaf2017-04-29 10:03:39 +03001197 units=units if ts.units is None else ts.units,
1198 time_units=ts.time_units,
1199 source=ds,
1200 histo_bins=ts.histo_bins)
kdanylov aka koder0e0cfcb2017-03-27 22:19:09 +03001201
koder aka kdanilova732a602017-02-01 20:29:56 +02001202 fpath = plot_v_over_time(rstorage, ds, sensor_title, sensor_title, ts=ts) # type: str
1203 yield Menu1st.per_job, job.summary, HTMLBlock(html.img(fpath))
koder aka kdanilov108ac362017-01-19 20:17:16 +02001204
1205
1206# Ceph cluster summary
1207class ResourceConsumption(Reporter):
1208 """Resources consumption report, only text"""
1209
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001210
1211# Node load over test time
1212class NodeLoad(Reporter):
1213 """IOPS/latency during test"""
1214
1215
1216# Ceph cluster summary
1217class CephClusterSummary(Reporter):
1218 """IOPS/latency during test"""
1219
1220
koder aka kdanilov7f59d562016-12-26 01:34:23 +02001221# TODO: Ceph operation breakout report
1222# TODO: Resource consumption for different type of test
1223
1224
koder aka kdanilov108ac362017-01-19 20:17:16 +02001225# ------------------------------------------ REPORT STAGES -----------------------------------------------------------
1226
1227
class HtmlReportStage(Stage):
    """Run all reporters over the stored results and assemble the HTML report."""

    priority = StepOrder.REPORT

    def run(self, ctx: TestRun) -> None:
        rstorage = ResultStorage(ctx.storage)

        job_reporters = [StatInfo(), IOTime(), IOHist(), ClusterLoad(), CPULoadPlot(),
                         QDIOTimeHeatmap()]  # type: List[JobReporter]
        reporters = []  # type: List[Reporter]

        # reporters = [IO_QD()]  # type: List[Reporter]
        # job_reporters = [ClusterLoad()]

        # report templates live next to the wally package sources
        root_dir = os.path.dirname(os.path.dirname(wally.__file__))
        doc_templ_path = os.path.join(root_dir, "report_templates/index.html")
        css_file_src = os.path.join(root_dir, "report_templates/main.css")

        # BUGFIX: close the template files instead of leaking descriptors
        # via bare open(...).read()
        with open(doc_templ_path, "rt") as fd:
            report_template = fd.read()
        with open(css_file_src, "rt") as fd:
            css_file = fd.read()

        menu_block = []  # type: List[str]
        content_block = []  # type: List[str]
        link_idx = 0

        # matplotlib.rcParams.update(ctx.config.reporting.matplotlib_params.raw())
        # ColorProfile.__dict__.update(ctx.config.reporting.colors.raw())
        # StyleProfile.__dict__.update(ctx.config.reporting.style.raw())

        # items[1st level menu][2nd level menu] -> list of html blocks
        items = defaultdict(lambda: defaultdict(list))  # type: Dict[str, Dict[str, List[HTMLBlock]]]
        DEBUG = False  # when True, only the first job of the first suite is processed
        # TODO: filter reporters
        for suite in rstorage.iter_suite(FioTest.name):
            all_jobs = list(rstorage.iter_job(suite))
            all_jobs.sort(key=lambda job: job.params)
            for job in all_jobs:
                for reporter in job_reporters:
                    logger.debug("Start reporter %s on job %s suite %s",
                                 reporter.__class__.__name__, job.summary, suite.test_type)
                    # BUGFIX: loop variable renamed from 'html', which shadowed
                    # the imported html helper module
                    for block, item, html_block in reporter.get_divs(suite, job, rstorage):
                        items[block][item].append(html_block)
                if DEBUG:
                    break

            for reporter in reporters:
                logger.debug("Start reporter %s on suite %s", reporter.__class__.__name__, suite.test_type)
                for block, item, html_block in reporter.get_divs(suite, rstorage):
                    items[block][item].append(html_block)

            if DEBUG:
                break

        logger.debug("Generating result html")

        # build the left-side menu and the content divs; menu_1st_order defines
        # section ordering (an unknown section name would raise ValueError here)
        for idx_1st, menu_1st in enumerate(sorted(items, key=lambda x: menu_1st_order.index(x))):
            menu_block.append(
                '<a href="#item{}" class="nav-group" data-toggle="collapse" data-parent="#MainMenu">{}</a>'
                .format(idx_1st, menu_1st)
            )
            menu_block.append('<div class="collapse" id="item{}">'.format(idx_1st))
            for menu_2nd in sorted(items[menu_1st]):
                menu_block.append('    <a href="#content{}" class="nav-group-item">{}</a>'
                                  .format(link_idx, menu_2nd))
                content_block.append('<div id="content{}">'.format(link_idx))
                content_block.extend("    " + x.data for x in items[menu_1st][menu_2nd])
                content_block.append('</div>')
                link_idx += 1
            menu_block.append('</div>')

        report = report_template.replace("{{{menu}}}", ("\n" + " " * 16).join(menu_block))
        report = report.replace("{{{content}}}", ("\n" + " " * 16).join(content_block))
        report_path = rstorage.put_report(report, "index.html")
        rstorage.put_report(css_file, "main.css")
        logger.info("Report is stored into %r", report_path)
1299 logger.info("Report is stored into %r", report_path)