# Put all result preprocessing here:
# selection and aggregation.
#
# from io import BytesIO
# import logging
# from typing import Any
#
# from .stage import Stage, StepOrder
# from .test_run_class import TestRun
# from .statistic import calc_norm_stat_props, calc_histo_stat_props
# from .result_classes import StatProps, DataSource, TimeSeries
# from .hlstorage import ResultStorage
# from .suits.io.fio_hist import get_lat_vals, expected_lat_bins
# from .suits.io.fio import FioTest
# from .utils import StopTestError
#
# import matplotlib
# matplotlib.use('svg')  # select a non-interactive backend before pyplot is imported
# import matplotlib.pyplot as plt
#
#
# logger = logging.getLogger("wally")
#
#
# class CalcStatisticStage(Stage):
#     priority = StepOrder.TEST + 1
#
#     def run(self, ctx: TestRun) -> None:
#         rstorage = ResultStorage(ctx.storage)
#
#         for suite in rstorage.iter_suite(FioTest.name):
#             for job in rstorage.iter_job(suite):
#                 for ts in rstorage.iter_ts(suite, job):
#                     if ts.source.sensor == 'lat':
#                         # Latency is stored as a 2-D histogram: one row per measurement
#                         # interval, one column per latency bin.
#                         if ts.data.shape[1] != expected_lat_bins:
#                             logger.error("Sensor %s.%s on node %s has shape=%s. "
#                                          "Can only process sensors with shape=[X, %s].",
#                                          ts.source.dev, ts.source.sensor, ts.source.node_id,
#                                          ts.data.shape, expected_lat_bins)
#                             continue
#
#                         ts.bins_edges = get_lat_vals(ts.data.shape[1])
#                         stat_prop = calc_histo_stat_props(ts)  # type: StatProps
#
#                     elif len(ts.data.shape) != 1:
#                         logger.warning("Sensor %s.%s on node %s provides 2+D data. Can't process it.",
#                                        ts.source.dev, ts.source.sensor, ts.source.node_id)
#                         continue
#                     else:
#                         stat_prop = calc_norm_stat_props(ts)
#
#         # NOTE: stat_prop is computed but never stored; this raise aborts the stage.
#         raise StopTestError()
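#
#
# The dispatch above reduces to: 2-D latency histograms get their statistics from
# bin counts, while plain 1-D series get them straight from the samples. The sketch
# below illustrates that idea using numpy only and is kept commented out like the
# rest of this module. percentile_from_histo() and stat_props_for() are hypothetical
# helpers, not part of wally, and the real calc_histo_stat_props() /
# calc_norm_stat_props() compute considerably more than two percentiles.
#
# import numpy
#
# def percentile_from_histo(counts: numpy.ndarray, edges: numpy.ndarray, q: float) -> float:
#     """Approximate the q-th percentile (0 <= q <= 100) from histogram bin counts."""
#     cum = numpy.cumsum(counts)
#     idx = int(numpy.searchsorted(cum, cum[-1] * q / 100.0))
#     return float(edges[min(idx, len(edges) - 1)])
#
# def stat_props_for(data: numpy.ndarray, bin_edges: numpy.ndarray = None) -> dict:
#     """Pick the aggregation path by array shape, mirroring CalcStatisticStage."""
#     if data.ndim == 2:
#         # histogram series: one row per measurement interval, one column per bin
#         if bin_edges is None or data.shape[1] != len(bin_edges):
#             raise ValueError("2-D data requires matching bin edges")
#         counts = data.sum(axis=0)
#         return {"p50": percentile_from_histo(counts, bin_edges, 50.0),
#                 "p99": percentile_from_histo(counts, bin_edges, 99.0)}
#     elif data.ndim == 1:
#         # plain scalar series: percentiles straight from the samples
#         return {"p50": float(numpy.percentile(data, 50)),
#                 "p99": float(numpy.percentile(data, 99))}
#     raise ValueError("Can only process 1-D series or 2-D histograms")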