import os
import pprint
import logging
from typing import cast, Iterator, Type, Optional, Any

import numpy

from cephlib.wally_storage import WallyDB
from cephlib.hlstorage import SensorStorageBase
from cephlib.statistic import StatProps
from cephlib.numeric_types import DataSource, TimeSeries
from cephlib.storage import Storage

from .suits.job import JobConfig
from .result_classes import SuiteConfig, IResultStorage
from .utils import StopTestError
from .suits.all_suits import all_suits

logger = logging.getLogger('wally')


def fill_path(path: str, **params) -> str:
    """Fill '{name}' placeholders in 'path'; parameters that are None are
    skipped, so their placeholders stay intact for later expansion."""
    for name, val in params.items():
        if val is not None:
            path = path.replace("{" + name + "}", val)
    return path

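# A usage sketch with hypothetical values - partially filling a template,
# keeping the unknown placeholder for later globbing:
#
#     fill_path("{suite_id}/{job_id}.cfg", suite_id="s0", job_id=None)
#     # -> "s0/{job_id}.cfg"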

class ResultStorage(IResultStorage, SensorStorageBase):
    """Store and retrieve wally test results, using the WallyDB path layout
    on top of SensorStorageBase."""
    def __init__(self, storage: Storage) -> None:
        SensorStorageBase.__init__(self, storage, WallyDB)

    # ------------- CHECK DATA IN STORAGE ----------------------------------------------------------------------------
    def check_plot_file(self, source: DataSource) -> Optional[str]:
        path = self.db_paths.plot.format(**source.__dict__)
        fpath = self.storage.get_fname(self.db_paths.report_root + path)
        return path if os.path.exists(fpath) else None

    # ------------- PUT DATA INTO STORAGE --------------------------------------------------------------------------
    def put_or_check_suite(self, suite: SuiteConfig) -> None:
        path = self.db_paths.suite_cfg.format(suite_id=suite.storage_id)
        if path in self.storage:
            db_cfg = self.storage.load(SuiteConfig, path)
            if db_cfg != suite:
                logger.error("Current suite %s config differs from the one stored at %s",
                             suite.test_type, path)
                logger.debug("Current:\n%s\nStorage:\n%s", pprint.pformat(suite), pprint.pformat(db_cfg))
                raise StopTestError()
        else:
            self.storage.put(suite, path)
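
    # Design note: a suite config is written only once; a re-run with a
    # different config for the same storage_id stops the test instead of
    # silently mixing results from incompatible runs.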

    def put_job(self, suite: SuiteConfig, job: JobConfig) -> None:
        path = self.db_paths.job_cfg.format(suite_id=suite.storage_id, job_id=job.storage_id)
        self.storage.put(job, path)

    def put_extra(self, data: bytes, source: DataSource) -> None:
        self.storage.put_raw(data, self.db_paths.ts.format(**source.__dict__))

    def put_stat(self, data: StatProps, source: DataSource) -> None:
        self.storage.put(data, self.db_paths.stat.format(**source.__dict__))

    def put_plot_file(self, data: bytes, source: DataSource) -> str:
        """Store a rendered plot and return the path to reference it from a report."""
        path = self.db_paths.plot.format(**source.__dict__)
        self.storage.put_raw(data, self.db_paths.report_root + path)
        return path

    def put_report(self, report: str, name: str) -> str:
        return self.storage.put_raw(report.encode(self.csv_file_encoding), self.db_paths.report_root + name)

    def put_txt_report(self, suite: SuiteConfig, report: str) -> None:
        path = self.db_paths.txt_report.format(suite_id=suite.storage_id)
        self.storage.put_raw(report.encode('utf8'), path)

    def put_job_info(self, suite: SuiteConfig, job: JobConfig, key: str, data: Any) -> None:
        path = self.db_paths.job_extra.format(suite_id=suite.storage_id, job_id=job.storage_id, tag=key)
        self.storage.put(data, path)

    # ------------- GET DATA FROM STORAGE --------------------------------------------------------------------------

    def get_stat(self, stat_cls: Type[StatProps], source: DataSource) -> StatProps:
        return self.storage.load(stat_cls, self.db_paths.stat.format(**source.__dict__))

    def get_txt_report(self, suite: SuiteConfig) -> Optional[str]:
        path = self.db_paths.txt_report.format(suite_id=suite.storage_id)
        if path in self.storage:
            return self.storage.get_raw(path).decode('utf8')
        return None

    def get_job_info(self, suite: SuiteConfig, job: JobConfig, key: str) -> Any:
        path = self.db_paths.job_extra.format(suite_id=suite.storage_id, job_id=job.storage_id, tag=key)
        return self.storage.get(path, None)

    # ------------- ITER OVER STORAGE ------------------------------------------------------------------------------

    def iter_suite(self, suite_type: Optional[str] = None) -> Iterator[SuiteConfig]:
        for is_file, suite_info_path, groups in self.iter_paths(self.db_paths.suite_cfg_r):
            assert is_file
            suite = self.storage.load(SuiteConfig, suite_info_path)
            assert suite.storage_id == groups['suite_id']
            if not suite_type or suite.test_type == suite_type:
                yield suite

    def iter_job(self, suite: SuiteConfig) -> Iterator[JobConfig]:
        job_glob = fill_path(self.db_paths.job_cfg_r, suite_id=suite.storage_id)
        job_config_cls = all_suits[suite.test_type].job_config_cls
        for is_file, path, groups in self.iter_paths(job_glob):
            assert is_file
            job = cast(JobConfig, self.storage.load(job_config_cls, path))
            assert job.storage_id == groups['job_id']
            yield job
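
    # A minimal iteration sketch (assumes results are already in storage;
    # 'fio' is shown as an example suite type):
    #
    #     for suite in rstorage.iter_suite('fio'):
    #         for job in rstorage.iter_job(suite):
    #             process(suite, job)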

    # ----------------- TS ------------------------------------------------------------------------------------------
    def get_ts(self, ds: DataSource) -> TimeSeries:
        path = self.db_paths.ts.format_map(ds.__dict__)
        (units, time_units), header2, content = self.storage.get_array(path)

        # the first column holds timestamps, the remaining columns hold data
        times = content[:, 0].copy()
        data = content[:, 1:]

        # 1D series are stored as a single data column - flatten them back
        if data.shape[1] == 1:
            data.shape = (data.shape[0],)

        return TimeSeries(data=data, times=times, source=ds, units=units, time_units=time_units, histo_bins=header2)

    def put_ts(self, ts: TimeSeries) -> None:
        assert ts.data.dtype == ts.times.dtype, \
            "Data type {!r} != time type {!r}".format(ts.data.dtype, ts.times.dtype)
        assert ts.data.dtype.kind == 'u', "Only unsigned ints are accepted"
        assert ts.source.tag == self.ts_arr_tag, \
            "Incorrect source tag {!r}, must be {!r}".format(ts.source.tag, self.ts_arr_tag)

        if ts.source.metric == 'lat':
            assert len(ts.data.shape) == 2, "Latency should be a 2d array"
            assert ts.histo_bins is not None, "Latency should have a non-empty histo_bins field"

        csv_path = self.db_paths.ts.format_map(ts.source.__dict__)
        header = [ts.units, ts.time_units]

        # store times as the first column, data as the remaining ones
        tv = ts.times.view().reshape((-1, 1))

        if len(ts.data.shape) == 1:
            dv = ts.data.view().reshape((ts.times.shape[0], -1))
        else:
            dv = ts.data

        result = numpy.concatenate((tv, dv), axis=1)
        self.storage.put_array(csv_path, result, header, header2=ts.histo_bins, append_on_exists=False)
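
    # Round-trip sketch (hypothetical values; assumes 'times' and 'samples' are
    # unsigned-int numpy arrays of the same dtype, and 'ds' is a DataSource
    # with tag == ts_arr_tag):
    #
    #     rstorage.put_ts(TimeSeries(data=samples, times=times, source=ds,
    #                                units='ms', time_units='s', histo_bins=None))
    #     same_ts = rstorage.get_ts(ds)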

    def iter_ts(self, **ds_parts) -> Iterator[DataSource]:
        return self.iter_objs(self.db_paths.ts_r, **ds_parts)
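

# A minimal construction sketch (assumes an already-opened cephlib Storage
# instance 'storage' and prepared SuiteConfig/JobConfig objects):
#
#     rstorage = ResultStorage(storage)
#     rstorage.put_or_check_suite(suite_cfg)
#     rstorage.put_job(suite_cfg, job_cfg)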