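"""
Result storage layer for wally.

Suite and job configs are stored through the Storage serializer; time series and
sensor samples are stored as CSV blobs addressed by the DB_paths templates below.
"""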
import os
import pprint
import logging
from typing import cast, Iterator, Tuple, Type, Dict, Optional, List

import numpy

from .suits.job import JobConfig
from .result_classes import SuiteConfig, TimeSeries, DataSource, StatProps, IResultStorage, ArrayData
from .storage import Storage
from .utils import StopTestError
from .suits.all_suits import all_suits


logger = logging.getLogger('wally')

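# regex fragments for the individual path components; DB_rr below wraps each of
# them into a named group, so storage paths can be parsed back into their fields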
class DB_re:
    # dots escaped so the pattern matches literal IP:port node ids
    node_id = r'\d+\.\d+\.\d+\.\d+:\d+'
    job_id = r'[-a-zA-Z0-9_]+_\d+'
    suite_id = r'[a-z_]+_\d+'
    sensor = r'[-a-z_]+'
    dev = r'[-a-zA-Z0-9_]+'
    tag = r'[a-z_.]+'
    metric = r'[a-z_.]+'

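# path templates for stored data; the *_r variants keep dots escaped for regex
# matching, while the plain variants below are meant for str.format() substitution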
class DB_paths:
    suite_cfg_r = r'results/{suite_id}\.info\.yml'

    job_root = r'results/{suite_id}\.{job_id}/'
    job_cfg_r = job_root + r'info\.yml'

    # time series data from the load tool; here 'sensor' is the tool name
    ts_r = job_root + r'{node_id}\.{sensor}\.{metric}\.{tag}'

    # statistical data for ts
    stat_r = job_root + r'{node_id}\.{sensor}\.{metric}\.stat\.yaml'

    # sensor data
    sensor_data_r = r'sensors/{node_id}_{sensor}\.{dev}\.{metric}\.{tag}'
    sensor_time_r = r'sensors/{node_id}_collected_at\.csv'

    report_root = 'report/'
    plot_r = r'{suite_id}\.{job_id}/{node_id}\.{sensor}\.{dev}\.{metric}\.{tag}'

    job_cfg = job_cfg_r.replace("\\.", '.')
    suite_cfg = suite_cfg_r.replace("\\.", '.')
    ts = ts_r.replace("\\.", '.')
    stat = stat_r.replace("\\.", '.')
    sensor_data = sensor_data_r.replace("\\.", '.')
    sensor_time = sensor_time_r.replace("\\.", '.')
    plot = plot_r.replace("\\.", '.')

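# name -> named-group regex, e.g. 'tag' -> r'(?P<tag>[a-z_.]+)'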
DB_rr = {name: r"(?P<{}>{})".format(name, rr)
         for name, rr in DB_re.__dict__.items()
         if not name.startswith("__")}


def fill_path(path: str, **params) -> str:
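    """Substitute the provided non-None values into {name} placeholders, leaving the rest intact."""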
    for name, val in params.items():
        if val is not None:
            path = path.replace("{" + name + "}", val)
    return path


class ResultStorage(IResultStorage):
    # TODO: check that all path components match the required patterns

    ts_header_size = 64
    ts_header_format = "!IIIcc"
    ts_arr_tag = 'csv'
    csv_file_encoding = 'ascii'

    def __init__(self, storage: Storage) -> None:
        self.storage = storage
        self.cache = {}  # type: Dict[str, Tuple[int, int, ArrayData]]

    def sync(self) -> None:
        self.storage.sync()

    # -----------------  SERIALIZATION / DESERIALIZATION  -----------------------------------------------------------
    def read_headers(self, fd) -> Tuple[str, List[str], List[str], Optional[numpy.ndarray]]:
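        """
        Parse the header line(s) of a stored CSV array. The first line holds: the data
        dtype, a 'true'/'false' flag for the presence of a second header, that header's
        dtype, and the remaining caller-supplied fields. The optional second line holds
        the header2 values (e.g. histogram bins).
        Returns (dtype, extra header fields, full first line, header2).
        """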
        header = fd.readline().decode(self.csv_file_encoding).rstrip().split(",")
        dtype, has_header2, header2_dtype, *ext_header = header

        if has_header2 == 'true':
            ln = fd.readline().decode(self.csv_file_encoding).strip()
            header2 = numpy.fromstring(ln, sep=',', dtype=header2_dtype)
        else:
            assert has_header2 == 'false', \
                "In file {} has_header2 is not 'true'/'false', but {!r}".format(fd.name, has_header2)
            header2 = None
        return dtype, ext_header, header, header2

    def load_array(self, path: str) -> ArrayData:
        """
        Load an array from a file. Should not be called directly.
        :param path: file path
        :return: ArrayData
        """
        with self.storage.get_fd(path, "rb") as fd:
            fd.seek(0, os.SEEK_SET)

            stats = os.fstat(fd.fileno())
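            # serve from cache only while the file is unchanged on disk (same size and atime)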
            if path in self.cache:
                size, atime, arr_info = self.cache[path]
                if size == stats.st_size and atime == stats.st_atime_ns:
                    return arr_info

            data_dtype, header, _, header2 = self.read_headers(fd)
            assert data_dtype == 'uint64', path
            dt = fd.read().decode(self.csv_file_encoding).strip()

        if len(dt) != 0:
            arr = numpy.fromstring(dt.replace("\n", ','), sep=',', dtype=data_dtype)
            lines = dt.count("\n") + 1
            assert len(set(ln.count(',') for ln in dt.split("\n"))) == 1, \
                "Data lines in {!r} have different element count".format(path)
            arr.shape = [lines] if lines == arr.size else [lines, -1]
        else:
            arr = None

        arr_data = ArrayData(header, header2, arr)
        self.cache[path] = (stats.st_size, stats.st_atime_ns, arr_data)
        return arr_data

    def put_array(self, path: str, data: numpy.array, header: List[str], header2: numpy.ndarray = None,
                  append_on_exists: bool = False) -> None:

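        # first header line layout: data dtype, 'true'/'false' flag for header2,
        # header2 dtype (empty when absent), then the caller-supplied header fields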
        header = [data.dtype.name] + \
                 (['false', ''] if header2 is None else ['true', header2.dtype.name]) + \
                 header

        exists = append_on_exists and path in self.storage
        vw = data.view().reshape((data.shape[0], 1)) if len(data.shape) == 1 else data
        mode = "cb" if not exists else "rb+"

        with self.storage.get_fd(path, mode) as fd:
            if exists:
                data_dtype, _, full_header, curr_header2 = self.read_headers(fd)

                assert data_dtype == data.dtype.name, \
                    "Path {!r}. Passed data type ({!r}) and current data type ({!r}) don't match"\
                        .format(path, data.dtype.name, data_dtype)

                assert header == full_header, \
                    "Path {!r}. Passed header ({!r}) and current header ({!r}) don't match"\
                        .format(path, header, full_header)

                # numpy arrays can't be compared with a bare '==' inside assert
                assert (header2 is None and curr_header2 is None) or \
                    numpy.array_equal(header2, curr_header2), \
                    "Path {!r}. Passed header2 != current header2: {!r}\n{!r}"\
                        .format(path, header2, curr_header2)

                fd.seek(0, os.SEEK_END)
            else:
                fd.write((",".join(header) + "\n").encode(self.csv_file_encoding))
                if header2 is not None:
                    fd.write((",".join(map(str, header2)) + "\n").encode(self.csv_file_encoding))

            numpy.savetxt(fd, vw, delimiter=',', newline="\n", fmt="%lu")

    def load_ts(self, ds: DataSource, path: str) -> TimeSeries:
        """
        Load a time series generated by fio or another load tool. Should not be called
        directly, use iter_ts instead.
        :param ds: data source
        :param path: path in data storage
        :return: TimeSeries
        """
        (units, time_units), header2, data = self.load_array(path)
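        # the first column holds timestamps, the remaining columns hold the data points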
        times = data[:, 0]
        ts_data = data[:, 1:]

        if ts_data.shape[1] == 1:
            ts_data.shape = (ts_data.shape[0],)

        return TimeSeries("{}.{}".format(ds.dev, ds.sensor),
                          raw=None,
                          data=ts_data,
                          times=times,
                          source=ds,
                          units=units,
                          time_units=time_units,
                          histo_bins=header2)

    def load_sensor_raw(self, ds: DataSource) -> bytes:
        path = DB_paths.sensor_data.format(**ds.__dict__)
        with self.storage.get_fd(path, "rb") as fd:
            return fd.read()

    def load_sensor(self, ds: DataSource) -> TimeSeries:
        # sensor data is always 1D and carries no histogram header
        path = DB_paths.sensor_time.format(**ds.__dict__)
        collect_header, must_be_none, collected_at = self.load_array(path)

        # drop the 'collection end' timestamps, keeping only collection start times;
        # .copy() is needed to really remove the 'collection end' elements, so that c_interpolate_.. works correctly
        collected_at = collected_at[::2].copy()

        # there must be no histogram for collected_at
        assert must_be_none is None, "Extra header2 {!r} in collected_at file at {!r}".format(must_be_none, path)
        assert collect_header == [ds.node_id, 'collected_at', 'us'], \
            "Unexpected collected_at header {!r} at {!r}".format(collect_header, path)
        assert len(collected_at.shape) == 1, "collected_at must be 1D at {!r}".format(path)

        data_path = DB_paths.sensor_data.format(**ds.__dict__)
        data_header, must_be_none, data = self.load_array(data_path)

        # there must be no histogram for any sensor
        assert must_be_none is None, "Extra header2 {!r} in sensor data file {!r}".format(must_be_none, data_path)

        data_units = data_header[2]
        assert data_header == [ds.node_id, ds.metric_fqdn, data_units], \
            "Unexpected data header {!r} at {!r}".format(data_header, data_path)
        assert len(data.shape) == 1, "Sensor data must be 1D at {!r}".format(data_path)

        return TimeSeries(ds.metric_fqdn,
                          raw=None,
                          data=data,
                          times=collected_at,
                          source=ds,
                          units=data_units,
                          time_units='us')

    # -------------  CHECK DATA IN STORAGE  --------------------------------------------------------------------------

    def check_plot_file(self, source: DataSource) -> Optional[str]:
        path = DB_paths.plot.format(**source.__dict__)
        fpath = self.storage.resolve_raw(DB_paths.report_root + path)
        return path if os.path.exists(fpath) else None

    # -------------  PUT DATA INTO STORAGE  --------------------------------------------------------------------------

    def put_or_check_suite(self, suite: SuiteConfig) -> None:
        path = DB_paths.suite_cfg.format(suite_id=suite.storage_id)
        if path in self.storage:
            db_cfg = self.storage.load(SuiteConfig, path)
            if db_cfg != suite:
                logger.error("Current suite %s config does not match the one stored at %s", suite.test_type, path)
                # 'suite' is the current config, 'db_cfg' is the one loaded from storage
                logger.debug("Current:\n%s\nStorage:\n%s", pprint.pformat(suite), pprint.pformat(db_cfg))
                raise StopTestError()
        else:
            self.storage.put(suite, path)

    def put_job(self, suite: SuiteConfig, job: JobConfig) -> None:
        path = DB_paths.job_cfg.format(suite_id=suite.storage_id, job_id=job.storage_id)
        self.storage.put(job, path)

    def put_ts(self, ts: TimeSeries) -> None:
        assert ts.data.dtype == ts.times.dtype, "Data type {!r} != time type {!r}".format(ts.data.dtype, ts.times.dtype)
        assert ts.data.dtype.kind == 'u', "Only unsigned ints are accepted"
        assert ts.source.tag == self.ts_arr_tag, "Incorrect source tag == {!r}, must be {!r}".format(ts.source.tag,
                                                                                                     self.ts_arr_tag)
        csv_path = DB_paths.ts.format(**ts.source.__dict__)
        header = [ts.units, ts.time_units]

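        # prepend timestamps as the first CSV column; the data columns follow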
        tv = ts.times.view().reshape((-1, 1))
        if len(ts.data.shape) == 1:
            dv = ts.data.view().reshape((ts.times.shape[0], -1))
        else:
            dv = ts.data

        result = numpy.concatenate((tv, dv), axis=1)
        if ts.histo_bins is not None:
            self.put_array(csv_path, result, header, header2=ts.histo_bins)
        else:
            self.put_array(csv_path, result, header)

        if ts.raw:
            raw_path = DB_paths.ts.format(**ts.source(tag=ts.raw_tag).__dict__)
            self.storage.put_raw(ts.raw, raw_path)

    def put_extra(self, data: bytes, source: DataSource) -> None:
        self.storage.put_raw(data, DB_paths.ts.format(**source.__dict__))

    def put_stat(self, data: StatProps, source: DataSource) -> None:
        self.storage.put(data, DB_paths.stat.format(**source.__dict__))

    # returns the path of the saved file, to be inserted into the report
    def put_plot_file(self, data: bytes, source: DataSource) -> str:
        path = DB_paths.plot.format(**source.__dict__)
        self.storage.put_raw(data, DB_paths.report_root + path)
        return path

    def put_report(self, report: str, name: str) -> str:
        return self.storage.put_raw(report.encode(self.csv_file_encoding), DB_paths.report_root + name)

    def put_sensor_raw(self, data: bytes, ds: DataSource) -> None:
        path = DB_paths.sensor_data.format(**ds.__dict__)
        with self.storage.get_fd(path, "cb") as fd:
            fd.write(data)

    def append_sensor(self, data: numpy.array, ds: DataSource, units: str, histo_bins: numpy.ndarray = None) -> None:
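        """Append sensor samples to the per-sensor CSV file, creating it on first use."""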
        if ds.metric == 'collected_at':
            path = DB_paths.sensor_time
            metrics_fqn = 'collected_at'
        else:
            path = DB_paths.sensor_data
            metrics_fqn = ds.metric_fqdn

        if ds.metric == 'lat':
            assert len(data.shape) == 2, "Latency must be a histogram array"
            assert histo_bins is not None, "Latency must have histo bins"

        path = path.format(**ds.__dict__)
        self.put_array(path, data, [ds.node_id, metrics_fqn, units], header2=histo_bins, append_on_exists=True)

    # -------------  GET DATA FROM STORAGE  --------------------------------------------------------------------------

    def get_stat(self, stat_cls: Type[StatProps], source: DataSource) -> StatProps:
        return self.storage.load(stat_cls, DB_paths.stat.format(**source.__dict__))

    # -------------  ITER OVER STORAGE  ------------------------------------------------------------------------------

    def iter_paths(self, path_glob) -> Iterator[Tuple[bool, str, Dict[str, str]]]:
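        # expand {name} placeholders into the named-group regexes from DB_rr, then walk the storage tree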
        path = path_glob.format(**DB_rr).split("/")
        yield from self.storage._iter_paths("", path, {})

    def iter_suite(self, suite_type: str = None) -> Iterator[SuiteConfig]:
        for is_file, suite_info_path, groups in self.iter_paths(DB_paths.suite_cfg_r):
            assert is_file
            suite = cast(SuiteConfig, self.storage.load(SuiteConfig, suite_info_path))
            assert suite.storage_id == groups['suite_id']
            if not suite_type or suite.test_type == suite_type:
                yield suite

    def iter_job(self, suite: SuiteConfig) -> Iterator[JobConfig]:
        job_glob = fill_path(DB_paths.job_cfg_r, suite_id=suite.storage_id)
        job_config_cls = all_suits[suite.test_type].job_config_cls
        for is_file, path, groups in self.iter_paths(job_glob):
            assert is_file
            job = cast(JobConfig, self.storage.load(job_config_cls, path))
            assert job.storage_id == groups['job_id']
            yield job

    # iterate over test tool data
    def iter_ts(self, suite: SuiteConfig, job: JobConfig, **filters) -> Iterator[TimeSeries]:
        filters.update(suite_id=suite.storage_id, job_id=job.storage_id)
        ts_glob = fill_path(DB_paths.ts_r, **filters)
        for is_file, path, groups in self.iter_paths(ts_glob):
            tag = groups["tag"]
            if tag != 'csv':
                continue
            assert is_file
            groups = groups.copy()
            groups.update(filters)
            ds = DataSource(suite_id=suite.storage_id,
                            job_id=job.storage_id,
                            node_id=groups["node_id"],
                            sensor=groups["sensor"],
                            dev=None,
                            metric=groups["metric"],
                            tag=tag)
            yield self.load_ts(ds, path)

    def iter_sensors(self, node_id: str = None, sensor: str = None, dev: str = None, metric: str = None) -> \
            Iterator[Tuple[str, DataSource]]:
        vls = dict(node_id=node_id, sensor=sensor, dev=dev, metric=metric)
        path = fill_path(DB_paths.sensor_data_r, **vls)
        # use a distinct name for the matched file path, so it doesn't shadow the glob
        for is_file, fpath, groups in self.iter_paths(path):
            cvls = vls.copy()
            cvls.update(groups)
            yield fpath, DataSource(**cvls)
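

# Minimal usage sketch (hypothetical; assumes a Storage instance opened elsewhere,
# e.g. via wally's storage module):
#
#     rstorage = ResultStorage(storage)
#     for suite in rstorage.iter_suite('fio'):
#         for job in rstorage.iter_job(suite):
#             for ts in rstorage.iter_ts(suite, job):
#                 print(ts.source.metric, ts.data.shape)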