refactor result classes and the code that stores/loads results to/from storage
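
Per-job results no longer go through NodeTestResults: run_iteration()
returns a plain JobMetrics mapping, raw logs go to storage via
rstorage.put_extra(), and FioJobSection becomes FioJobConfig on top of
the shared TestJobConfig base. A rough sketch of the new types from
wally/result_classes.py, inferred from the call sites in this diff
(field names match the constructor calls below; anything beyond that
is an assumption):

    import array
    from typing import Dict, Optional, Tuple

    class TimeSeries:
        # One measured metric: parsed values plus the timestamps (ms)
        # they were sampled at. Sketch only -- the real definition
        # lives in wally/result_classes.py.
        def __init__(self, name: str, raw: Optional[bytes],
                     data: array.array, times: array.array,
                     second_axis_size: int = 1) -> None:
            self.name = name                          # 'bw', 'iops' or 'lat'
            self.raw = raw                            # original fio log, kept for debugging
            self.data = data                          # flat value array, row-major
            self.times = times                        # per-sample timestamps, ms
            self.second_axis_size = second_axis_size  # >1 for histograms (lat bins)

    # everything measured for one job:
    # (node_id, source, metric_name) -> TimeSeries
    JobMetrics = Dict[Tuple[str, str, str], TimeSeries]
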
diff --git a/wally/suits/io/fio.py b/wally/suits/io/fio.py
index c32dba2..7b2c3e3 100644
--- a/wally/suits/io/fio.py
+++ b/wally/suits/io/fio.py
@@ -7,9 +7,9 @@
from ...utils import StopTestError, get_os, ssize2b
from ...node_interfaces import IRPCNode
-from ..itest import ThreadedTest, IterationConfig, NodeTestResults
-from ...result_classes import TimeSerie
-from .fio_task_parser import execution_time, fio_cfg_compile, FioJobSection, FioParams, get_log_files, get_test_summary
+from ..itest import ThreadedTest
+from ...result_classes import TimeSeries, JobMetrics
+from .fio_task_parser import execution_time, fio_cfg_compile, FioJobConfig, FioParams, get_log_files
from . import rpc_plugin
from .fio_hist import expected_lat_bins
@@ -22,6 +22,7 @@
retry_time = 30
configs_dir = os.path.dirname(__file__) # type: str
name = 'fio'
+ job_config_cls = FioJobConfig
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
@@ -72,14 +73,13 @@
else:
self.file_size = ssize2b(self.load_params['FILESIZE'])
- self.fio_configs = list(fio_cfg_compile(self.load_profile, self.load_profile_path,
+ self.job_configs = list(fio_cfg_compile(self.load_profile, self.load_profile_path,
cast(FioParams, self.load_params)))
- if len(self.fio_configs) == 0:
+ if len(self.job_configs) == 0:
logger.error("Empty fio config provided")
raise StopTestError()
- self.iterations_configs = self.fio_configs # type: ignore
self.exec_folder = self.config.remote_dir
def config_node(self, node: IRPCNode) -> None:
@@ -129,15 +129,15 @@
node.copy_file(fio_path, bz_dest)
node.run("bzip2 --decompress {} ; chmod a+x {}".format(bz_dest, self.join_remote("fio")))
- def get_expected_runtime(self, iteration_info: IterationConfig) -> int:
- return execution_time(cast(FioJobSection, iteration_info))
+ def get_expected_runtime(self, job_config: FioJobConfig) -> int:
+ return execution_time(job_config)
- def prepare_iteration(self, node: IRPCNode, iter_config: IterationConfig) -> None:
- node.put_to_file(self.remote_task_file, str(iter_config).encode("utf8"))
+ def prepare_iteration(self, node: IRPCNode, job_config: FioJobConfig) -> None:
+ node.put_to_file(self.remote_task_file, str(job_config).encode("utf8"))
# TODO: get a link to substorage as a parameter
- def run_iteration(self, node: IRPCNode, iter_config: IterationConfig, stor_prefix: str) -> NodeTestResults:
- f_iter_config = cast(FioJobSection, iter_config)
+ def run_iteration(self, node: IRPCNode, job_config: FioJobConfig, job_root: str) -> JobMetrics:
+ f_iter_config = job_config
exec_time = execution_time(f_iter_config)
fio_cmd_templ = "cd {exec_folder}; " + \
@@ -152,17 +152,15 @@
if must_be_empty:
logger.error("Unexpected fio output: %r", must_be_empty)
- res = NodeTestResults(self.__class__.__name__, node.info.node_id(), get_test_summary(f_iter_config))
+ res = {} # type: JobMetrics
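+ # keys are (node_id, source, metric_name), e.g. (node_id, 'fio', 'lat')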
- res.extra_logs['fio'] = node.get_file_content(self.remote_output_file)
- self.store_data(res.extra_logs['fio'], "raw", stor_prefix, "fio_raw")
+ # put fio output into storage
+ fio_out = node.get_file_content(self.remote_output_file)
+ self.rstorage.put_extra(job_root, node.info.node_id(), "fio_raw", fio_out)
node.conn.fs.unlink(self.remote_output_file)
files = [name for name in node.conn.fs.listdir(self.exec_folder)]
- expected_time_delta = 1000 # 1000ms == 1s
- max_time_diff = 50 # 50ms - 5%
-
for name, path in get_log_files(f_iter_config):
log_files = [fname for fname in files if fname.startswith(path)]
if len(log_files) != 1:
@@ -172,7 +170,6 @@
fname = os.path.join(self.exec_folder, log_files[0])
raw_result = node.get_file_content(fname) # type: bytes
- self.store_data(raw_result, "raw", stor_prefix, "{}_raw".format(name))
node.conn.fs.unlink(fname)
try:
@@ -182,23 +179,14 @@
raise StopTestError()
parsed = array.array('L' if name == 'lat' else 'Q')
- prev_ts = None
- load_start_at = None
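+ # keep the raw per-sample timestamps instead of assuming a fixed 1000ms step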
+ times = array.array('Q')
- # TODO: need to adjust vals for timedelta
for idx, line in enumerate(log_data):
line = line.strip()
if line:
try:
time_ms_s, val_s, _, *rest = line.split(",")
- time_ms = int(time_ms_s.strip())
-
- if not prev_ts:
- prev_ts = time_ms - expected_time_delta
- load_start_at = time_ms
- elif abs(time_ms - prev_ts - expected_time_delta) > max_time_diff:
- logger.warning("Too large gap in {} log at {} - {}ms"
- .format(name, time_ms, time_ms - prev_ts))
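+ # first column of a fio log line is the sample time in milliseconds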
+ times.append(int(time_ms_s.strip()))
if name == 'lat':
vals = [int(i.strip()) for i in rest]
@@ -215,17 +203,12 @@
logger.exception("Error during parse %s fio log file in line %s: %r", name, idx, line)
raise StopTestError()
- prev_ts += expected_time_delta
-
- res.series[name] = TimeSerie(name=name,
- raw=raw_result,
- second_axis_size=expected_lat_bins if name == 'lat' else 1,
- start_at=load_start_at,
- step=expected_time_delta,
- data=parsed)
-
- self.store_data(parsed, "array", stor_prefix, "{}_data".format(name))
- self.store_data(res.series[name].meta(), "yaml", stor_prefix, "{}_meta".format(name))
+ ts = TimeSeries(name=name,
+ raw=raw_result,
+ second_axis_size=expected_lat_bins if name == 'lat' else 1,
+ data=parsed,
+ times=times)
+ res[(node.info.node_id(), 'fio', name)] = ts
return res
diff --git a/wally/suits/io/fio_hist.py b/wally/suits/io/fio_hist.py
index eb5d9ee..a2ded70 100644
--- a/wally/suits/io/fio_hist.py
+++ b/wally/suits/io/fio_hist.py
@@ -52,6 +52,6 @@
return lower + (upper - lower) * edge
-def get_lat_vals(columns: int = 1216, coarseness: int = 0) -> List[float]:
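+# default matches expected_lat_bins, so callers get one value per histogram column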
+def get_lat_vals(columns: int = expected_lat_bins, coarseness: int = 0) -> List[float]:
return [plat_idx_to_val_coarse(val, coarseness) for val in range(columns)]
diff --git a/wally/suits/io/fio_task_parser.py b/wally/suits/io/fio_task_parser.py
index bdcf4a3..6940aaf 100644
--- a/wally/suits/io/fio_task_parser.py
+++ b/wally/suits/io/fio_task_parser.py
@@ -12,7 +12,7 @@
from ...result_classes import IStorable
-from ..itest import IterationConfig
+from ...result_classes import TestJobConfig
from ...utils import sec_to_str, ssize2b
@@ -38,13 +38,16 @@
("vm_count", int)])
-class FioJobSection(IterationConfig, IStorable):
+class FioJobConfig(TestJobConfig):
def __init__(self, name: str) -> None:
- self.name = name
+ super().__init__()
self.vals = OrderedDict() # type: Dict[str, Any]
- self.summary = None
+ self.name = name
- def copy(self) -> 'FioJobSection':
+ def __eq__(self, other: object) -> bool:
+ return isinstance(other, FioJobConfig) and self.vals == other.vals
+
+ def copy(self) -> 'FioJobConfig':
return copy.deepcopy(self)
def required_vars(self) -> Iterator[Tuple[str, Var]]:
@@ -56,7 +59,7 @@
return len(list(self.required_vars())) == 0
def __str__(self) -> str:
- res = "[{0}]\n".format(self.name)
+ res = "[{0}]\n".format(self.summary)
for name, val in self.vals.items():
if name.startswith('_') or name == name.upper():
@@ -68,15 +71,18 @@
return res
+ def __repr__(self) -> str:
+ return str(self)
+
def raw(self) -> Dict[str, Any]:
return {
- 'name': self.name,
- 'vals': list(map(list, self.vals.items())),
- 'summary': self.summary
+ 'vals': [[key, val] for key, val in self.vals.items()],
+ 'summary': self.summary,
+ 'name': self.name
}
@classmethod
- def fromraw(cls, data: Dict[str, Any]) -> 'FioJobSection':
+ def fromraw(cls, data: Dict[str, Any]) -> 'FioJobConfig':
obj = cls(data['name'])
obj.summary = data['summary']
obj.vals.update(data['vals'])
@@ -160,7 +166,7 @@
raise ParseError(str(exc), fname, lineno, oline)
-def fio_config_parse(lexer_iter: Iterable[CfgLine]) -> Iterator[FioJobSection]:
+def fio_config_parse(lexer_iter: Iterable[CfgLine]) -> Iterator[FioJobConfig]:
in_globals = False
curr_section = None
glob_vals = OrderedDict() # type: Dict[str, Any]
@@ -188,9 +194,7 @@
try:
cont = open(new_fname).read()
except IOError as err:
- msg = "Error while including file {0}: {1}"
- raise ParseError(msg.format(new_fname, err),
- fname, lineno, oline)
+ raise ParseError("Error while including file {}: {}".format(new_fname, err), fname, lineno, oline)
new_lines.extend(fio_config_lexer(cont, new_fname))
one_more = True
@@ -207,13 +211,11 @@
if name == 'global':
if sections_count != 0:
- raise ParseError("[global] section should" +
- " be only one and first",
- fname, lineno, oline)
+ raise ParseError("[global] section should be only one and first", fname, lineno, oline)
in_globals = True
else:
in_globals = False
- curr_section = FioJobSection(name)
+ curr_section = FioJobConfig(name)
curr_section.vals = glob_vals.copy()
sections_count += 1
else:
@@ -221,12 +223,9 @@
if in_globals:
glob_vals[name] = val
elif name == name.upper():
- raise ParseError("Param '" + name +
- "' not in [global] section",
- fname, lineno, oline)
+ raise ParseError("Param {!r} not in [global] section".format(name), fname, lineno, oline)
elif curr_section is None:
- raise ParseError("Data outside section",
- fname, lineno, oline)
+ raise ParseError("Data outside section", fname, lineno, oline)
else:
curr_section.vals[name] = val
@@ -234,7 +233,7 @@
yield curr_section
-def process_cycles(sec: FioJobSection) -> Iterator[FioJobSection]:
+def process_cycles(sec: FioJobConfig) -> Iterator[FioJobConfig]:
cycles = OrderedDict() # type: Dict[str, Any]
for name, val in sec.vals.items():
@@ -270,7 +269,7 @@
FioParams = Dict[str, FioParamsVal]
-def apply_params(sec: FioJobSection, params: FioParams) -> FioJobSection:
+def apply_params(sec: FioJobConfig, params: FioParams) -> FioJobConfig:
processed_vals = OrderedDict() # type: Dict[str, Any]
processed_vals.update(params)
for name, val in sec.vals.items():
@@ -307,7 +306,7 @@
MAGIC_OFFSET = 0.1885
-def final_process(sec: FioJobSection, counter: List[int] = [0]) -> FioJobSection:
+def final_process(sec: FioJobConfig, counter: List[int] = [0]) -> FioJobConfig:
sec = sec.copy()
sec.vals['unified_rw_reporting'] = '1'
@@ -340,7 +339,7 @@
return sec
-def get_test_sync_mode(sec: FioJobSection) -> str:
+def get_test_sync_mode(sec: FioJobConfig) -> str:
if isinstance(sec, dict):
vals = sec
else:
@@ -359,7 +358,7 @@
return 'a'
-def get_test_summary_tuple(sec: FioJobSection, vm_count: int = None) -> TestSumm:
+def get_test_summary_tuple(sec: FioJobConfig, vm_count: int = None) -> TestSumm:
if isinstance(sec, dict):
vals = sec
else:
@@ -382,7 +381,7 @@
vm_count)
-def get_test_summary(sec: FioJobSection, vm_count: int = None, noiodepth: bool = False) -> str:
+def get_test_summary(sec: FioJobConfig, vm_count: int = None, noiodepth: bool = False) -> str:
tpl = get_test_summary_tuple(sec, vm_count)
res = "{0.oper}{0.mode}{0.bsize}".format(tpl)
@@ -395,11 +394,11 @@
return res
-def execution_time(sec: FioJobSection) -> int:
+def execution_time(sec: FioJobConfig) -> int:
return sec.vals.get('ramp_time', 0) + sec.vals.get('runtime', 0)
-def parse_all_in_1(source:str, fname: str = None) -> Iterator[FioJobSection]:
+def parse_all_in_1(source: str, fname: str = None) -> Iterator[FioJobConfig]:
return fio_config_parse(fio_config_lexer(source, fname))
@@ -414,7 +413,7 @@
yield res
-def get_log_files(sec: FioJobSection) -> List[Tuple[str, str]]:
+def get_log_files(sec: FioJobConfig) -> List[Tuple[str, str]]:
res = [] # type: List[Tuple[str, str]]
for key, name in (('write_iops_log', 'iops'), ('write_bw_log', 'bw'), ('write_hist_log', 'lat')):
log = sec.vals.get(key)
@@ -423,7 +422,7 @@
return res
-def fio_cfg_compile(source: str, fname: str, test_params: FioParams) -> Iterator[FioJobSection]:
+def fio_cfg_compile(source: str, fname: str, test_params: FioParams) -> Iterator[FioJobConfig]:
it = parse_all_in_1(source, fname)
it = (apply_params(sec, test_params) for sec in it)
it = flatmap(process_cycles, it)
diff --git a/wally/suits/io/rrd.cfg b/wally/suits/io/rrd.cfg
index 850c9a3..ca3c613 100644
--- a/wally/suits/io/rrd.cfg
+++ b/wally/suits/io/rrd.cfg
@@ -1,7 +1,7 @@
[global]
include defaults_qd.cfg
ramp_time=0
-runtime=600
+runtime=10
[test_{TEST_SUMM}]
blocksize=60k