Many updates in report code and in storage structure; this commit is broken (work in progress).
diff --git a/wally/suits/io/fio.py b/wally/suits/io/fio.py
index bf2e6b3..77d7a75 100644
--- a/wally/suits/io/fio.py
+++ b/wally/suits/io/fio.py
@@ -1,10 +1,10 @@
-import array
import os.path
import logging
-from typing import cast, Any, Tuple, List
+from typing import cast, Any, List, Union
+
+import numpy
import wally
-
from ...utils import StopTestError, ssize2b, b2ssize
from ...node_interfaces import IRPCNode
from ...node_utils import get_os
@@ -36,7 +36,7 @@
self.use_system_fio = get('use_system_fio', False) # type: bool
self.use_sudo = get("use_sudo", True) # type: bool
self.force_prefill = get('force_prefill', False) # type: bool
-
+ self.skip_prefill = get('skip_prefill', False) # type: bool
self.load_profile_name = self.suite.params['load'] # type: str
if os.path.isfile(self.load_profile_name):
@@ -71,6 +71,11 @@
self.file_size = list(sizes)[0]
logger.info("Detected test file size is %sB", b2ssize(self.file_size))
+ if self.file_size % (4 * 1024 ** 2) != 0:
+ tail = self.file_size % (4 * 1024 ** 2)
+                logger.warning("File size is not a multiple of 4M, %s at the end will not be used for test",
+                               str(tail // 1024) + "Kb" if tail > 1024 else str(tail) + "b")
+ self.file_size -= self.file_size % (4 * 1024 ** 2)
self.load_params['FILESIZE'] = self.file_size
else:
self.file_size = ssize2b(self.load_params['FILESIZE'])
@@ -107,16 +112,18 @@
self.install_utils(node)
- mb = int(self.file_size / 1024 ** 2)
- logger.info("Filling test file %s on node %s with %sMiB of random data", self.file_name, node.info, mb)
- is_prefilled, fill_bw = node.conn.fio.fill_file(self.file_name, mb,
- force=self.force_prefill,
- fio_path=self.fio_path)
-
- if not is_prefilled:
- logger.info("Test file on node %s is already prefilled", node.info)
- elif fill_bw is not None:
- logger.info("Initial fio fill bw is %s MiBps for %s", fill_bw, node.info)
+ if self.skip_prefill:
+ logger.info("Prefill is skipped due to 'skip_prefill' set to true")
+ else:
+ mb = int(self.file_size / 1024 ** 2)
+ logger.info("Filling test file %s on node %s with %sMiB of random data", self.file_name, node.info, mb)
+ is_prefilled, fill_bw = node.conn.fio.fill_file(self.file_name, mb,
+ force=self.force_prefill,
+ fio_path=self.fio_path)
+ if not is_prefilled:
+ logger.info("Test file on node %s is already prefilled", node.info)
+ elif fill_bw is not None:
+ logger.info("Initial fio fill bw is %s MiBps for %s", fill_bw, node.info)
def install_utils(self, node: IRPCNode) -> None:
os_info = get_os(node)
@@ -170,16 +177,16 @@
path = DataSource(suite_id=self.suite.storage_id,
job_id=job.storage_id,
node_id=node.node_id,
- dev='fio',
- sensor='stdout',
+ sensor='fio',
+ dev=None,
+ metric='stdout',
tag='json')
-
self.storage.put_extra(fio_out, path)
node.conn.fs.unlink(self.remote_output_file)
files = [name for name in node.conn.fs.listdir(self.exec_folder)]
result = []
- for name, file_path in get_log_files(cast(FioJobConfig, job)):
+ for name, file_path, units in get_log_files(cast(FioJobConfig, job)):
log_files = [fname for fname in files if fname.startswith(file_path)]
if len(log_files) != 1:
logger.error("Found %s files, match log pattern %s(%s) - %s",
@@ -196,8 +203,10 @@
logger.exception("Error during parse %s fio log file - can't decode usint UTF8", name)
raise StopTestError()
- parsed = array.array('L' if name == 'lat' else 'Q')
- times = array.array('Q')
+ # TODO: fix units, need to get array type from stream
+
+ parsed = [] # type: List[Union[List[int], int]]
+ times = []
for idx, line in enumerate(log_data):
line = line.strip()
@@ -214,19 +223,23 @@
.format(expected_lat_bins, len(vals), time_ms_s))
raise StopTestError()
- parsed.extend(vals)
+ parsed.append(vals)
else:
parsed.append(int(val_s.strip()))
except ValueError:
logger.exception("Error during parse %s fio log file in line %s: %r", name, idx, line)
raise StopTestError()
+ if not self.suite.keep_raw_files:
+ raw_result = None
+
result.append(TimeSeries(name=name,
raw=raw_result,
- second_axis_size=expected_lat_bins if name == 'lat' else 1,
- data=parsed,
- times=times,
- source=path(sensor=name, tag=None)))
+ data=numpy.array(parsed, dtype='uint64'),
+ units=units,
+ times=numpy.array(times, dtype='uint64'),
+ time_units='ms',
+ source=path(metric=name, tag='csv')))
return result
def format_for_console(self, data: Any) -> str:
diff --git a/wally/suits/io/fio_job.py b/wally/suits/io/fio_job.py
index 0f55e91..2d8d78a 100644
--- a/wally/suits/io/fio_job.py
+++ b/wally/suits/io/fio_job.py
@@ -53,15 +53,26 @@
@property
def long_summary(self) -> str:
"""Readable long summary for management and deployment engineers"""
- res = "{0[sync_mode_long]} {0[oper]} {1}".format(self, b2ssize(self['bsize'] * 1024))
+ res = "{0[oper]}, {0.sync_mode_long}, block size {1}B".format(self, b2ssize(self['bsize'] * 1024))
if self['qd'] is not None:
- res += " QD = " + str(self['qd'])
+ res += ", QD = " + str(self['qd'])
if self['thcount'] not in (1, None):
- res += " threads={0[thcount]}".format(self)
+ res += ", threads={0[thcount]}".format(self)
if self['write_perc'] is not None:
- res += " write_perc={0[write_perc]}%".format(self)
+ res += ", write_perc={0[write_perc]}%".format(self)
return res
+ def copy(self, **kwargs: Dict[str, Any]) -> 'FioJobParams':
+ np = self.params.copy()
+ np.update(kwargs)
+ return self.__class__(**np)
+
+ @property
+ def char_tpl(self) -> Tuple[Union[str, int], ...]:
+ mint = lambda x: -10000000000 if x is None else int(x)
+ return self['oper'], mint(self['bsize']), self['sync_mode'], \
+ mint(self['thcount']), mint(self['qd']), mint(self['write_perc'])
+
class FioJobConfig(JobConfig):
"""Fio job configuration"""
@@ -157,7 +168,7 @@
return len(list(self.required_vars())) == 0
def __str__(self) -> str:
- res = "[{0}]\n".format(self.params.summary)
+ res = "[{0}]\n".format(self.summary)
for name, val in self.vals.items():
if name.startswith('_') or name == name.upper():
@@ -180,4 +191,6 @@
@classmethod
def fromraw(cls, data: Dict[str, Any]) -> 'FioJobConfig':
data['vals'] = OrderedDict(data['vals'])
+ data['_sync_mode'] = None
+ data['_params'] = None
return cast(FioJobConfig, super().fromraw(data))
diff --git a/wally/suits/io/fio_task_parser.py b/wally/suits/io/fio_task_parser.py
index c1b4bc3..bdcec23 100644
--- a/wally/suits/io/fio_task_parser.py
+++ b/wally/suits/io/fio_task_parser.py
@@ -288,19 +288,18 @@
return fio_config_parse(fio_config_lexer(source, fname))
-def get_log_files(sec: FioJobConfig, iops: bool = False) -> List[Tuple[str, str]]:
- res = [] # type: List[Tuple[str, str]]
+def get_log_files(sec: FioJobConfig, iops: bool = False) -> Iterator[Tuple[str, str, str]]:
+ res = [] # type: List[Tuple[str, str, str]]
- keys = [('write_bw_log', 'bw'), ('write_hist_log', 'lat')]
+ keys = [('write_bw_log', 'bw', 'kibps'),
+ ('write_hist_log', 'lat', 'us')]
if iops:
- keys.append(('write_iops_log', 'iops'))
+ keys.append(('write_iops_log', 'iops', 'iops'))
- for key, name in keys:
+ for key, name, units in keys:
log = sec.vals.get(key)
if log is not None:
- res.append((name, log))
-
- return res
+ yield (name, log, units)
def fio_cfg_compile(source: str, fname: str, test_params: FioParams) -> Iterator[FioJobConfig]:
diff --git a/wally/suits/io/hdd.cfg b/wally/suits/io/hdd.cfg
index 95c8cec..eff64cd 100644
--- a/wally/suits/io/hdd.cfg
+++ b/wally/suits/io/hdd.cfg
@@ -1,44 +1,23 @@
[global]
-include defaults.cfg
+include defaults_qd.cfg
-# NUMJOBS={% 1, 5, 10, 15, 20, 30, 40, 80 %}
-
-NUMJOBS={% 1, 5, 10, 15, 25, 40 %}
-
-ramp_time=30
-runtime=120
-
+QD={% 1, 2, 4, 8, 16, 32, 64 %}
+runtime=300
direct=1
# ---------------------------------------------------------------------
-# check different thread count, sync mode. (latency, iops) = func(th_count)
+# check different queue depths, random read/write mode. (latency, iops) = func(QD)
# ---------------------------------------------------------------------
[hdd_{TEST_SUMM}]
blocksize=4k
-rw=randwrite
-sync=1
-numjobs={NUMJOBS}
+rw={% randread, randwrite %}
+iodepth={QD}
# ---------------------------------------------------------------------
-# check different thread count, direct read mode. (latency, iops) = func(th_count)
-# also check iops for randread
-# ---------------------------------------------------------------------
-[hdd_{TEST_SUMM}]
-blocksize=4k
-rw=randread
-numjobs={NUMJOBS}
-
-# ---------------------------------------------------------------------
-# No reason for th count > 1 in case of sequantial operations
+# No reason for QD > 1 in case of sequential operations
# ot they became random
# ---------------------------------------------------------------------
[hdd_{TEST_SUMM}]
blocksize=1m
rw={% read, write %}
-
-# ---------------------------------------------------------------------
-# check IOPS randwrite.
-# ---------------------------------------------------------------------
-[hdd_{TEST_SUMM}]
-blocksize=4k
-rw=randwrite
+iodepth=1