working on reporting; this commit represents a broken code state
diff --git a/wally/result_classes.py b/wally/result_classes.py
index 3616f47..1a148ed 100644
--- a/wally/result_classes.py
+++ b/wally/result_classes.py
@@ -1,5 +1,7 @@
+import abc
 import array
-from typing import Dict, List, Any, Optional, Tuple, cast
+from typing import Dict, List, Any, Optional, Tuple, cast, Type, Iterator
+from collections import OrderedDict
 
 
 import numpy
@@ -7,13 +9,31 @@
 
 
 from .node_interfaces import IRPCNode
-from .istorable import IStorable, Storable
+from .common_types import IStorable, Storable
 from .utils import round_digits, Number
 
 
-class TestJobConfig(Storable):
-    def __init__(self) -> None:
-        self.summary = None  # type: str
+class TestJobConfig(Storable, metaclass=abc.ABCMeta):
+    def __init__(self, idx: int) -> None:
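+        # per-job index; combined with the summary to build a unique storage_id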
+        self.idx = idx
+        self.reliable_info_time_range = None  # type: Tuple[int, int]
+        self.vals = OrderedDict()  # type: Dict[str, Any]
+
+    @property
+    def storage_id(self) -> str:
+        return "{}_{}".format(self.summary, self.idx)
+
+    @abc.abstractproperty
+    def characterized_tuple(self) -> Tuple:
+        pass
+
+    @abc.abstractproperty
+    def summary(self) -> str:
+        pass
+
+    @abc.abstractproperty
+    def long_summary(self) -> str:
+        pass
 
 
 class TestSuiteConfig(IStorable):
@@ -31,15 +51,22 @@
                  params: Dict[str, Any],
                  run_uuid: str,
                  nodes: List[IRPCNode],
-                 remote_dir: str) -> None:
+                 remote_dir: str,
+                 idx: int) -> None:
         self.test_type = test_type
         self.params = params
         self.run_uuid = run_uuid
         self.nodes = nodes
-        self.nodes_ids = [node.info.node_id() for node in nodes]
+        self.nodes_ids = [node.node_id for node in nodes]
         self.remote_dir = remote_dir
+        self.storage_id = "{}_{}".format(self.test_type, idx)
 
-    def __eq__(self, other: 'TestSuiteConfig') -> bool:
+    def __eq__(self, o: object) -> bool:
+        if type(o) is not self.__class__:
+            return False
+
+        other = cast(TestSuiteConfig, o)
+
         return (self.test_type == other.test_type and
                 self.params == other.params and
                 set(self.nodes_ids) == set(other.nodes_ids))
@@ -62,23 +89,50 @@
         return obj
 
 
+class DataSource:
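+    """Describes where a piece of data comes from: suite, job, node, device, sensor and an optional tag"""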
+    def __init__(self,
+                 suite_id: str = None,
+                 job_id: str = None,
+                 node_id: str = None,
+                 dev: str = None,
+                 sensor: str = None,
+                 tag: str = None) -> None:
+        self.suite_id = suite_id
+        self.job_id = job_id
+        self.node_id = node_id
+        self.dev = dev
+        self.sensor = sensor
+        self.tag = tag
+
+    def __call__(self, **kwargs) -> 'DataSource':
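+        # return a copy of this source with the given fields overridden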
+        dct = self.__dict__.copy()
+        dct.update(kwargs)
+        return self.__class__(**dct)
+
+    def __str__(self) -> str:
+        return "{0.suite_id}.{0.job_id}/{0.node_id}/{0.dev}.{0.sensor}.{0.tag}".format(self)
+
+    def __repr__(self) -> str:
+        return str(self)
+
+
 class TimeSeries:
     """Data series from sensor - either system sensor or from load generator tool (e.g. fio)"""
 
     def __init__(self,
                  name: str,
                  raw: Optional[bytes],
-                 data: array.array,
-                 times: array.array,
+                 data: numpy.ndarray,
+                 times: numpy.ndarray,
                  second_axis_size: int = 1,
-                 bins_edges: List[float] = None) -> None:
+                 source: DataSource = None) -> None:
 
         # Sensor name. Typically DEV_NAME.METRIC
         self.name = name
 
         # Time series times and values. Time in ms from Unix epoch.
-        self.times = times  # type: List[int]
-        self.data = data  # type: List[int]
+        self.times = times
+        self.data = data
 
         # Not equal to 1 in case of 2d sensors, like latency, when each measurement is a histogram.
         self.second_axis_size = second_axis_size
@@ -86,8 +140,18 @@
         # Raw sensor data (if provided). Like log file for fio iops/bw/lat.
         self.raw = raw
 
-        # bin edges for historgam timeseries
-        self.bins_edges = bins_edges
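+        # where this time series comes from (suite/job/node/device/sensor)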
+        self.source = source
+
+    def __str__(self) -> str:
+        res = "TS({}):\n".format(self.name)
+        res += "    source={}\n".format(self.source)
+        res += "    times_size={}\n".format(len(self.times))
+        res += "    data_size={}\n".format(len(self.data))
+        res += "    data_shape={}x{}\n".format(len(self.data) // self.second_axis_size, self.second_axis_size)
+        return res
+
+    def __repr__(self) -> str:
+        return str(self)
 
 
 # (node_name, source_dev, metric_name) => metric_results
@@ -96,7 +160,7 @@
 
 class StatProps(IStorable):
     "Statistic properties for timeseries with unknown data distribution"
-    def __init__(self, data: List[Number]) -> None:
+    def __init__(self, data: numpy.ndarray) -> None:
         self.perc_99 = None  # type: float
         self.perc_95 = None  # type: float
         self.perc_90 = None  # type: float
@@ -106,8 +170,8 @@
         self.max = None  # type: Number
 
         # bin_center: bin_count
-        self.bins_populations = None  # type: List[int]
-        self.bins_edges = None  # type: List[float]
+        self.bins_populations = None  # type: numpy.ndarray
+        self.bins_mids = None  # type: numpy.ndarray
         self.data = data
 
     def __str__(self) -> str:
@@ -122,14 +186,16 @@
 
     def raw(self) -> Dict[str, Any]:
         data = self.__dict__.copy()
-        data['bins_edges'] = list(self.bins_edges)
+        del data['data']
+        data['bins_mids'] = list(self.bins_mids)
         data['bins_populations'] = list(self.bins_populations)
         return data
 
     @classmethod
     def fromraw(cls, data: Dict[str, Any]) -> 'StatProps':
-        data['bins_edges'] = numpy.array(data['bins_edges'])
+        data['bins_mids'] = numpy.array(data['bins_mids'])
         data['bins_populations'] = numpy.array(data['bins_populations'])
+        data['data'] = None
         res = cls.__new__(cls)
         res.__dict__.update(data)
         return res
@@ -138,14 +204,14 @@
 class HistoStatProps(StatProps):
     """Statistic properties for 2D timeseries with unknown data distribution and histogram as input value.
     Used for latency"""
-    def __init__(self, data: List[Number], second_axis_size: int) -> None:
+    def __init__(self, data: numpy.ndarray, second_axis_size: int) -> None:
         self.second_axis_size = second_axis_size
         StatProps.__init__(self, data)
 
 
 class NormStatProps(StatProps):
     "Statistic properties for timeseries with normal data distribution. Used for iops/bw"
-    def __init__(self, data: List[Number]) -> None:
+    def __init__(self, data: numpy.ndarray) -> None:
         StatProps.__init__(self, data)
 
         self.average = None  # type: float
@@ -153,6 +219,8 @@
         self.confidence = None  # type: float
         self.confidence_level = None  # type: float
         self.normtest = None  # type: NormaltestResult
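+        # skewness and kurtosis of the data distribution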
+        self.skew = None  # type: float
+        self.kurt = None  # type: float
 
     def __str__(self) -> str:
         res = ["NormStatProps(size = {}):".format(len(self.data)),
@@ -163,13 +231,16 @@
                "    perc_95 = {}".format(round_digits(self.perc_95)),
                "    perc_99 = {}".format(round_digits(self.perc_99)),
                "    range {} {}".format(round_digits(self.min), round_digits(self.max)),
-               "    normtest = {0.normtest}".format(self)]
+               "    normtest = {0.normtest}".format(self),
+               "    skew ~ kurt = {0.skew} ~ {0.kurt}".format(self)]
         return "\n".join(res)
 
     def raw(self) -> Dict[str, Any]:
         data = self.__dict__.copy()
-        data['normtest'] = (data['nortest'].statistic, data['nortest'].pvalue)
+        data['normtest'] = (data['normtest'].statistic, data['normtest'].pvalue)
-        data['bins_edges'] = list(self.bins_edges)
+        del data['data']
+        data['bins_mids'] = list(self.bins_mids)
+        data['bins_populations'] = list(self.bins_populations)
         return data
 
     @classmethod
@@ -195,3 +266,51 @@
         self.run_interval = (begin_time, end_time)
         self.raw = raw  # type: JobMetrics
         self.processed = None  # type: JobStatMetrics
+
+
+class IResultStorage(metaclass=abc.ABCMeta):
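+    """Interface for saving and loading test results: suite/job configs, time series, statistics and plot files"""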
+
+    @abc.abstractmethod
+    def sync(self) -> None:
+        pass
+
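+    # store the suite config, or verify that it matches the one already stored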
+    @abc.abstractmethod
+    def put_or_check_suite(self, suite: TestSuiteConfig) -> None:
+        pass
+
+    @abc.abstractmethod
+    def put_job(self, suite: TestSuiteConfig, job: TestJobConfig) -> None:
+        pass
+
+    @abc.abstractmethod
+    def put_ts(self, ts: TimeSeries) -> None:
+        pass
+
+    @abc.abstractmethod
+    def put_extra(self, data: bytes, source: DataSource) -> None:
+        pass
+
+    @abc.abstractmethod
+    def put_stat(self, data: StatProps, source: DataSource) -> None:
+        pass
+
+    @abc.abstractmethod
+    def get_stat(self, stat_cls: Type[StatProps], source: DataSource) -> StatProps:
+        pass
+
+    @abc.abstractmethod
+    def iter_suite(self, suite_type: str = None) -> Iterator[TestSuiteConfig]:
+        pass
+
+    @abc.abstractmethod
+    def iter_job(self, suite: TestSuiteConfig) -> Iterator[TestJobConfig]:
+        pass
+
+    @abc.abstractmethod
+    def iter_ts(self, suite: TestSuiteConfig, job: TestJobConfig) -> Iterator[TimeSeries]:
+        pass
+
+    # return path to file to be inserted into report
+    @abc.abstractmethod
+    def put_plot_file(self, data: bytes, source: DataSource) -> str:
+        pass