fixes: migrate Dockerfile to Ubuntu 16.04/Python 3.6, unpin iso8601, and modernize type annotations to Python 3.6 syntax
diff --git a/Dockerfile b/Dockerfile
index e74d736..21e8ba0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,18 +1,26 @@
-FROM ubuntu:14.04
+# docker build -t ubuntu1604py36
+FROM ubuntu:16.04
+
 MAINTAINER Kostiantyn Danylov <koder.mail@gmail.com>
 
-RUN apt-get update
+RUN apt-get update && \
+    apt-get install -y software-properties-common && \
+    add-apt-repository ppa:jonathonf/python-3.6 && \
+    apt-get update &&  \
+    apt-get install -y vim git build-essential python3.6 python3.6-dev python3-pip python3.6-venv curl wget && rm -rf /var/lib/apt/lists/*
 
-RUN apt-get install -y python-dev python-pip python-virtualenv \
-    libevent-dev python-libvirt
+RUN git clone https://github.com/Mirantis/disk_perf_test_tool.git /opt/disk_perf_tool && \
+    git clone https://github.com/koder-ua/cephlib.git /opt/cephlib && \
+    git clone https://github.com/koder-ua/xmlbuilder3.git /opt/xmlbuilder3 && \
+    git clone https://github.com/koder-ua/agent.git /opt/agent && \
+    mkdir /opt/wally_libs && \
+    ln -s /opt/agent/agent /opt/wally_libs && \
+    ln -s /opt/xmlbuilder3/xmlbuilder3 /opt/wally_libs && \
+    ln -s /opt/cephlib/cephlib /opt/wally_libs && \
+    python3.6 -m pip install pip --upgrade && \
+    cd /opt/disk_perf_tool && \
+    git checkout v2.0 && \
+    python3.6 -m pip install wheel && \
+    python3.6 -m pip install -r requirements.txt
 
-RUN apt-get install -y libssl-dev libffi-dev
-
-RUN apt-get install -y python-setuptools git vim curl wget
-
-RUN git clone https://github.com/Mirantis/disk_perf_test_tool.git \
-    /opt/disk_perf_tool
-
-RUN cd /opt/disk_perf_tool; bash scripts/install.sh --full
-
-RUN ["/bin/bash"]
+CMD ["/bin/bash"]
diff --git a/requirements.txt b/requirements.txt
index 38c8b79..74d28c4 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,7 +16,7 @@
 python-keystoneclient
 python-glanceclient
 oktest
-iso8601==0.1.10
+iso8601
 scipy
 numpy
 matplotlib
diff --git a/wally/ceph.py b/wally/ceph.py
index ad3f461..35e78b4 100644
--- a/wally/ceph.py
+++ b/wally/ceph.py
@@ -19,7 +19,7 @@
 
 def get_osds_info(node: IRPCNode, ceph_extra_args: str = "", thcount: int = 8) -> Dict[IP, List[OSDInfo]]:
     """Get set of osd's ip"""
-    res = {}  # type: Dict[IP, List[OSDInfo]]
+    res: Dict[IP, List[OSDInfo]] = {}
     return {IP(ip): osd_info_list
             for ip, osd_info_list in discover.get_osds_nodes(node.run, ceph_extra_args, thcount=thcount).items()}
 
@@ -53,21 +53,21 @@
         key = ceph.get("key")
 
         if conf is None:
-            conf = "/etc/ceph/{}.conf".format(cluster)
+            conf = f"/etc/ceph/{cluster}.conf"
 
         if key is None:
-            key = "/etc/ceph/{}.client.admin.keyring".format(cluster)
+            key = f"/etc/ceph/{cluster}.client.admin.keyring"
 
         ceph_extra_args = ""
 
         if conf:
-            ceph_extra_args += " -c '{}'".format(conf)
+            ceph_extra_args += f" -c '{conf}'"
 
         if key:
-            ceph_extra_args += " -k '{}'".format(key)
+            ceph_extra_args += f" -k '{key}'"
 
-        logger.debug("Start discovering ceph nodes from root %s", root_node_uri)
-        logger.debug("cluster=%s key=%s conf=%s", cluster, conf, key)
+        logger.debug(f"Start discovering ceph nodes from root {root_node_uri}")
+        logger.debug(f"cluster={cluster} key={key} conf={conf}")
 
         info = NodeInfo(parse_ssh_uri(root_node_uri), set())
 
@@ -86,13 +86,13 @@
                     info.params.setdefault('ceph-osds', []).extend(info.__dict__.copy() for info in osds_info)
                     assert 'ceph' not in info.params or info.params['ceph'] == ceph_params
                     info.params['ceph'] = ceph_params
-                logger.debug("Found %s nodes with ceph-osd role", len(ips))
+                logger.debug(f"Found {len(ips)} nodes with ceph-osd role")
             except Exception as exc:
                 if not ignore_errors:
                     logger.exception("OSD discovery failed")
                     raise StopTestError()
                 else:
-                    logger.warning("OSD discovery failed %s", exc)
+                    logger.warning(f"OSD discovery failed {exc}")
 
             try:
                 counter = 0
@@ -102,13 +102,13 @@
                     info = ctx.merge_node(creds, {'ceph-mon'})
                     assert 'ceph' not in info.params or info.params['ceph'] == ceph_params
                     info.params['ceph'] = ceph_params
-                logger.debug("Found %s nodes with ceph-mon role", counter + 1)
+                logger.debug(f"Found {counter + 1} nodes with ceph-mon role")
             except Exception as exc:
                 if not ignore_errors:
                     logger.exception("MON discovery failed")
                     raise StopTestError()
                 else:
-                    logger.warning("MON discovery failed %s", exc)
+                    logger.warning(f"MON discovery failed {exc}")
 
 
 def raw_dev_name(path: str) -> str:
@@ -127,8 +127,8 @@
         for node in ctx.nodes:
             if 'ceph_storage_devs' not in node.info.params:
                 if 'ceph-osd' in node.info.roles:
-                    jdevs = set()  # type: Set[str]
-                    sdevs = set()  # type: Set[str]
+                    jdevs: Set[str] = set()
+                    sdevs: Set[str] = set()
                     for osd_info in node.info.params['ceph-osds']:
                         for key, sset in [('journal', jdevs), ('storage', sdevs)]:
                             path = osd_info.get(key)
diff --git a/wally/config.py b/wally/config.py
index 2ec7ba5..1090d6c 100644
--- a/wally/config.py
+++ b/wally/config.py
@@ -10,30 +10,30 @@
     def __init__(self, dct: ConfigBlock) -> None:
         # make mypy happy, set fake dict
         self.__dict__['_dct'] = {}
-        self.run_uuid = None  # type: str
-        self.storage_url = None  # type: str
-        self.comment = None  # type: str
-        self.keep_vm = None  # type: bool
-        self.dont_discover_nodes = None  # type: bool
-        self.build_id = None  # type: str
-        self.build_description = None  # type: str
-        self.build_type = None  # type: str
-        self.default_test_local_folder = None  # type: str
-        self.settings_dir = None  # type: str
-        self.connect_timeout = None  # type: int
-        self.no_tests = False  # type: bool
-        self.debug_agents = False  # type: bool
+        self.run_uuid: Optional[str] = None
+        self.storage_url: Optional[str] = None
+        self.comment: Optional[str] = None
+        self.keep_vm: Optional[bool] = None
+        self.dont_discover_nodes: Optional[bool] = None
+        self.build_id: Optional[str] = None
+        self.build_description: Optional[str] = None
+        self.build_type: Optional[str] = None
+        self.default_test_local_folder: Optional[str] = None
+        self.settings_dir: Optional[str] = None
+        self.connect_timeout: Optional[int] = None
+        self.no_tests: bool = False
+        self.debug_agents: bool = False
 
         # None, disabled, enabled, metadata, ignore_errors
-        self.discover = None  # type: Optional[str]
+        self.discover: Optional[str] = None
 
-        self.logging = None  # type: 'Config'
-        self.ceph = None  # type: 'Config'
-        self.openstack = None  # type: 'Config'
-        self.fuel = None  # type: 'Config'
-        self.test = None  # type: 'Config'
-        self.sensors = None  # type: 'Config'
-        self.discover = None  # type: Set[str]
+        self.logging: Optional['Config'] = None
+        self.ceph: Optional['Config'] = None
+        self.openstack: Optional['Config'] = None
+        self.fuel: Optional['Config'] = None
+        self.test: Optional['Config'] = None
+        self.sensors: Optional['Config'] = None
+        self.discover: Optional[Set[str]] = None
 
         self._dct.clear()
         self._dct.update(dct)
diff --git a/wally/console_report.py b/wally/console_report.py
index 381d9ff..d1b6e9f 100644
--- a/wally/console_report.py
+++ b/wally/console_report.py
@@ -1,5 +1,5 @@
 import logging
-from typing import cast, Iterator, List, Union
+from typing import cast, List, Union
 
 import numpy
 
@@ -20,12 +20,11 @@
 logger = logging.getLogger("wally")
 
 
-
 console_report_headers = ["Description", "IOPS ~ Dev", "BW, MiBps", 'Skew/Kurt', 'lat med, ms', 'lat 95, ms']
 console_report_align = ['l', 'r', 'r', 'r', 'r', 'r']
 
 def get_console_report_table(suite: SuiteConfig, rstorage: IWallyStorage) -> List[Union[List[str], Texttable.HLINE]]:
-    table = []  # type: List[Union[List[str], Texttable.HLINE]]
+    table: List[Union[List[str], Texttable.HLINE]] = []
     prev_params = None
     for job in sorted(rstorage.iter_job(suite), key=lambda job: job.params):
         fparams = cast(FioJobParams, job.params)
@@ -47,9 +46,9 @@
         bins_edges = numpy.array(get_lat_vals(lat_ts.data.shape[1]), dtype='float32') / 1000  # convert us to ms
         lat_props = calc_histo_stat_props(lat_ts, bins_edges)
         table.append([job.params.summary,
-                      "{:>6s} ~ {:>6s}".format(float2str(avg_iops), float2str(iops_dev)),
+                      f"{float2str(avg_iops):>6s} ~ {float2str(iops_dev):>6s}",
                       float2str(props.average / 1024),  # Ki -> Mi
-                      "{:>5.1f}/{:>5.1f}".format(props.skew, props.kurt),
+                      f"{props.skew:>5.1f}/{props.kurt:>5.1f}",
                       float2str(lat_props.perc_50), float2str(lat_props.perc_95)])
     return table
 
diff --git a/wally/data_selectors.py b/wally/data_selectors.py
index d9eedfa..2b9037e 100644
--- a/wally/data_selectors.py
+++ b/wally/data_selectors.py
@@ -51,7 +51,7 @@
     tss = list(find_all_series(rstorage, suite_id, job_id, metric))
 
     if len(tss) == 0:
-        raise NameError("Can't found any TS for {},{},{}".format(suite_id, job_id, metric))
+        raise NameError(f"Can't found any TS for {suite_id},{job_id},{metric}")
 
     c_intp = c_interpolate_ts_on_seconds_border
     tss_inp = [c_intp(ts.select(trange), tp='fio', allow_broken_step=(metric == 'lat')) for ts in tss]
@@ -66,23 +66,20 @@
             raise ValueError(msg)
 
         if metric == 'lat' and (len(ts.data.shape) != 2 or ts.data.shape[1] != expected_lat_bins):
-            msg = "Sensor {}.{} on node {} has shape={}. Can only process sensors with shape=[X, {}].".format(
-                         ts.source.dev, ts.source.sensor, ts.source.node_id, ts.data.shape, expected_lat_bins)
+            msg = f"Sensor {ts.source.dev}.{ts.source.sensor} on node {ts.source.node_id} " + \
+                f"has shape={ts.data.shape}. Can only process sensors with shape=[X, {expected_lat_bins}]."
             logger.error(msg)
             raise ValueError(msg)
 
         if metric != 'lat' and len(ts.data.shape) != 1:
-            msg = "Sensor {}.{} on node {} has shape={}. Can only process 1D sensors.".format(
-                         ts.source.dev, ts.source.sensor, ts.source.node_id, ts.data.shape)
+            msg = f"Sensor {ts.source.dev}.{ts.source.sensor} on node {ts.source.node_id} " + \
+                f"has shape={ts.data.shape}. Can only process 1D sensors."
             logger.error(msg)
             raise ValueError(msg)
 
-        try:
-            assert trange[0] >= ts.times[0] and trange[1] <= ts.times[-1], \
-                "[{}, {}] not in [{}, {}]".format(ts.times[0], ts.times[-1], trange[0], trange[-1])
-        except AssertionError:
-            import IPython
-            IPython.embed()
+        assert trange[0] >= ts.times[0] and trange[1] <= ts.times[-1], \
+            f"[{ts.times[0]}, {ts.times[-1]}] not in [{trange[0]}, {trange[-1]}]"
+
 
         idx1, idx2 = numpy.searchsorted(ts.times, trange)
         idx2 += 1
@@ -95,7 +92,7 @@
             res = dt.copy()
             res_times = ts.times[idx1: idx2].copy()
         else:
-            assert res.shape == dt.shape, "res.shape(={}) != dt.shape(={})".format(res.shape, dt.shape)
+            assert res.shape == dt.shape, f"res.shape(={res.shape}) != dt.shape(={dt.shape})"
             res += dt
 
     ds = DataSource(suite_id=suite_id, job_id=job_id, node_id=AGG_TAG, sensor='fio',
diff --git a/wally/main.py b/wally/main.py
index 36a2690..77074c6 100644
--- a/wally/main.py
+++ b/wally/main.py
@@ -7,6 +7,7 @@
 import argparse
 import functools
 import contextlib
+import datetime
 from typing import List, Tuple, Any, Callable, IO, cast, Optional, Iterator
 from yaml import load as _yaml_load
 
@@ -53,6 +54,15 @@
 from .console_report import ConsoleReportStage
 
 
+try:
+    assert False
+except AssertionError:
+    pass
+else:
+    print("Must not run this code with -o python option. Assertions are important!")
+    exit(1)
+
+
 logger = logging.getLogger("wally")
 
 
@@ -61,7 +71,7 @@
     logger.info("Start " + stage.name() + ("::cleanup" if cleanup else ""))
     try:
         yield
-    except utils.StopTestError as exc:
+    except utils.StopTestError:
         raise
     except Exception:
         logger.exception("During %s", stage.name() + ("::cleanup" if cleanup else ""))
@@ -75,12 +85,13 @@
         dirs.append((os.stat(full_path).st_ctime, full_path))
 
     dirs.sort()
-    results = []  # type: List[Tuple[str, str, str, str, str]]
+    results: List[Tuple[str, str, str, str, str]] = []
     for _, full_path in dirs[::-1]:
         try:
             stor = make_storage(full_path, existing=True)
         except Exception as exc:
-            logger.warning("Can't load folder {}. Error {}".format(full_path, exc))
+            logger.warning(f"Can't load folder {full_path}. Error {exc}")
+            continue
 
         try:
             try:
@@ -88,7 +99,7 @@
             except KeyError:
                 cfg = stor.load(Config, "config")
         except Exception as exc:
-            print("Fail to load {}. {}".format(os.path.basename(full_path), exc))
+            print(f"Fail to load {os.path.basename(full_path)}. {exc}")
             continue
 
         if WallyDB.run_interval in stor:
@@ -96,7 +107,7 @@
         else:
             run_time = os.stat(full_path).st_ctime
 
-        ftime = time.strftime("%d %b %H:%M", time.localtime(run_time))
+        ftime = f"{datetime.datetime.fromtimestamp(run_time):%d %b %H:%M}"
 
         test_types = []
         for suite_cfg in cfg.get('tests', []):
@@ -221,7 +232,7 @@
 
 def load_config(path: str) -> Config:
     path = os.path.abspath(path)
-    cfg_dict = yaml_load(open(path).read())
+    cfg_dict = yaml_load(open(path))
 
     while 'include' in cfg_dict:
         inc = cfg_dict.pop('include')
@@ -230,7 +241,7 @@
 
         for fname in inc:
             inc_path = find_cfg_file(fname, path)
-            inc_dict = yaml_load(open(inc_path).read())
+            inc_dict = yaml_load(open(inc_path))
             inc_dict.update(cfg_dict)
             cfg_dict = inc_dict
 
@@ -363,7 +374,7 @@
 
     start_time = int(time.time())
 
-    report_stages = []  # type: List[Stage]
+    report_stages: List[Stage] = []
     if not getattr(opts, "no_report", False):
         reporters = opts.reporters.split(",")
         assert len(set(reporters)) == len(reporters)