Migrating to Python v3

 - support for Python v3.8.x and v3.5.x
 - new tag, 2019.2.8
 - updated class generation and iterator usage (see the sketch below)
 - unit tests updated, coverage now above 75%
 - new coverage routines
 - unit test profiling
 - full fake data set for the unit tests
 - a full unit test run completes in ~1.5 seconds
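
The iterator-related updates follow one recurring Python 3 pattern; a minimal
sketch (names and data are illustrative, not from the codebase):

    _d = {"cfg01": "ok", "cmp01": "fail"}
    # Python 2 style, removed throughout: for _k, _v in _d.iteritems(): ...
    for _k, _v in _d.items():           # .items() returns a live view
        print(_k, _v)
    _keys = list(_d.keys())             # materialize when a list is required
    _first = next(iter(_d.values()))    # first value without building a list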

Bugfixes
 - 34834, proper use of the 'sudo' option
 - multiple fixes for proper iterator use
 - 37919, show a warning when both installed and candidate versions
   are newer than the release version

Change-Id: Idd6b889f7ce94ae0c832e2f0a0346e4fdc3264a3
Related-PROD: PROD-34834 PROD-34664 PROD-34919
diff --git a/cfg_checker/cfg_check.py b/cfg_checker/cfg_check.py
index 1a2d79f..5c986a2 100644
--- a/cfg_checker/cfg_check.py
+++ b/cfg_checker/cfg_check.py
@@ -47,7 +47,7 @@
         args, unknown = parser.parse_known_args()
     except TypeError:
         logger_cli.info("\n# Please, check arguments")
-        sys.exit(0)
+        sys.exit(1)
 
     if unknown:
         logger_cli.error(
diff --git a/cfg_checker/cli/command.py b/cfg_checker/cli/command.py
index 9ac05f3..0a892d8 100644
--- a/cfg_checker/cli/command.py
+++ b/cfg_checker/cli/command.py
@@ -35,11 +35,11 @@
     # check commands
     if not hasattr(args, 'type') or not args.type:
         logger_cli.info("\n# Please, type a command listed above")
-        return 0
+        return 1
     _type = args.type.replace("-", "_") if "-" in args.type else args.type
     if command not in commands:
         logger_cli.info("\n# Please, type a command listed above")
-        return 0
+        return 1
     elif _type not in commands[command]:
         # check type
         logger_cli.info(
@@ -47,7 +47,7 @@
                 command
             )
         )
-        return 0
+        return 1
     else:
         # form function name to call
         _method_name = "do_" + _type
@@ -86,7 +86,7 @@
         args, unknown = my_parser.parse_known_args()
     except TypeError:
         logger_cli.info("\n# Please, check arguments")
-        sys.exit(0)
+        sys.exit(1)
 
     if unknown:
         logger_cli.error(
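
The sys.exit(0) -> sys.exit(1) and return 0 -> return 1 changes above make
argument errors visible to the calling shell; a minimal sketch of the
convention (do_work is a placeholder, not a function from this codebase):

    import sys

    def do_work():
        return False                   # pretend the arguments were invalid

    def main():
        return 0 if do_work() else 1   # 0 = success, nonzero = failure

    if __name__ == '__main__':
        sys.exit(main())               # shells and CI react to this status
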
diff --git a/cfg_checker/cli/network.py b/cfg_checker/cli/network.py
index 5c5a4e2..6d3059e 100644
--- a/cfg_checker/cli/network.py
+++ b/cfg_checker/cli/network.py
@@ -1,4 +1,4 @@
-from command import cli_command
+from .command import cli_command
 
 
 def entrypoint():
diff --git a/cfg_checker/cli/packages.py b/cfg_checker/cli/packages.py
index c44e5bc..b1319a1 100644
--- a/cfg_checker/cli/packages.py
+++ b/cfg_checker/cli/packages.py
@@ -1,4 +1,4 @@
-from command import cli_command
+from .command import cli_command
 
 
 def entrypoint():
diff --git a/cfg_checker/cli/reclass.py b/cfg_checker/cli/reclass.py
index f479a72..652f00a 100644
--- a/cfg_checker/cli/reclass.py
+++ b/cfg_checker/cli/reclass.py
@@ -1,4 +1,4 @@
-from command import cli_command
+from .command import cli_command
 
 
 def entrypoint():
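
The three CLI import fixes above are all the same Python 3 issue: PEP 328
made bare intra-package imports absolute by default. A two-line illustration:

    # from command import cli_command   # ModuleNotFoundError on Python 3:
    #                                   # resolved as a top-level module
    from .command import cli_command    # explicit relative import
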
diff --git a/cfg_checker/clients/__init__.py b/cfg_checker/clients/__init__.py
index c827a38..86d731f 100644
--- a/cfg_checker/clients/__init__.py
+++ b/cfg_checker/clients/__init__.py
@@ -21,9 +21,5 @@
     # create it once
     if salt is None:
         salt = SaltRemote()
-        # do most expensive operation with no strict timeout possible
-        # all nodes that answer ping
-        # salt.nodes_active = salt.get_active_nodes()
-
     # return once required
     return salt
diff --git a/cfg_checker/common/__init__.py b/cfg_checker/common/__init__.py
index 8316e39..752373f 100644
--- a/cfg_checker/common/__init__.py
+++ b/cfg_checker/common/__init__.py
@@ -1,10 +1,8 @@
-import const
+from cfg_checker.common.log import logger, logger_cli
 
-from log import logger, logger_cli
+from cfg_checker.common.other import Utils
 
-from other import Utils
-
-from settings import config
+from cfg_checker.common.settings import config
 
 
 def nested_set(_d, _keys, _value):
@@ -18,7 +16,6 @@
 
 
 utils = Utils()
-const = const
 logger = logger
 logger_cli = logger_cli
 config = config
diff --git a/cfg_checker/common/config_file.py b/cfg_checker/common/config_file.py
index 87f4759..c70e5a6 100644
--- a/cfg_checker/common/config_file.py
+++ b/cfg_checker/common/config_file.py
@@ -1,7 +1,7 @@
 import configparser
 import os
 
-from cfg_checker.common import logger_cli
+from . import logger_cli
 
 
 class ConfigFile(object):
diff --git a/cfg_checker/common/const.py b/cfg_checker/common/const.py
index 9e5fea2..685c79a 100644
--- a/cfg_checker/common/const.py
+++ b/cfg_checker/common/const.py
@@ -75,7 +75,7 @@
     "unk": "uknown"
 }
 
-ubuntu_releases = ["trusty", "xenial", "ubuntu"]
+ubuntu_releases = ["trusty", "xenial", "ubuntu", "bionic"]
 all_arch = ["amd64"]
 repo_types = {
     "main": "Officially supported software",
diff --git a/cfg_checker/common/exception.py b/cfg_checker/common/exception.py
index 52aab2d..2536099 100644
--- a/cfg_checker/common/exception.py
+++ b/cfg_checker/common/exception.py
@@ -1,5 +1,3 @@
-from exceptions import Exception
-
 
 class CheckerBaseExceptions(Exception):
     pass
diff --git a/cfg_checker/common/file_utils.py b/cfg_checker/common/file_utils.py
index c550184..398ea66 100644
--- a/cfg_checker/common/file_utils.py
+++ b/cfg_checker/common/file_utils.py
@@ -67,7 +67,7 @@
 
     _dict = {
         'fd': fd.fileno(),
-        'mode': oct(mode & 0777),
+        'mode': oct(mode & 0o777),
         'device': hex(dev),
         'inode': ino,
         'hard_links': nlink,
@@ -102,3 +102,11 @@
         return "... folder '{}' created".format(_folder)
     else:
         return "... folder is at '{}'".format(_folder)
+
+
+def ensure_folder_removed(_folder):
+    if os.path.exists(_folder):
+        os.rmdir(_folder)
+        return "... folder '{}' removed".format(_folder)
+    else:
+        return "... folder '{}' not exists".format(_folder)
diff --git a/cfg_checker/common/salt_utils.py b/cfg_checker/common/salt_utils.py
index 7bd6ce7..dd6fbec 100644
--- a/cfg_checker/common/salt_utils.py
+++ b/cfg_checker/common/salt_utils.py
@@ -5,15 +5,15 @@
 import os
 import time
 
+import requests
+
 from cfg_checker.common import config, logger, logger_cli
 from cfg_checker.common.exception import InvalidReturnException, SaltException
 from cfg_checker.common.other import shell
 
-import requests
-
 
 def _extract_password(_raw):
-    if not isinstance(_raw, unicode):
+    if not isinstance(_raw, str):
         raise InvalidReturnException(_raw)
     else:
         try:
@@ -69,10 +69,14 @@
 
     :return: password string
     """
-    _cmd = "salt-call"
-    _args = "--out=json pillar.get _param:salt_api_password"
+    _cmd = []
+    if config.ssh_uses_sudo:
+        _cmd = ["sudo"]
+    # salt commands
+    _cmd.append("salt-call")
+    _cmd.append("--out=json pillar.get _param:salt_api_password")
     try:
-        _result = shell(" ".join([_cmd, _args]))
+        _result = shell(" ".join(_cmd))
     except OSError as e:
         raise SaltException(
             "Salt error calling '{}': '{}'\n"
@@ -386,7 +390,7 @@
             param=path,
             kwarg=_kwarg
         )
-        return salt_output[salt_output.keys()[0]]
+        return [*salt_output.values()][0]
 
     def f_append_master(self, path, strings_list, makedirs=True):
         _kwarg = {
@@ -400,7 +404,7 @@
             param=_args,
             kwarg=_kwarg
         )
-        return salt_output[salt_output.keys()[0]]
+        return [*salt_output.values()][0]
 
     def mkdir(self, target, path, tgt_type=None):
         salt_output = self.cmd(
@@ -452,7 +456,7 @@
             param=_arg,
             kwarg=_kwarg
         )
-        return salt_output[salt_output.keys()[0]]
+        return [*salt_output.values()][0]
 
     def cache_file(self, target, source_path):
         salt_output = self.cmd(
@@ -460,7 +464,7 @@
             "cp.cache_file",
             param=source_path
         )
-        return salt_output[salt_output.keys()[0]]
+        return [*salt_output.values()][0]
 
     def get_file(self, target, source_path, target_path, tgt_type=None):
         return self.cmd(
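
The repeated [*salt_output.values()][0] replaces Python 2's
salt_output[salt_output.keys()[0]], which indexed keys() while it was still
a list. A short equivalence check (payload is illustrative):

    salt_output = {"cfg01.local": {"retcode": 0}}
    _a = [*salt_output.values()][0]         # form used above (3.5+ unpacking)
    _b = next(iter(salt_output.values()))   # equivalent, no temporary list
    assert _a == _b
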
diff --git a/cfg_checker/common/settings.py b/cfg_checker/common/settings.py
index 33e7c25..92c17a5 100644
--- a/cfg_checker/common/settings.py
+++ b/cfg_checker/common/settings.py
@@ -1,10 +1,10 @@
 import os
 
-from exception import ConfigException
+from cfg_checker.common.exception import ConfigException
 
-from log import logger_cli
+from cfg_checker.common.log import logger_cli
 
-from other import utils
+from cfg_checker.common.other import utils
 
 pkg_dir = os.path.dirname(__file__)
 pkg_dir = os.path.join(pkg_dir, os.pardir, os.pardir)
diff --git a/cfg_checker/helpers/console_utils.py b/cfg_checker/helpers/console_utils.py
index 994143c..1a2c184 100644
--- a/cfg_checker/helpers/console_utils.py
+++ b/cfg_checker/helpers/console_utils.py
@@ -16,8 +16,8 @@
         new_size = len(note)
         if self._note_size > new_size:
             _suffix = ' '*(self._note_size - new_size)
-        _percent = (100 * index) / self.total
-        _index = (self.bar_size * index) / self.total
+        _percent = (100 * index) // self.total
+        _index = (self.bar_size * index) // self.total
         # clear the line
         sys.stdout.write('\r')
         # print new progress
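
The switch to '//' matters because '/' became true division in Python 3 and
the progress math needs integers:

    index, total = 3, 8
    print((100 * index) / total)     # 37.5 -- true division in Python 3
    print((100 * index) // total)    # 37   -- floor division, as in Python 2
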
diff --git a/cfg_checker/helpers/errors.py b/cfg_checker/helpers/errors.py
index 95e1495..27ed242 100644
--- a/cfg_checker/helpers/errors.py
+++ b/cfg_checker/helpers/errors.py
@@ -25,11 +25,15 @@
         0: "Unknown error"
     }
 
-    def __init__(self, area_code, delimiter='-'):
+    def __init__(self, area_code, delimiter='-', folder=None):
         self._area_code = area_code
         self._delimiter = delimiter
         self._index += 1
 
+        # save folder
+        if folder:
+            self._error_logs_folder_name = folder
+
         # init the error log storage folder
         _folder = os.path.join(pkg_dir, self._error_logs_folder_name)
         self._conf_filename = os.path.join(
@@ -98,7 +102,7 @@
         _code = self._format_error_code(index)
         # prepare data as string list
         _d = self._errors[index]['data']
-        _data = ["    {}: {}".format(_k, _v) for _k, _v in _d.iteritems()]
+        _data = ["    {}: {}".format(_k, _v) for _k, _v in _d.items()]
         # format message
         _msg = "### {}:\n    Description: {}\n{}".format(
             _code,
@@ -170,9 +174,11 @@
 
         for _type in self._types.keys():
             _len = len(
-                filter(
-                    lambda i: self._errors[i]['type'] == _type,
-                    self._errors
+                list(
+                    filter(
+                        lambda i: self._errors[i]['type'] == _type,
+                        self._errors
+                    )
                 )
             )
             if _len:
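
The extra list() above is needed because filter() returns a lazy iterator in
Python 3, and len() of an iterator raises TypeError; a minimal repro:

    _errors = {1: {'type': 'MTU'}, 2: {'type': 'PING'}, 3: {'type': 'MTU'}}
    _f = filter(lambda i: _errors[i]['type'] == 'MTU', _errors)
    # len(_f) would raise TypeError here
    assert len(list(_f)) == 2
    # counting without building a list also works:
    assert sum(1 for i in _errors if _errors[i]['type'] == 'MTU') == 2
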
diff --git a/cfg_checker/helpers/tgz.py b/cfg_checker/helpers/tgz.py
index 754f0de..5be00e2 100644
--- a/cfg_checker/helpers/tgz.py
+++ b/cfg_checker/helpers/tgz.py
@@ -50,11 +50,14 @@
         else:
             self.basefile = _filepath
 
-    def get_file(self, name):
+    def get_file(self, name, decode=False):
         if self.has_file(name):
             with tarfile.open(self.basefile, "r:gz") as tgz:
                 _tgzitem = tgz.extractfile(tgz.getmember(name))
-                return _tgzitem.read()
+                if decode:
+                    return _tgzitem.read().decode('utf-8')
+                else:
+                    return _tgzitem.read()
         else:
             return None
 
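
The new decode flag exists because tarfile hands back bytes under Python 3;
a sketch of what get_file(..., decode=True) does (archive and member names
are hypothetical):

    import tarfile

    with tarfile.open("versions.tgz", "r:gz") as tgz:
        _raw = tgz.extractfile(tgz.getmember("tag.json")).read()
        assert isinstance(_raw, bytes)     # always bytes on Python 3
        _text = _raw.decode('utf-8')       # what decode=True returns
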
diff --git a/cfg_checker/helpers/zip.py b/cfg_checker/helpers/zip.py
deleted file mode 100644
index b050030..0000000
--- a/cfg_checker/helpers/zip.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import os
-import zipfile
-
-
-class ZIPFile(object):
-    def __init__(self, _filepath, label=None):
-
-        return
diff --git a/cfg_checker/modules/network/mapper.py b/cfg_checker/modules/network/mapper.py
index 482bdfa..59f3781 100644
--- a/cfg_checker/modules/network/mapper.py
+++ b/cfg_checker/modules/network/mapper.py
@@ -124,7 +124,7 @@
                 continue
 
             # build map based on IPs and save info too
-            for if_name, _dat in _pillar.iteritems():
+            for if_name, _dat in _pillar.items():
                 # get proper IF name
                 _if_name = if_name if 'name' not in _dat else _dat['name']
                 # place it
@@ -195,11 +195,11 @@
 
         logger_cli.info("-> mapping IPs")
         # match interfaces by IP subnets
-        for host, node_data in salt_master.nodes.iteritems():
+        for host, node_data in salt_master.nodes.items():
             if not salt_master.is_node_available(host):
                 continue
 
-            for net_name, net_data in node_data['networks'].iteritems():
+            for net_name, net_data in node_data['networks'].items():
                 # cut net name
                 _i = net_name.find('@')
                 _name = net_name if _i < 0 else net_name[:_i]
@@ -321,7 +321,7 @@
 
                 # debug, print built tree
                 # logger_cli.debug("# '{}'".format(_ifname))
-                lvls = _tree.keys()
+                lvls = list(_tree.keys())
                 lvls.sort()
                 n = len(lvls)
                 m = max([len(_tree[k].keys()) for k in _tree.keys()])
@@ -330,11 +330,14 @@
                 while True:
                     _lv = lvls.pop(0)
                     # get all interfaces on this level
-                    nets = _tree[_lv].keys()
+                    nets = iter(_tree[_lv].keys())
                     while True:
                         y = 0
                         # get next interface
-                        _net = nets.pop(0)
+                        try:
+                            _net = next(nets)
+                        except StopIteration:
+                            break
                         # all nets
                         _a = [_net]
                         # put current interface if this is only one left
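
The nets handling changes because dict views have no pop(); an iterator plus
StopIteration gives the same walk (tree content is illustrative):

    _tree = {0: {'br0': {}}, 1: {'bond0': {}, 'bond1': {}}}
    lvls = list(_tree.keys())    # a keys() view has no .sort() in Python 3
    lvls.sort()
    nets = iter(_tree[lvls[0]].keys())
    while True:
        try:
            _net = next(nets)    # replaces the old list.pop(0)
        except StopIteration:
            break
        print(_net)
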
diff --git a/cfg_checker/modules/network/network_errors.py b/cfg_checker/modules/network/network_errors.py
index 6c41021..7159a36 100644
--- a/cfg_checker/modules/network/network_errors.py
+++ b/cfg_checker/modules/network/network_errors.py
@@ -23,9 +23,9 @@
     NET_PING_ERROR = next(_c)
     NET_PING_NOT_RESOLVED = next(_c)
 
-    def __init__(self):
-        super(NetworkErrors, self).__init__("NET")
+    _initialized = False
 
+    def _add_types(self):
         self.add_error_type(
             self.NET_MTU_MISMATCH,
             "MTU mismatch on runtime interface and in reclass"
@@ -82,6 +82,21 @@
             self.NET_PING_NOT_RESOLVED,
             "Host not resolved while conducting Ping"
         )
+        self._initialized = True
+
+    def __init__(self, folder=None):
+        super(NetworkErrors, self).__init__("NET", folder=folder)
+
+        if not self._initialized:
+            self._add_types()
+            self._initialized = True
+
+    def __call__(self):
+        if not self._initialized:
+            self._add_types()
+            self._initialized = True
+
+        return self
 
 
 del _c
diff --git a/cfg_checker/modules/network/pinger.py b/cfg_checker/modules/network/pinger.py
index 266727b..0500284 100644
--- a/cfg_checker/modules/network/pinger.py
+++ b/cfg_checker/modules/network/pinger.py
@@ -44,7 +44,7 @@
     def ping_nodes(self, network_cidr_str):
         # Conduct actual ping using network CIDR
         logger_cli.info("# Collecting node pairs")
-        _fake_if = ipaddress.IPv4Interface(unicode(network_cidr_str))
+        _fake_if = ipaddress.IPv4Interface(str(network_cidr_str))
         _net = _fake_if.network
         # collect nodes and ips from reclass
         nodes = self._collect_node_addresses(_net)
@@ -69,7 +69,7 @@
                 "targets": {}
             }
 
-            for tgt_host, tgt_data in nodes.iteritems():
+            for tgt_host, tgt_data in nodes.items():
                 _t = _packets[src_host]["targets"]
                 for tgt_if in tgt_data:
                     tgt_if_name = tgt_if['name']
@@ -110,7 +110,7 @@
         _progress = Progress(_count)
         _progress_index = 0
         _node_index = 0
-        for src, src_data in _packets.iteritems():
+        for src, src_data in _packets.items():
             _targets = src_data["targets"]
             _node_index += 1
             # create 'targets.json' on source host
@@ -154,7 +154,7 @@
                 )
                 continue
             # Handle return codes
-            for tgt_node, _tgt_ips in _result.iteritems():
+            for tgt_node, _tgt_ips in _result.items():
                 for _params in _tgt_ips:
                     _body = "{}({}) --{}--> {}({}@{})\n".format(
                             src,
diff --git a/cfg_checker/modules/packages/__init__.py b/cfg_checker/modules/packages/__init__.py
index 9d55c05..41dfca1 100644
--- a/cfg_checker/modules/packages/__init__.py
+++ b/cfg_checker/modules/packages/__init__.py
@@ -1,7 +1,7 @@
 from cfg_checker.helpers import args_utils
 from cfg_checker.modules.packages.repos import RepoManager
 
-import checker
+from . import checker
 
 command_help = "Package versions check (Candidate vs Installed)"
 
diff --git a/cfg_checker/modules/packages/checker.py b/cfg_checker/modules/packages/checker.py
index 92d9e1c..8f30f3c 100644
--- a/cfg_checker/modules/packages/checker.py
+++ b/cfg_checker/modules/packages/checker.py
@@ -8,7 +8,7 @@
 from cfg_checker.nodes import salt_master
 from cfg_checker.reports import reporter
 
-from versions import DebianVersion, PkgVersions, VersionCmpResult
+from .versions import DebianVersion, PkgVersions, VersionCmpResult
 
 
 class CloudPackageChecker(object):
@@ -78,6 +78,7 @@
             # sort packages
             _pn, _val = all_packages.popitem()
             _c = _val['desc']['section']
+            _rkeys = _val['results'].keys()
 
             if not full:
                 # Check if this packet has errors
@@ -125,9 +126,9 @@
                 _data['unlisted'].update({
                     _pn: _val
                 })
-                _eu += _val['results'].keys().count(const.VERSION_ERR)
-                _wu += _val['results'].keys().count(const.VERSION_WARN)
-                _du += _val['results'].keys().count(const.VERSION_DOWN)
+                _eu += sum(x == const.VERSION_ERR for x in _rkeys)
+                _wu += sum(x == const.VERSION_WARN for x in _rkeys)
+                _du += sum(x == const.VERSION_DOWN for x in _rkeys)
             # mirantis/critical
             # elif len(_c) > 0 and _c != 'System':
             elif _val['is_mirantis']:
@@ -135,25 +136,25 @@
                 _data['critical'].update({
                     _pn: _val
                 })
-                _ec += _val['results'].keys().count(const.VERSION_ERR)
-                _wc += _val['results'].keys().count(const.VERSION_WARN)
-                _dc += _val['results'].keys().count(const.VERSION_DOWN)
+                _ec += sum(x == const.VERSION_ERR for x in _rkeys)
+                _wc += sum(x == const.VERSION_WARN for x in _rkeys)
+                _dc += sum(x == const.VERSION_DOWN for x in _rkeys)
             # system
             elif _c == 'System':
                 _data['system'].update({
                     _pn: _val
                 })
-                _es += _val['results'].keys().count(const.VERSION_ERR)
-                _ws += _val['results'].keys().count(const.VERSION_WARN)
-                _ds += _val['results'].keys().count(const.VERSION_DOWN)
+                _es += sum(x == const.VERSION_ERR for x in _rkeys)
+                _ws += sum(x == const.VERSION_WARN for x in _rkeys)
+                _ds += sum(x == const.VERSION_DOWN for x in _rkeys)
             # rest
             else:
                 _data['other'].update({
                     _pn: _val
                 })
-                _eo += _val['results'].keys().count(const.VERSION_ERR)
-                _wo += _val['results'].keys().count(const.VERSION_WARN)
-                _do += _val['results'].keys().count(const.VERSION_DOWN)
+                _eo += sum(x == const.VERSION_ERR for x in _rkeys)
+                _wo += sum(x == const.VERSION_WARN for x in _rkeys)
+                _do += sum(x == const.VERSION_DOWN for x in _rkeys)
 
         _progress.end()
 
@@ -244,7 +245,7 @@
         _total_processed = 0
         # Collect packages from all of the nodes in flat dict
         _all_packages = {}
-        for node_name, node_value in salt_master.nodes.iteritems():
+        for node_name, node_value in salt_master.nodes.items():
             _uniq_len = len(_all_packages.keys())
             _progress_index += 1
             # progress updates shown before next node only
@@ -256,7 +257,7 @@
                     _total_processed
                 )
             )
-            for _name, _value in node_value['packages'].iteritems():
+            for _name, _value in node_value['packages'].items():
                 _total_processed += 1
                 # Parse versions from nodes
                 _ver_ins = DebianVersion(_value['installed'])
@@ -309,9 +310,9 @@
                     _vs = {}
                     _sections = {}
                     _apps = {}
-                    for s, apps in _r.iteritems():
-                        for a, versions in apps.iteritems():
-                            for v, repos in versions.iteritems():
+                    for s, apps in _r.items():
+                        for a, versions in apps.items():
+                            for v, repos in versions.items():
                                 for repo in repos:
                                     if v not in _vs:
                                         _vs[v] = []
@@ -324,11 +325,13 @@
                                     _apps[v].append(a)
                     # search for the newest version among filtered
                     _r_desc = []
-                    _vs_keys = _vs.keys()
-                    if _vs_keys:
-                        _newest = _newest = DebianVersion(_vs_keys.pop())
-                    else:
+                    _vs_keys = iter(_vs.keys())
+                    # get next version, if any
+                    try:
+                        _newest = DebianVersion(next(_vs_keys))
+                    except StopIteration:
                         _newest = DebianVersion('')
+                    # iterate others, if any
                     for v in _vs_keys:
                         _this = DebianVersion(v)
                         if _this > _newest:
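
The sum(...) counters above replace list.count(), which dict views no longer
provide; True counts as 1, so the tallies match:

    _rkeys = ['ok', 'err', 'ok', 'err', 'warn']   # any iterable of statuses
    # Python 2: _rkeys.count('err'); dict views have no .count() in Python 3
    assert sum(x == 'err' for x in _rkeys) == 2
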
diff --git a/cfg_checker/modules/packages/repos.py b/cfg_checker/modules/packages/repos.py
index 00c438f..57d8b9e 100644
--- a/cfg_checker/modules/packages/repos.py
+++ b/cfg_checker/modules/packages/repos.py
@@ -43,32 +43,39 @@
 
 def _get_value_index(_di, value, header=None):
     # Mainteiner names often uses specific chars
-    # so make sure that value saved is unicode not str
-    _val = unicode(value, 'utf-8') if isinstance(value, str) else value
+    # so make sure that value saved is str, not bytes
+    # Python2 used to do:
+    # _val = unicode(value, 'utf-8') if isinstance(value, str) else value
+    # Python3 strings are always unicode, so no conversion is needed
+    _val = value
     if header:
-        if not filter(lambda i: _di[i]["header"] == header, _di):
-            _index = unicode(len(_di.keys()) + 1)
+        try:
+            _ = next(filter(lambda i: _di[i]["header"] == header, _di))
+            # iterator not empty, find index
+            for _k, _v in _di.items():
+                if _v["header"] == header:
+                    _index = _k
+        except StopIteration:
+            _index = str(len(_di.keys()) + 1)
             _di[_index] = {
                 "header": header,
                 "props": _val
             }
-        else:
-            for _k, _v in _di.iteritems():
-                if _v["header"] == header:
-                    _index = _k
-
-        return _index
+        return _index
     else:
-        if not filter(lambda i: _di[i] == _val, _di):
-            _index = unicode(len(_di.keys()) + 1)
-            # on save, cast it as unicode
-            _di[_index] = _val
-        else:
-            for _k, _v in _di.iteritems():
+        try:
+            _ = next(filter(lambda i: _di[i] == _val, _di))
+            # iterator not empty, find index
+            for _k, _v in _di.items():
                 if _v == _val:
                     _index = _k
-
-        return _index
+        except StopIteration:
+            _index = str(len(_di.keys()) + 1)
+            # on save, cast it as str
+            _di[_index] = _val
+        return _index
 
 
 def _safe_load(_f, _a):
@@ -79,7 +86,7 @@
                 _f
             )
         )
-        return json.loads(_a.get_file(_f))
+        return json.loads(_a.get_file(_f, decode=True))
     else:
         return {}
 
@@ -92,8 +99,33 @@
 
 
 class ReposInfo(object):
-    repos = []
-    _repofile = os.path.join(pkg_dir, "versions", _repos_info_archive)
+    init_done = False
+
+    def _init_vars(self):
+        self.repos = []
+
+    def _init_folders(self, arch_folder=None):
+        if arch_folder:
+            self._arch_folder = arch_folder
+            self._repofile = os.path.join(arch_folder, _repos_info_archive)
+        else:
+            self._arch_folder = os.path.join(pkg_dir, "versions")
+            self._repofile = os.path.join(
+                self._arch_folder,
+                _repos_info_archive
+            )
+
+    def __init__(self, arch_folder=None):
+        # perform inits
+        self._init_vars()
+        self._init_folders(arch_folder)
+        self.init_done = True
+
+    def __call__(self, *args, **kwargs):
+        if self.init_done:
+            return self
+        else:
+            return self.__init__(*args, **kwargs)
 
     @staticmethod
     def _ls_repo_page(url):
@@ -189,8 +221,10 @@
         else:
             # gather all of them
             _tags, _ = self._ls_repo_page(base_url)
-            _tags.remove('hotfix')
-            _tags.remove('update')
+            if "hotfix" in _tags:
+                _tags.remove('hotfix')
+            if "update" in _tags:
+                _tags.remove('update')
             # search tags in subfolders
             _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
             _u_tags, _ = self._ls_repo_page(base_url + 'update')
@@ -334,30 +368,46 @@
 
     def get_repoinfo(self, tag):
         _tgz = TGZFile(self._repofile)
-        _buf = _tgz.get_file(tag + ext)
+        _buf = _tgz.get_file(tag + ext, decode=True)
         return json.loads(_buf)
 
 
 class RepoManager(object):
-    # archives
-    _arch_folder = os.path.join(pkg_dir, "versions")
-    _versions_arch = os.path.join(_arch_folder, _repos_versions_archive)
-    _desc_arch = os.path.join(_arch_folder, _pkg_desc_archive)
-    _apps_filename = "apps.json"
+    init_done = False
 
-    # repository index
-    _repo_index = {}
-    _mainteiners_index = {}
+    def _init_folders(self, arch_folder=None):
+        # override arch folder if needed
+        if arch_folder:
+            self._arch_folder = arch_folder
+        else:
+            self._arch_folder = os.path.join(pkg_dir, "versions")
 
-    _apps = {}
+        self._versions_arch = os.path.join(
+            self._arch_folder,
+            _repos_versions_archive
+        )
+        self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
 
-    # init package versions storage
-    _versions_mirantis = {}
-    _versions_other = {}
+    def _init_vars(self, info_class):
+        # RepoInfo instance init
+        if info_class:
+            self._info_class = info_class
+        else:
+            self._info_class = ReposInfo()
+        # archives
+        self._apps_filename = "apps.json"
 
-    def __init__(self):
-        # Ensure that versions folder exists
-        logger_cli.debug(ensure_folder_exists(self._arch_folder))
+        # repository index
+        self._repo_index = {}
+        self._mainteiners_index = {}
+
+        self._apps = {}
+
+        # init package versions storage
+        self._versions_mirantis = {}
+        self._versions_other = {}
+
+    def _init_archives(self):
         # Init version files
         self.versionstgz = TGZFile(
             self._versions_arch,
@@ -394,6 +444,22 @@
             self.versionstgz
         )
 
+    def __init__(self, arch_folder=None, info_class=None):
+        # Perform inits
+        self._init_vars(info_class)
+        self._init_folders(arch_folder)
+        # Ensure that versions folder exists
+        logger_cli.debug(ensure_folder_exists(self._arch_folder))
+        # Preload/create archives
+        self._init_archives()
+        self.init_done = True
+
+    def __call__(self, *args, **kwargs):
+        if self.init_done:
+            return self
+        else:
+            return self.__init__(*args, **kwargs)
+
     def _create_repo_header(self, p):
         _header = "_".join([
             p['tag'],
@@ -504,14 +570,14 @@
         due to huge resulting file size and slow processing
         """
         # init gzip and downloader
-        _info = ReposInfo().get_repoinfo(tag)
+        _info = self._info_class.get_repoinfo(tag)
         # calculate Packages.gz files to process
         _baseurl = _info.pop("baseurl")
         _total_components = len(_info.keys()) - 1
         _ubuntu_package_repos = 0
         _other_repos = 0
-        for _c, _d in _info.iteritems():
-            for _ur, _l in _d.iteritems():
+        for _c, _d in _info.items():
+            for _ur, _l in _d.items():
                 if _ur in ubuntu_releases:
                     _ubuntu_package_repos += len(_l)
                 elif _ur != 'url':
@@ -531,12 +597,12 @@
         _index = 0
         _processed = 0
         _new = 0
-        for _c, _d in _info.iteritems():
+        for _c, _d in _info.items():
             # we do not need url here, just get rid of it
             if 'url' in _d:
                 _d.pop('url')
             # _url =  if 'url' in _d else _baseurl + _c
-            for _ur, _l in _d.iteritems():
+            for _ur, _l in _d.items():
                 # iterate package collections
                 for _p in _l:
                     # descriptions
@@ -564,6 +630,8 @@
                             )
                         )
                         continue
+                    else:
+                        _raw = _raw.decode("utf-8")
                     _progress.write_progress(
                         _index,
                         note="/ {} {} {} {} {}, {}/{}".format(
@@ -728,11 +796,9 @@
     def build_repos(self, url, tag=None):
         """Builds versions data for selected tag, or for all of them
         """
-        # Init the ReposInfo class and check if all files are present
-        _repos = ReposInfo()
         # recoursively walk the mirrors
         # and gather all of the repos for 'tag' or all of the tags
-        _repos.fetch_repos(url, tag=tag)
+        self._info_class.fetch_repos(url, tag=tag)
 
     def _build_action(self, url, tags):
         for t in tags:
@@ -741,7 +807,7 @@
 
     def get_available_tags(self, tag=None):
         # Populate action tags
-        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
+        major, updates, hotfix = self._info_class.list_tags(splitted=True)
 
         _tags = []
         if tag in major:
@@ -767,14 +833,14 @@
             logger_cli.info("# No action set, nothing to do")
         # See if this is a list action
         if action == "list":
-            _all = ReposInfo().list_tags()
+            _all = self._info_class.list_tags()
             if _all:
                 # Print pretty list and exit
                 logger_cli.info("# Tags available at '{}':".format(url))
                 for t in _all:
                     _ri = self._repo_index
                     _isparsed = any(
-                        [k for k, v in _ri.iteritems()
+                        [k for k, v in _ri.items()
                          if v['props']['tag'] == t]
                     )
                     if _isparsed:
@@ -862,8 +928,8 @@
         _rows = []
         for _p in versions.keys():
             _vs = versions[_p]
-            for _v, _d1 in _vs.iteritems():
-                for _md5, _info in _d1.iteritems():
+            for _v, _d1 in _vs.items():
+                for _md5, _info in _d1.items():
                     if _all or name == _info['app']:
                         _s_max = max(len(_info['section']), _s_max)
                         _a_max = max(len(_info['app']), _a_max)
@@ -993,21 +1059,21 @@
         and filters them using keys above
         """
         if tag:
-            tag = unicode(tag) if not isinstance(tag, unicode) else tag
+            tag = str(tag) if not isinstance(tag, str) else tag
         _out = {}
         _vs = self.get_package_versions(name, tagged=True)
         # iterate to filter out keywords
-        for s, apps in _vs.iteritems():
-            for a, _tt in apps.iteritems():
-                for t, vs in _tt.iteritems():
+        for s, apps in _vs.items():
+            for a, _tt in apps.items():
+                for t, vs in _tt.items():
                     # filter tags
                     if tag and t != tag and t.rsplit('.', 1)[0] != tag:
                         continue
                     # Skip hotfix tag
                     if t == tag + ".hotfix":
                         continue
-                    for v, rp in vs.iteritems():
-                        for h, p in rp.iteritems():
+                    for v, rp in vs.items():
+                        for h, p in rp.items():
                             # filter headers with all keywords matching
                             _h = re.split(r"[\-\_]+", h)
                             _included = all([kw in _h for kw in include])
@@ -1038,9 +1104,9 @@
         # insert repo data, insert props into headers place
         _package = {}
         if tagged:
-            for _v, _d1 in _vs.iteritems():
+            for _v, _d1 in _vs.items():
                 # use tag as a next step
-                for _md5, _info in _d1.iteritems():
+                for _md5, _info in _d1.items():
                     _s = _info['section']
                     _a = _info['app']
                     for _pair in _info['repo']:
@@ -1061,8 +1127,8 @@
                             _rp
                         )
         else:
-            for _v, _d1 in _vs.iteritems():
-                for _md5, _info in _d1.iteritems():
+            for _v, _d1 in _vs.items():
+                for _md5, _info in _d1.items():
                     _s = _info['section']
                     _a = _info['app']
                     for _pair in _info['repo']:
@@ -1079,7 +1145,7 @@
 
     def parse_repos(self):
         # all tags to check
-        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
+        major, updates, hotfix = self._info_class.list_tags(splitted=True)
 
         # major tags
         logger_cli.info("# Processing major tags")
diff --git a/cfg_checker/modules/packages/versions.py b/cfg_checker/modules/packages/versions.py
index 7fae9fc..542c0e4 100644
--- a/cfg_checker/modules/packages/versions.py
+++ b/cfg_checker/modules/packages/versions.py
@@ -351,8 +351,8 @@
                     self.target = r
                 elif i > r:
                     # both are newer, same target
-                    self.status = const.VERSION_UP
-                    self.action = const.ACT_NA
+                    self.status = const.VERSION_WARN
+                    self.action = const.ACT_REPO
                 elif i == r:
                     # all is ok
                     self.status = const.VERSION_OK
@@ -372,10 +372,3 @@
 
         # and we need to update per-part status
         self.source.update_parts(self.target, self.status)
-
-    @staticmethod
-    def deb_lower(_s, _t):
-        if _t.debian and _t.debian > _s.debian:
-            return True
-        else:
-            return False
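
The VERSION_UP -> VERSION_WARN change implements bugfix 37919 in plain terms:
when both installed and candidate versions are ahead of the release version,
report a warning instead of silently accepting it. Schematically (tuples
stand in for DebianVersion objects):

    installed, candidate, release = (2, 5), (2, 6), (2, 4)
    if installed > release and candidate > release:
        status, action = "VERSION_WARN", "ACT_REPO"   # repo needs attention
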
diff --git a/cfg_checker/modules/reclass/__init__.py b/cfg_checker/modules/reclass/__init__.py
index 4b8b667..88b287e 100644
--- a/cfg_checker/modules/reclass/__init__.py
+++ b/cfg_checker/modules/reclass/__init__.py
@@ -4,9 +4,9 @@
 from cfg_checker.helpers import args_utils
 from cfg_checker.reports import reporter
 
-import comparer
+from . import comparer
 
-import validator
+from . import validator
 
 command_help = "Reclass related checks and reports"
 
diff --git a/cfg_checker/modules/reclass/comparer.py b/cfg_checker/modules/reclass/comparer.py
index 8ef8894..47e4baf 100644
--- a/cfg_checker/modules/reclass/comparer.py
+++ b/cfg_checker/modules/reclass/comparer.py
@@ -5,8 +5,9 @@
 import itertools
 import os
 
+from functools import reduce
+
 from cfg_checker.common import logger, logger_cli
-from cfg_checker.reports import reporter
 
 import yaml
 
@@ -197,13 +198,13 @@
                     # use ifilterfalse to compare lists of dicts
                     try:
                         _removed = list(
-                            itertools.ifilterfalse(
+                            itertools.filterfalse(
                                 lambda x: x in dict2[k],
                                 dict1[k]
                             )
                         )
                         _added = list(
-                            itertools.ifilterfalse(
+                            itertools.filterfalse(
                                 lambda x: x in dict1[k],
                                 dict2[k]
                             )
@@ -271,9 +272,10 @@
                     except TypeError as e:
                         logger.warning(
                             "One of the values is not a dict: "
-                            "{}, {}".format(
+                            "{}, {}; {}".format(
                                 str(dict1),
-                                str(dict2)
+                                str(dict2),
+                                str(e)
                             ))
                         match = False
                     if not match:
@@ -331,38 +333,3 @@
 
         _diff_report["diff_names"] = [self.model_name_1, self.model_name_2]
         return _diff_report
-
-    def compare_models(self):
-        # Do actual compare using model names from the class
-        self.load_model_tree(
-            self.model_name_1,
-            self.model_path_1
-        )
-        self.load_model_tree(
-            self.model_name_2,
-            self.model_path_2
-        )
-        # Models should have similar structure to be compared
-        # classes/system
-        # classes/cluster
-        # nodes
-
-        diffs = self.generate_model_report_tree()
-
-        report_file = \
-            self.model_name_1 + "-vs-" + self.model_name_2 + ".html"
-        # HTML report class is post-callable
-        report = reporter.ReportToFile(
-            reporter.HTMLModelCompare(),
-            report_file
-        )
-        logger_cli.info("...generating report to {}".format(report_file))
-        # report will have tabs for each of the comparable entities in diffs
-        report({
-            "nodes": {},
-            "rc_diffs": diffs,
-        })
-        # with open("./gen_tree.json", "w+") as _out:
-        #     _out.write(json.dumps(mComparer.generate_model_report_tree))
-
-        return
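
itertools.ifilterfalse was renamed to itertools.filterfalse in Python 3; the
list-of-dicts comparison above behaves the same:

    import itertools

    dict1 = {'k': [1, 2, 3]}
    dict2 = {'k': [2, 3, 4]}
    _removed = list(
        itertools.filterfalse(lambda x: x in dict2['k'], dict1['k'])
    )
    _added = list(
        itertools.filterfalse(lambda x: x in dict1['k'], dict2['k'])
    )
    assert (_removed, _added) == ([1], [4])
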
diff --git a/cfg_checker/nodes.py b/cfg_checker/nodes.py
index ec20f6a..c261752 100644
--- a/cfg_checker/nodes.py
+++ b/cfg_checker/nodes.py
@@ -35,7 +35,7 @@
         try:
             _keys = self.salt.list_keys()
             _str = []
-            for _k, _v in _keys.iteritems():
+            for _k, _v in _keys.items():
                 _str.append("{}: {}".format(_k, len(_v)))
             logger_cli.info("-> keys collected: {}".format(", ".join(_str)))
 
@@ -92,10 +92,12 @@
             )
         )
         # get master node fqdn
-        _filtered = filter(
-            lambda nd: self.nodes[nd]['role'] == const.all_roles_map['cfg'],
-            self.nodes
-        )
+        # _filtered = filter(
+        #     lambda nd: self.nodes[nd]['role'] == const.all_roles_map['cfg'],
+        #     self.nodes
+        # )
+        _role = const.all_roles_map['cfg']
+        _filtered = [n for n, v in self.nodes.items() if v['role'] == _role]
         if len(_filtered) < 1:
             raise SaltException(
                 "No master node detected! Check/Update node role map."
@@ -161,7 +163,7 @@
         else:
             _nodes = self.nodes
         _result = self.execute_cmd_on_active_nodes(cmd, nodes=nodes)
-        for node, data in _nodes.iteritems():
+        for node, data in _nodes.items():
 
             if node in self.skip_list:
                 logger_cli.debug(
@@ -200,7 +202,7 @@
         )
         _result = self.salt.pillar_get(self.active_nodes_compound, pillar_path)
         self.not_responded = []
-        for node, data in self.nodes.iteritems():
+        for node, data in self.nodes.items():
             if node in self.skip_list:
                 logger_cli.debug(
                     "... '{}' skipped while collecting '{}'".format(
diff --git a/cfg_checker/reports/reporter.py b/cfg_checker/reports/reporter.py
index fa5fc72..3d3ede3 100644
--- a/cfg_checker/reports/reporter.py
+++ b/cfg_checker/reports/reporter.py
@@ -110,9 +110,7 @@
             r['arch']
         ]) + ", "
         # maintainer w/o email
-        _m = r['maintainer'][:r['maintainer'].find('<')-1]
-        _m_ascii = _m.encode('ascii', errors="xmlcharrefreplace")
-        _text += _m_ascii
+        _m = r['maintainer'][:r['maintainer'].find('<')-1]
+        _text += _m.encode('ascii', errors="xmlcharrefreplace").decode()
         # newline
         _text += "<br />"
     return _text
@@ -215,17 +213,21 @@
 
     def _extend_data(self, data):
         def get_bytes(value):
-            if value[-1] == 'G':
-                return int(float(value[:-1]) * 1024 * 1024 * 1024)
-            elif value[-1] == 'M':
-                return int(float(value[:-1]) * 1024 * 1024)
-            elif value[-1] == 'K':
-                return int(float(value[:-1]) * 1024)
-            else:
+            _char = value[-1]
+            if _char.isdigit():
+                # plain byte counts come with no suffix char
+                return int(value)
+            else:
+                _sizes = ["*", "K", "M", "G", "T"]
+                _flo = float(value[:-1])
+                _pwr = 1
+                if _char in _sizes:
+                    _pwr = _sizes.index(_char)
+                # scale by 1024 per suffix step, e.g. "4G" -> 4 * 1024 ** 3
+                return int(_flo * (1024.0 ** _pwr))
 
         def _dmidecode(_dict, type=0):
-            _key = "dmi"
+            # _key = "dmi"
             _key_r = "dmi_r"
             _f_cmd = salt_master.get_cmd_for_nodes
             _cmd = "dmidecode -t {}".format(type)
@@ -234,7 +236,7 @@
             pass
 
         def _lsblk(_dict):
-            _key = "lsblk"
+            # _key = "lsblk"
             _key_r = "lsblk_raw"
             _f_cmd = salt_master.get_cmd_for_nodes
             _columns = [
@@ -261,7 +263,7 @@
             _cmd = "lscpu | sed -n '/\\:/s/ \\+/ /gp'"
             _f_cmd(_cmd, _key_r, target_dict=_dict)
             # parse them and put into dict
-            for node, dt in _dict.iteritems():
+            for node, dt in _dict.items():
                 dt[_key] = {}
                 if dt['status'] == DOWN:
                     continue
@@ -291,7 +293,7 @@
             _cmd = "free -h | sed -n '/Mem/s/ \\+/ /gp'"
             _f_cmd(_cmd, _key_r, target_dict=_dict)
             # parse them and put into dict
-            for node, dt in _dict.iteritems():
+            for node, dt in _dict.items():
                 dt[_key] = {}
                 if dt['status'] == DOWN:
                     continue
@@ -323,7 +325,7 @@
             _f_cmd = salt_master.get_cmd_for_nodes
             _cmd = "service --status-all"
             _f_cmd(_cmd, _key_r, target_dict=_dict)
-            for node, dt in _dict.iteritems():
+            for node, dt in _dict.items():
                 dt[_key] = {}
                 if dt['status'] == DOWN:
                     continue
@@ -386,7 +388,7 @@
                     _softnet_interval
                 )
             _f_cmd(_cmd, _key_r, target_dict=_dict)
-            for node, dt in _dict.iteritems():
+            for node, dt in _dict.items():
                 _cpuindex = 1
                 _add_mode = True
                 # totals for start mark
@@ -438,7 +440,7 @@
                             dt[_key]["total"][i] += _c[i]
                     _cpuindex += 1
                 # finally, subtract initial totals
-                for k, v in dt[_key].iteritems():
+                for k, v in dt[_key].items():
                     if k != "total":
                         dt[_key][k] = [v[i] / 5. for i in range(len(v))]
                     else:
@@ -483,7 +485,7 @@
             "disk_raw",
             target_dict=data["nodes"]
         )
-        for dt in data["nodes"].itervalues():
+        for dt in data["nodes"].values():
             dt["disk"] = {}
             dt["disk_max_dev"] = None
             if dt['status'] == DOWN:
@@ -516,8 +518,8 @@
                     _d[_t[0]]['f'] = ""
 
         # prepare networks data for report
-        for net, net_v in data['map'].iteritems():
-            for node, ifs in net_v.iteritems():
+        for net, net_v in data['map'].items():
+            for node, ifs in net_v.items():
                 for d in ifs:
                     _err = "fail"
                     d['interface_error'] = _err if d['interface_error'] else ""
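
For reference, a standalone check of the corrected get_bytes helper (the
suffix table mirrors the one above):

    _sizes = ["*", "K", "M", "G", "T"]

    def get_bytes(value):
        _char = value[-1]
        if _char.isdigit():
            return int(value)          # plain byte count, no suffix
        _pwr = _sizes.index(_char) if _char in _sizes else 1
        return int(float(value[:-1]) * (1024.0 ** _pwr))

    assert get_bytes("512") == 512
    assert get_bytes("1K") == 1024
    assert get_bytes("4G") == 4 * 1024 ** 3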