Migrating to Python v3

 - support for Python v3.8.x
 - support for Python v3.5.x
 - new tag, 2019.2.8
 - updates class generation and iterators
 - unittests updated with coverage >75%
 - new coverage routines
 - unittests profiling
 - full fake data for unittests
 - unittest testrun is ~1.5 seconds long

Bugfixes
 - 34834, proper use of 'sudo' option
 - multiple proper iterator use
 - 37919, show warning when installed and candidate versions
   are newer compared to the release version

Change-Id: Idd6b889f7ce94ae0c832e2f0a0346e4fdc3264a3
Related-PROD: PROD-34834 PROD-34664 PROD-34919
diff --git a/cfg_checker/modules/packages/repos.py b/cfg_checker/modules/packages/repos.py
index 00c438f..57d8b9e 100644
--- a/cfg_checker/modules/packages/repos.py
+++ b/cfg_checker/modules/packages/repos.py
@@ -43,32 +43,39 @@
 
 def _get_value_index(_di, value, header=None):
     # Mainteiner names often uses specific chars
-    # so make sure that value saved is unicode not str
-    _val = unicode(value, 'utf-8') if isinstance(value, str) else value
+    # so make sure that value saved is str, not bytes
+    # Python2
+    # _val = str(value, 'utf-8') if isinstance(value, str) else value
+    # Python3 values are always utf-8 decoded str already
+    _val = value
     if header:
-        if not filter(lambda i: _di[i]["header"] == header, _di):
-            _index = unicode(len(_di.keys()) + 1)
+        try:
+            _ = next(filter(lambda i: _di[i]["header"] == header, _di))
+            # iterator not empty, find index
+            for _k, _v in _di.items():
+                if _v["header"] == header:
+                    _index = _k
+        except StopIteration:
+            _index = str(len(_di.keys()) + 1)
             _di[_index] = {
                 "header": header,
                 "props": _val
             }
-        else:
-            for _k, _v in _di.iteritems():
-                if _v["header"] == header:
-                    _index = _k
-
-        return _index
+        finally:
+            return _index
     else:
-        if not filter(lambda i: _di[i] == _val, _di):
-            _index = unicode(len(_di.keys()) + 1)
-            # on save, cast it as unicode
-            _di[_index] = _val
-        else:
-            for _k, _v in _di.iteritems():
+        try:
+            _ = next(filter(lambda i: _di[i] == _val, _di))
+            # iterator not empty, find index
+            for _k, _v in _di.items():
                 if _v == _val:
                     _index = _k
-
-        return _index
+        except StopIteration:
+            _index = str(len(_di.keys()) + 1)
+            # on save, value is already a str in Python3
+            _di[_index] = _val
+        finally:
+            return _index
 
 
 def _safe_load(_f, _a):
@@ -79,7 +86,7 @@
                 _f
             )
         )
-        return json.loads(_a.get_file(_f))
+        return json.loads(_a.get_file(_f, decode=True))
     else:
         return {}
 
@@ -92,8 +99,33 @@
 
 
 class ReposInfo(object):
-    repos = []
-    _repofile = os.path.join(pkg_dir, "versions", _repos_info_archive)
+    init_done = False
+
+    def _init_vars(self):
+        self.repos = []
+
+    def _init_folders(self, arch_folder=None):
+        if arch_folder:
+            self._arch_folder = arch_folder
+            self._repofile = os.path.join(arch_folder, _repos_info_archive)
+        else:
+            self._arch_folder = os.path.join(pkg_dir, "versions")
+            self._repofile = os.path.join(
+                self._arch_folder,
+                _repos_info_archive
+            )
+
+    def __init__(self, arch_folder=None):
+        # perform inits
+        self._init_vars()
+        self._init_folders(arch_folder)
+        self.init_done = True
+
+    def __call__(self, *args, **kwargs):
+        if self.init_done:
+            return self
+        else:
+            return self.__init__(*args, **kwargs)
 
     @staticmethod
     def _ls_repo_page(url):
@@ -189,8 +221,10 @@
         else:
             # gather all of them
             _tags, _ = self._ls_repo_page(base_url)
-            _tags.remove('hotfix')
-            _tags.remove('update')
+            if "hotfix" in _tags:
+                _tags.remove('hotfix')
+            if "update" in _tags:
+                _tags.remove('update')
             # search tags in subfolders
             _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
             _u_tags, _ = self._ls_repo_page(base_url + 'update')
@@ -334,30 +368,46 @@
 
     def get_repoinfo(self, tag):
         _tgz = TGZFile(self._repofile)
-        _buf = _tgz.get_file(tag + ext)
+        _buf = _tgz.get_file(tag + ext, decode=True)
         return json.loads(_buf)
 
 
 class RepoManager(object):
-    # archives
-    _arch_folder = os.path.join(pkg_dir, "versions")
-    _versions_arch = os.path.join(_arch_folder, _repos_versions_archive)
-    _desc_arch = os.path.join(_arch_folder, _pkg_desc_archive)
-    _apps_filename = "apps.json"
+    init_done = False
 
-    # repository index
-    _repo_index = {}
-    _mainteiners_index = {}
+    def _init_folders(self, arch_folder=None):
+        # override arch folder if needed
+        if arch_folder:
+            self._arch_folder = arch_folder
+        else:
+            self._arch_folder = os.path.join(pkg_dir, "versions")
 
-    _apps = {}
+        self._versions_arch = os.path.join(
+            self._arch_folder,
+            _repos_versions_archive
+        )
+        self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
 
-    # init package versions storage
-    _versions_mirantis = {}
-    _versions_other = {}
+    def _init_vars(self, info_class):
+        # RepoInfo instance init
+        if info_class:
+            self._info_class = info_class
+        else:
+            self._info_class = ReposInfo()
+        # archives
+        self._apps_filename = "apps.json"
 
-    def __init__(self):
-        # Ensure that versions folder exists
-        logger_cli.debug(ensure_folder_exists(self._arch_folder))
+        # repository index
+        self._repo_index = {}
+        self._mainteiners_index = {}
+
+        self._apps = {}
+
+        # init package versions storage
+        self._versions_mirantis = {}
+        self._versions_other = {}
+
+    def _init_archives(self):
         # Init version files
         self.versionstgz = TGZFile(
             self._versions_arch,
@@ -394,6 +444,22 @@
             self.versionstgz
         )
 
+    def __init__(self, arch_folder=None, info_class=None):
+        # Perform inits
+        self._init_vars(info_class)
+        self._init_folders(arch_folder)
+        # Ensure that versions folder exists
+        logger_cli.debug(ensure_folder_exists(self._arch_folder))
+        # Preload/create archives
+        self._init_archives()
+        self.init_done = True
+
+    def __call__(self, *args, **kwargs):
+        if self.init_done:
+            return self
+        else:
+            return self.__init__(*args, **kwargs)
+
     def _create_repo_header(self, p):
         _header = "_".join([
             p['tag'],
@@ -504,14 +570,14 @@
         due to huge resulting file size and slow processing
         """
         # init gzip and downloader
-        _info = ReposInfo().get_repoinfo(tag)
+        _info = self._info_class.get_repoinfo(tag)
         # calculate Packages.gz files to process
         _baseurl = _info.pop("baseurl")
         _total_components = len(_info.keys()) - 1
         _ubuntu_package_repos = 0
         _other_repos = 0
-        for _c, _d in _info.iteritems():
-            for _ur, _l in _d.iteritems():
+        for _c, _d in _info.items():
+            for _ur, _l in _d.items():
                 if _ur in ubuntu_releases:
                     _ubuntu_package_repos += len(_l)
                 elif _ur != 'url':
@@ -531,12 +597,12 @@
         _index = 0
         _processed = 0
         _new = 0
-        for _c, _d in _info.iteritems():
+        for _c, _d in _info.items():
             # we do not need url here, just get rid of it
             if 'url' in _d:
                 _d.pop('url')
             # _url =  if 'url' in _d else _baseurl + _c
-            for _ur, _l in _d.iteritems():
+            for _ur, _l in _d.items():
                 # iterate package collections
                 for _p in _l:
                     # descriptions
@@ -564,6 +630,8 @@
                             )
                         )
                         continue
+                    else:
+                        _raw = _raw.decode("utf-8")
                     _progress.write_progress(
                         _index,
                         note="/ {} {} {} {} {}, {}/{}".format(
@@ -728,11 +796,9 @@
     def build_repos(self, url, tag=None):
         """Builds versions data for selected tag, or for all of them
         """
-        # Init the ReposInfo class and check if all files are present
-        _repos = ReposInfo()
         # recoursively walk the mirrors
         # and gather all of the repos for 'tag' or all of the tags
-        _repos.fetch_repos(url, tag=tag)
+        self._info_class.fetch_repos(url, tag=tag)
 
     def _build_action(self, url, tags):
         for t in tags:
@@ -741,7 +807,7 @@
 
     def get_available_tags(self, tag=None):
         # Populate action tags
-        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
+        major, updates, hotfix = self._info_class.list_tags(splitted=True)
 
         _tags = []
         if tag in major:
@@ -767,14 +833,14 @@
             logger_cli.info("# No action set, nothing to do")
         # See if this is a list action
         if action == "list":
-            _all = ReposInfo().list_tags()
+            _all = self._info_class.list_tags()
             if _all:
                 # Print pretty list and exit
                 logger_cli.info("# Tags available at '{}':".format(url))
                 for t in _all:
                     _ri = self._repo_index
                     _isparsed = any(
-                        [k for k, v in _ri.iteritems()
+                        [k for k, v in _ri.items()
                          if v['props']['tag'] == t]
                     )
                     if _isparsed:
@@ -862,8 +928,8 @@
         _rows = []
         for _p in versions.keys():
             _vs = versions[_p]
-            for _v, _d1 in _vs.iteritems():
-                for _md5, _info in _d1.iteritems():
+            for _v, _d1 in _vs.items():
+                for _md5, _info in _d1.items():
                     if _all or name == _info['app']:
                         _s_max = max(len(_info['section']), _s_max)
                         _a_max = max(len(_info['app']), _a_max)
@@ -993,21 +1059,21 @@
         and filters them using keys above
         """
         if tag:
-            tag = unicode(tag) if not isinstance(tag, unicode) else tag
+            tag = str(tag) if not isinstance(tag, str) else tag
         _out = {}
         _vs = self.get_package_versions(name, tagged=True)
         # iterate to filter out keywords
-        for s, apps in _vs.iteritems():
-            for a, _tt in apps.iteritems():
-                for t, vs in _tt.iteritems():
+        for s, apps in _vs.items():
+            for a, _tt in apps.items():
+                for t, vs in _tt.items():
                     # filter tags
                     if tag and t != tag and t.rsplit('.', 1)[0] != tag:
                         continue
                     # Skip hotfix tag
                     if t == tag + ".hotfix":
                         continue
-                    for v, rp in vs.iteritems():
-                        for h, p in rp.iteritems():
+                    for v, rp in vs.items():
+                        for h, p in rp.items():
                             # filter headers with all keywords matching
                             _h = re.split(r"[\-\_]+", h)
                             _included = all([kw in _h for kw in include])
@@ -1038,9 +1104,9 @@
         # insert repo data, insert props into headers place
         _package = {}
         if tagged:
-            for _v, _d1 in _vs.iteritems():
+            for _v, _d1 in _vs.items():
                 # use tag as a next step
-                for _md5, _info in _d1.iteritems():
+                for _md5, _info in _d1.items():
                     _s = _info['section']
                     _a = _info['app']
                     for _pair in _info['repo']:
@@ -1061,8 +1127,8 @@
                             _rp
                         )
         else:
-            for _v, _d1 in _vs.iteritems():
-                for _md5, _info in _d1.iteritems():
+            for _v, _d1 in _vs.items():
+                for _md5, _info in _d1.items():
                     _s = _info['section']
                     _a = _info['app']
                     for _pair in _info['repo']:
@@ -1079,7 +1145,7 @@
 
     def parse_repos(self):
         # all tags to check
-        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
+        major, updates, hotfix = self._info_class.list_tags(splitted=True)
 
         # major tags
         logger_cli.info("# Processing major tags")