Package report/repo parser integration

 - parser able to filter package versions using keywords
 - warning message on missing tag
 - on the fly versions lookup (excluding '*.hotfix')
 - updated versions compare routine
 - lexical compare uses numbers, not ordinal values
 - updated release version detection
 - final report lists pkg section/app if no description given
 - final report shows repo info for detected release version

Fixes:
 - shorter alternate entrypoints: mcp-pkg, mcp-net, cmp-reclass
 - flake8 syntax
 - proper mirantis/non-mirantis version retrieval
 - exit on unexpected arguments
 - salt-master class now gets linux codename and architecture by default

Change-Id: I0a2daadca8a1acaecafc8680226dc00d20cc24ce
Related-PROD: PROD-28199
diff --git a/cfg_checker/cfg_check.py b/cfg_checker/cfg_check.py
index 37f9957..1a2d79f 100644
--- a/cfg_checker/cfg_check.py
+++ b/cfg_checker/cfg_check.py
@@ -44,11 +44,19 @@
 
     # parse arguments
     try:
-        args = parser.parse_args()
+        args, unknown = parser.parse_known_args()
     except TypeError:
         logger_cli.info("\n# Please, check arguments")
         sys.exit(0)
 
+    if unknown:
+        logger_cli.error(
+            "# Unexpected arguments: {}".format(
+                ", ".join(["'{}'".format(a) for a in unknown])
+            )
+        )
+        sys.exit(1)
+
     # Pass externally configured values
     config.ssh_uses_sudo = args.sudo
 
diff --git a/cfg_checker/cli/command.py b/cfg_checker/cli/command.py
index e6d9cd9..8e715f2 100644
--- a/cfg_checker/cli/command.py
+++ b/cfg_checker/cli/command.py
@@ -33,10 +33,11 @@
 def execute_command(args, command):
     # Validate the commands
     # check command
+    _type = args.type.replace("-", "_") if "-" in args.type else args.type
     if command not in commands:
         logger_cli.info("\n# Please, type a command listed above")
         return 0
-    elif args.type not in commands[command]:
+    elif _type not in commands[command]:
         # check type
         logger_cli.info(
             "\n# Please, select '{}' command type listed above".format(
@@ -46,7 +47,7 @@
         return 0
     else:
         # form function name to call
-        _method_name = "do_" + args.type
+        _method_name = "do_" + _type
         _target_module = __import__(
             mods_prefix + command,
             fromlist=[""]
@@ -79,11 +80,19 @@
 
     # parse arguments
     try:
-        args = my_parser.parse_args()
+        args, unknown = my_parser.parse_known_args()
     except TypeError:
         logger_cli.info("\n# Please, check arguments")
         sys.exit(0)
 
+    if unknown:
+        logger_cli.error(
+            "# Unexpected arguments: {}".format(
+                ", ".join(["'{}'".format(a) for a in unknown])
+            )
+        )
+        sys.exit(1)
+
     # force use of sudo
     config.ssh_uses_sudo = True
 
diff --git a/cfg_checker/common/other.py b/cfg_checker/common/other.py
index d9e434a..2620d05 100644
--- a/cfg_checker/common/other.py
+++ b/cfg_checker/common/other.py
@@ -38,7 +38,7 @@
             return (True, _message) if message else True
 
         # node role code checks
-        _code = re.findall("[a-zA-Z]+", fqdn.split('.')[0])
+        _code = re.findall(r"[a-zA-Z]+", fqdn.split('.')[0])
         if len(_code) > 0:
             if _code[0] in all_roles_map:
                 return _result()
@@ -70,7 +70,7 @@
     def get_node_code(self, fqdn):
         # validate
         _isvalid, _message = self.validate_name(fqdn, message=True)
-        _code = re.findall("[a-zA-Z]+?(?=(?:[0-9]+$)|$)", fqdn.split('.')[0])
+        _code = re.findall(r"[a-zA-Z]+?(?=(?:[0-9]+$)|$)", fqdn.split('.')[0])
         # check if it is valid and raise if not
         if _isvalid:
             # try to match it with ones in map
diff --git a/cfg_checker/common/salt_utils.py b/cfg_checker/common/salt_utils.py
index 4dcbd30..7bd6ce7 100644
--- a/cfg_checker/common/salt_utils.py
+++ b/cfg_checker/common/salt_utils.py
@@ -46,13 +46,22 @@
     _ssh_cmd.append(_salt_cmd)
     _ssh_cmd = " ".join(_ssh_cmd)
     logger_cli.debug("... calling salt: '{}'".format(_ssh_cmd))
-    _result = shell(_ssh_cmd)
-    if len(_result) < 1:
-        raise InvalidReturnException("# Empty value returned for '{}".format(
-            _ssh_cmd
-        ))
-    else:
-        return _extract_password(_result)
+    try:
+        _result = shell(_ssh_cmd)
+        if len(_result) < 1:
+            raise InvalidReturnException(
+                "# Empty value returned for '{}".format(
+                    _ssh_cmd
+                )
+            )
+        else:
+            return _extract_password(_result)
+    except OSError as e:
+        raise SaltException(
+            "Salt error calling '{}': '{}'\n"
+            "\nConsider checking 'SALT_ENV' "
+            "and '<pkg>/etc/<env>.env' files".format(_ssh_cmd, e.strerror)
+        )
 
 
 def get_local_password():
@@ -60,8 +69,16 @@
 
     :return: password string
     """
-    _cmd = "salt-call --out=json pillar.get _param:salt_api_password"
-    _result = shell(_cmd)
+    _cmd = "salt-call"
+    _args = "--out=json pillar.get _param:salt_api_password"
+    try:
+        _result = shell(" ".join([_cmd, _args]))
+    except OSError as e:
+        raise SaltException(
+            "Salt error calling '{}': '{}'\n"
+            "\nConsider checking 'SALT_ENV' "
+            "and '<pkg>/etc/<env>.env' files".format(_cmd, e.strerror)
+        )
     return _extract_password(_result)
 
 
diff --git a/cfg_checker/helpers/console_utils.py b/cfg_checker/helpers/console_utils.py
index d1db2a1..994143c 100644
--- a/cfg_checker/helpers/console_utils.py
+++ b/cfg_checker/helpers/console_utils.py
@@ -39,6 +39,7 @@
         sys.stdout.write('\r')
         sys.stdout.write(' '*self._strsize)
         sys.stdout.write('\r')
+        sys.stdout.flush()
 
     def end(self):
         self._note_size = 0
diff --git a/cfg_checker/modules/packages/__init__.py b/cfg_checker/modules/packages/__init__.py
index 77c1654..45b1f77 100644
--- a/cfg_checker/modules/packages/__init__.py
+++ b/cfg_checker/modules/packages/__init__.py
@@ -60,6 +60,11 @@
         action="store_true", default=False,
         help="Save pkg descriptions while parsing"
     )
+    pkg_repos.add_argument(
+        '--gen-apps',
+        action="store_true", default=False,
+        help="Save pkg descriptions while parsing"
+    )
     pkg_show = pkg_subparsers.add_parser(
         'show',
         help="Show package history from the map"
@@ -69,6 +74,15 @@
         nargs='+',
         help="Package names separated by space"
     )
+    pkg_app = pkg_subparsers.add_parser(
+        'show-app',
+        help="Show packages for single app"
+    )
+    pkg_app.add_argument(
+        'args',
+        nargs='+',
+        help="List of app's packages (or 'source' in package description)"
+    )
 
     return _parser
 
@@ -115,7 +129,8 @@
             args.url,
             args.tag,
             action="fetch",
-            descriptions=args.gen_desc
+            descriptions=args.gen_desc,
+            apps=args.gen_apps
         )
     else:
         # All of them
@@ -130,3 +145,13 @@
     # show packages
     for p in args.args:
         r.show_package(p)
+
+
+def do_show_app(args):
+    """Shows packages for app
+    """
+    # Init manager
+    r = RepoManager()
+    # show packages
+    for a in args.args:
+        r.show_app(a)
diff --git a/cfg_checker/modules/packages/checker.py b/cfg_checker/modules/packages/checker.py
index 514bd9c..bf192f9 100644
--- a/cfg_checker/modules/packages/checker.py
+++ b/cfg_checker/modules/packages/checker.py
@@ -3,6 +3,7 @@
 from cfg_checker.common import const, logger_cli
 from cfg_checker.common.exception import ConfigException
 from cfg_checker.helpers.console_utils import Progress
+from cfg_checker.modules.packages.repos import RepoManager
 from cfg_checker.nodes import salt_master
 from cfg_checker.reports import reporter
 
@@ -10,6 +11,25 @@
 
 
 class CloudPackageChecker(object):
+    rm = RepoManager()
+
+    def __init__(self):
+        # Init salt master info
+        if not salt_master.nodes:
+            salt_master.nodes = salt_master.get_nodes()
+
+        # check that this env tag is present in Manager
+        _tags = self.rm.get_available_tags(tag=salt_master.mcp_release)
+        if not _tags:
+            logger_cli.warning(
+                "\n# hWARNING: '{0}' is not listed in repo index. "
+                "Consider running:\n\t{1}\nto add info on this tag's "
+                "release package versions".format(
+                    salt_master.mcp_release,
+                    "mcp-checker packages versions --tag {0}"
+                )
+            )
+
     @staticmethod
     def presort_packages(all_packages, full=None):
         logger_cli.info("-> Presorting packages")
@@ -51,7 +71,8 @@
             _progress.write_progress(_progress_index)
             # sort packages
             _pn, _val = all_packages.popitem()
-            _c = _val['desc']['component']
+            _c = _val['desc']['section']
+
             if not full:
                 # Check if this packet has errors
                 # if all is ok -> just skip it
@@ -59,11 +80,11 @@
                 if _max_status <= const.VERSION_OK:
                     _max_action = max(_val['results'][_max_status].keys())
                     if _max_action == const.ACT_NA:
-                        # this package do not has any comments
+                        # this package does not have any comments
                         # ...just skip it from report
                         continue
 
-            if len(_c) > 0 and _c == 'unlisted':
+            if len(_c) > 0 and _val['is_mirantis'] is None:
                 # not listed package in version lib
                 _data['unlisted'].update({
                     _pn: _val
@@ -71,7 +92,8 @@
                 _eu += _val['results'].keys().count(const.VERSION_ERR)
                 _du += _val['results'].keys().count(const.VERSION_DOWN)
             # mirantis/critical
-            elif len(_c) > 0 and _c != 'System':
+            # elif len(_c) > 0 and _c != 'System':
+            elif _val['is_mirantis']:
                 # not blank and not system
                 _data['critical'].update({
                     _pn: _val
@@ -118,8 +140,6 @@
         :return: none
         """
         logger_cli.info("# Collecting installed packages")
-        if not salt_master.nodes:
-            salt_master.nodes = salt_master.get_nodes()
         salt_master.prepare_script_on_active_nodes("pkg_versions.py")
         _result = salt_master.execute_script_on_active_nodes("pkg_versions.py")
 
@@ -159,16 +179,23 @@
         logger_cli.info(
             "# Cross-comparing: Installed vs Candidates vs Release"
         )
+        # shortcuts for this cloud values
+        _os = salt_master.openstack_release
+        _mcp = salt_master.mcp_release
+        # Progress class
         _progress = Progress(len(salt_master.nodes.keys()))
         _progress_index = 0
         _total_processed = 0
         # Collect packages from all of the nodes in flat dict
         _all_packages = {}
+        _all_tags = set([])
+        # get env tag's year and major version
+        _tag_major = _mcp[:_mcp.find('.', _mcp.find('.')+1)]
         for node_name, node_value in salt_master.nodes.iteritems():
             _uniq_len = len(_all_packages.keys())
             _progress_index += 1
-            # progress will jump from node to node
-            # it is very costly operation to execute it for each pkg
+            # progress updates shown before next node only
+            # it is costly operation to do it for each of the 150k packages
             _progress.write_progress(
                 _progress_index,
                 note="/ {} uniq out of {} packages found".format(
@@ -184,31 +211,128 @@
 
                 # All packages list with version and node list
                 if _name not in _all_packages:
-                    # shortcuts for this cloud values
-                    _os = salt_master.openstack_release
-                    _mcp = salt_master.mcp_release
-                    _pkg_desc = {}
+                    # get repo versions list,
+                    _linux = salt_master.nodes[node_name]['linux_codename']
+                    _arch = salt_master.nodes[node_name]['linux_arch']
+                    if _name == u'qemu-block-extra':
+                        a = 1
+                    # omit tag as target versions might be of different tag
+                    _r = self.rm.get_filtered_versions(
+                        _name,
+                        tag=_mcp,
+                        include=[_os, _linux, _arch],
+                        exclude=["nightly"]
+                    )
+                    # save versions for matching major tags
+                    _vs = {}
+                    _sections = {}
+                    _apps = {}
+                    # get all versions for this year
+                    for s, apps in _r.iteritems():
+                        for a, versions in apps.iteritems():
+                            for v, repos in versions.iteritems():
+                                for repo in repos:
+                                    t = repo['tag']
+                                    _major = t[:t.find('.', t.find('.')+1)]
+                                    if _tag_major == _major:
+                                        if v not in _vs:
+                                            _vs[v] = []
+                                        _vs[v].append(repo)
+                                        if v not in _sections:
+                                            _sections[v] = []
+                                        _sections[v].append(s)
+                                        if v not in _apps:
+                                            _apps[v] = []
+                                        _apps[v].append(a)
+
+                    # check if we have candidate version among found
+                    _r_desc = []
+                    _vs_keys = _vs.keys()
+                    if _vs_keys:
+                        _newest = _newest = DebianVersion(_vs_keys.pop())
+                    else:
+                        _newest = DebianVersion('')
+                    # if _ver_ins.version in _vs_keys:
+                    #     # exact match, save it
+                    #     _release = _ver_ins
+                    # else:
+                    # detect newest version among saved
+
+                    for v in _vs_keys:
+                        _this = DebianVersion(v)
+                        if _this > _newest:
+                            _newest = _this
+                    # newest version for the YEAR.MAJOR will be the release
+                    _release = _newest
+                    # save repos list for this version
+                    if _release.version != 'n/a':
+                        _r_desc = _vs[_release.version]
+                    # preload special description
                     if _desc[_name]:
-                        # shortcut to version library
-                        _vers = _desc[_name]['versions']
                         _pkg_desc = _desc[_name]
                     else:
-                        # no description - no library :)
-                        _vers = {}
                         _pkg_desc = _desc.dummy_desc
+                    # Check if we can provide better from the package
+                    if _release.version != 'n/a':
+                        if not _pkg_desc['section']:
+                            _pkg_desc['section'] = \
+                                "/".join(_sections[_release.version])
+                        if not _pkg_desc['app']:
+                            _pkg_desc['app'] = \
+                                "/".join(_apps[_release.version])
 
                     # get specific set for this OS release if present
-                    if _os in _vers:
-                        _v = _vers[_os]
-                    elif 'any' in _vers:
-                        _v = _vers['any']
-                    else:
-                        _v = {}
+                    # if not, try search in all repos for this tag
+                    # _r_desc = _r[_newest.version]
+                    # if _r:
+                    #     _vs = _r.keys()
+                    #     if len(_vs) > 1:
+                    #         # search best match
+                    #         _release = None
+                    #         for _v in _vs:
+                    #             _deb = DebianVersion(_v)
+                    #             if _ver_can == _deb:
+                    #                 _release = _deb
+                    #                 _r_desc = _r[_v]
+                    #                 break
+                    #         if not _release:
+                    #             _progress.clearline()
+                    #             logger_cli.error(
+                    #                 "# ERROR: No release version found "
+                    #                 "for '{}'".format(_name)
+                    #             )
+                    #             _release = DebianVersion('')
+                    #     else:
+                    #         _release = DebianVersion(_vs[0])
+                    #         _r_desc = _r[_vs[0]]
+                    # else:
+                    #     # not found... 99% that it will not happen
+                    #     _release = DebianVersion('')
+
+                    # Old versions match routine
+                    # ########
+                    # if _os in _vers:
+                    #     _v = _vers[_os]
+                    # elif 'any' in _vers:
+                    #     _v = _vers['any']
+                    # else:
+                    #     _v = {}
+
                     # Finally, get specific version
-                    _release = DebianVersion(_v[_mcp] if _mcp in _v else '')
+                    # _release = DebianVersion(_v[_mcp] if _mcp in _v else '')
+                    for repo in _r_desc:
+                        _all_tags.add(repo['tag'])
+
                     # Populate package info
+                    _m = _r_desc[0]["maintainer"] if _r_desc else 'n/a'
                     _all_packages[_name] = {
                         "desc": _pkg_desc,
+                        "repos": _r_desc,
+                        "maintainer": _m,
+                        "is_mirantis": self.rm.is_mirantis(
+                            _name,
+                            tag=_tag_major
+                        ),
                         "results": {},
                         "r": _release,
                     }
diff --git a/cfg_checker/modules/packages/repos.py b/cfg_checker/modules/packages/repos.py
index ae662de..e7c4c6a 100644
--- a/cfg_checker/modules/packages/repos.py
+++ b/cfg_checker/modules/packages/repos.py
@@ -1,5 +1,6 @@
 import json
 import os
+import re
 from copy import deepcopy
 
 from cfg_checker.common import logger, logger_cli, nested_set
@@ -71,8 +72,8 @@
 
 def _safe_load(_f, _a):
     if _f in _a.list_files():
-        logger_cli.info(
-            "# Loading '{}':'{}'".format(
+        logger_cli.debug(
+            "... loading '{}':'{}'".format(
                 _a.basefile,
                 _f
             )
@@ -325,7 +326,7 @@
             _dig = [s for s in _all if s[0].isdigit()]
             _dig = sorted(
                 _dig,
-                key=lambda x: tuple(int(i) for i in re.findall('\\d+', x)[:3])
+                key=lambda x: tuple(int(i) for i in re.findall(r"\d+", x)[:3])
             )
 
             return _dig + _lex
@@ -340,11 +341,14 @@
     # archives
     _versions_arch = os.path.join(pkg_dir, "versions", _repos_versions_archive)
     _desc_arch = os.path.join(pkg_dir, "versions", _pkg_desc_archive)
+    _apps_filename = "apps.json"
 
     # repository index
     _repo_index = {}
     _mainteiners_index = {}
 
+    _apps = {}
+
     # init package versions storage
     _versions_mirantis = {}
     _versions_other = {}
@@ -359,6 +363,13 @@
             self._desc_arch,
             label="MCP Configuration Checker: Package descriptions archive"
         )
+
+        # section / app
+        self._apps = _safe_load(
+            self._apps_filename,
+            self.desctgz
+        )
+
         # indices
         self._repo_index = _safe_load(
             _repos_index_filename,
@@ -394,19 +405,24 @@
         _h, _m = pair.split('-')
         return self._repo_index[_h], self._mainteiners_index[_m]
 
-    def _update_pkg_version(self, _d, n, v, md5, h_index, m_index):
+    def _update_pkg_version(self, _d, n, v, md5, s, a, h_index, m_index):
         """Method updates package version record in global dict
         """
         # 'if'*4 operation is pretty expensive when using it 100k in a row
         # so try/except is a better way to go, even faster than 'reduce'
         _pair = "-".join([h_index, m_index])
+        _info = {
+            'repo': [_pair],
+            'section': s,
+            'app': a
+        }
         try:
             # try to load list
-            _list = _d[n][v][md5]
+            _list = _d[n][v][md5]['repo']
             # cast it as set() and union()
             _list = set(_list).union([_pair])
             # cast back as set() is not serializeable
-            _d[n][v][md5] = list(_list)
+            _d[n][v][md5]['repo'] = list(_list)
             return False
         except KeyError:
             # ok, this is fresh pkg. Do it slow way.
@@ -416,12 +432,12 @@
                     # there is such version, check md5
                     if md5 in _d[n][v]:
                         # just add new repo header
-                        if _pair not in _d[n][v][md5]:
-                            _d[n][v][md5].append(_pair)
+                        if _pair not in _d[n][v][md5]['repo']:
+                            _d[n][v][md5]['repo'].append(_pair)
                     else:
                         # check if such index is here...
                         _existing = filter(
-                            lambda i: _pair in _d[n][v][i],
+                            lambda i: _pair in _d[n][v][i]['repo'],
                             _d[n][v]
                         )
                         if _existing:
@@ -436,18 +452,18 @@
                                     md5
                                 )
                             )
-                        _d[n][v][md5] = [_pair]
+                        _d[n][v][md5] = _info
                 else:
                     # this is new version for existing package
                     _d[n][v] = {
-                        md5: [_pair]
+                        md5: _info
                     }
                 return False
             else:
                 # this is new pakcage
                 _d[n] = {
                     v: {
-                        md5: [_pair]
+                        md5: _info
                     }
                 }
                 return True
@@ -478,7 +494,7 @@
     #     else:
     #         return None
 
-    def parse_tag(self, tag, descriptions=False):
+    def parse_tag(self, tag, descriptions=False, apps=False):
         """Download and parse Package.gz files for specific tag
         By default, descriptions not saved
         due to huge resulting file size and slow processing
@@ -523,6 +539,27 @@
                     if descriptions:
                         _descriptions = {}
                     # download and unzip
+                    _index += 1
+                    _progress.write_progress(
+                        _index,
+                        note="/ {} {} {} {} {}, GET 'Packages.gz'".format(
+                            _c,
+                            _ur,
+                            _p['ubuntu-release'],
+                            _p['type'],
+                            _p['arch']
+                        )
+                    )
+                    _raw = get_gzipped_file(_p['filepath'])
+                    if not _raw:
+                        # empty repo...
+                        _progress.clearline()
+                        logger_cli.warning(
+                            "# WARNING: Empty file: '{}'".format(
+                                _p['filepath']
+                            )
+                        )
+                        continue
                     _progress.write_progress(
                         _index,
                         note="/ {} {} {} {} {}, {}/{}".format(
@@ -535,9 +572,7 @@
                             _new
                         )
                     )
-                    _raw = get_gzipped_file(_p['filepath'])
                     _lines = _raw.splitlines()
-                    _index += 1
                     # break lines collection into isolated pkg data
                     _pkg = {
                         "tag": tag,
@@ -547,6 +582,10 @@
                     _pkg.update(_p)
                     _desc = {}
                     _key = _value = ""
+                    # if there is no empty line at end, add it
+                    if _lines[-1] != '':
+                        _lines.append('')
+                    # Process lines
                     for _line in _lines:
                         if not _line:
                             # if the line is empty, process pkg data gathered
@@ -555,6 +594,30 @@
                             _version = _desc['version']
                             _mainteiner = _desc['maintainer']
 
+                            if 'source' in _desc:
+                                _ap = _desc['source'].lower()
+                            else:
+                                _ap = "-"
+
+                            if apps:
+                                # insert app
+                                _sc = _desc['section'].lower()
+                                if 'source' in _desc:
+                                    _ap = _desc['source'].lower()
+                                else:
+                                    _ap = "-"
+
+                                try:
+                                    _tmp = set(self._apps[_sc][_ap][_name])
+                                    _tmp.add(_desc['architecture'])
+                                    self._apps[_sc][_ap][_name] = list(_tmp)
+                                except KeyError:
+                                    nested_set(
+                                        self._apps,
+                                        [_sc, _ap, _name],
+                                        [_desc['architecture']]
+                                    )
+
                             # Check is mainteiner is Mirantis
                             if _mainteiner.endswith("@mirantis.com>"):
                                 # update mirantis versions
@@ -563,6 +626,8 @@
                                     _name,
                                     _version,
                                     _md5,
+                                    _desc['section'].lower(),
+                                    _ap,
                                     self._create_repo_header(_pkg),
                                     _get_value_index(
                                         self._mainteiners_index,
@@ -577,6 +642,8 @@
                                     _name,
                                     _version,
                                     _md5,
+                                    _desc['section'].lower(),
+                                    _ap,
                                     self._create_repo_header(_pkg),
                                     _get_value_index(
                                         self._mainteiners_index,
@@ -622,9 +689,16 @@
             json.dumps(self._mainteiners_index),
             replace=True
         )
+        if apps:
+            self.desctgz.add_file(
+                self._apps_filename,
+                json.dumps(self._apps),
+                replace=True
+            )
+
         return
 
-    def fetch_versions(self, tag, descriptions=False):
+    def fetch_versions(self, tag, descriptions=False, apps=False):
         """Executes parsing for specific tag
         """
         if descriptions:
@@ -634,7 +708,7 @@
             )
         # if there is no such tag, parse it from repoinfo
         logger_cli.info("# Fetching versions for {}".format(tag))
-        self.parse_tag(tag, descriptions=descriptions)
+        self.parse_tag(tag, descriptions=descriptions, apps=apps)
         logger_cli.info("-> saving updated versions")
         self.versionstgz.add_file(
             _mirantis_versions_filename,
@@ -666,12 +740,27 @@
             )
             self.build_repos(url, tag=t)
 
+    def get_available_tags(self, tag=None):
+        # Populate action tags
+        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
+
+        _tags = []
+        if tag in major:
+            _tags.append(tag)
+        if tag in updates:
+            _tags.append(tag + ".update")
+        if tag in hotfix:
+            _tags.append(tag + ".hotfix")
+
+        return _tags
+
     def action_for_tag(
         self,
         url,
         tag,
         action=None,
-        descriptions=None
+        descriptions=None,
+        apps=None
     ):
         """Executes action for every tag from all collections
         """
@@ -694,16 +783,9 @@
             # exit
             return
 
-        # Pupulate action tags
-        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
-        _action_tags = []
-        if tag in major:
-            _action_tags.append(tag)
-        if tag in updates:
-            _action_tags.append(tag + ".update")
-        if tag in hotfix:
-            _action_tags.append(tag + ".hotfix")
-        # Check if any tags collected
+        # Populate action tags
+        _action_tags = self.get_available_tags(tag)
+
         if not _action_tags:
             logger_cli.info(
                 "# Tag of '{}' not found. "
@@ -720,7 +802,7 @@
             self._build_action(url, _action_tags)
         elif action == "fetch":
             for t in _action_tags:
-                self.fetch_versions(t, descriptions=descriptions)
+                self.fetch_versions(t, descriptions=descriptions, apps=apps)
 
         logger_cli.info("# Done.")
 
@@ -738,46 +820,214 @@
             #           \t <version>
             # <10symbols> \t <md5> \t sorted headers with no tag
             # ...
-            logger_cli.info("\n# Package: {}".format(name))
+            # section
             _o = ""
-            # get and sort tags
-            _vs = _p.keys()
-            _vs.sort()
-            for _v in _vs:
-                _o += "\n" + " "*8 + _v + ':\n'
-                # get and sort tags
-                _mds = _p[_v].keys()
-                _mds.sort()
-                for _md5 in _mds:
-                    _o += " "*16 + _md5 + "\n"
-                    # get and sort repo headers
-                    _rr = _p[_v][_md5].keys()
-                    _rr.sort()
-                    for _r in _rr:
-                        _o += " "*24 + _r.replace('_', ' ')
-                        _o += " ({})\n".format(_p[_v][_md5][_r]["mainteiner"])
+
+            _ss = _p.keys()
+            _ss.sort()
+            for _s in _ss:
+                _apps = _p[_s].keys()
+                _apps.sort()
+                # app
+                for _a in _apps:
+                    logger_cli.info(
+                        "\n# Package: {}/{}/{}".format(_s, _a, name)
+                    )
+                    # get and sort tags
+                    _vs = _p[_s][_a].keys()
+                    _vs.sort()
+                    for _v in _vs:
+                        _o += "\n" + " "*8 + _v + ':\n'
+                        # get and sort tags
+                        _mds = _p[_s][_a][_v].keys()
+                        _mds.sort()
+                        for _md5 in _mds:
+                            _o += " "*16 + _md5 + "\n"
+                            # get and sort repo headers
+                            _rr = _p[_s][_a][_v][_md5].keys()
+                            _rr.sort()
+                            for _r in _rr:
+                                _o += " "*24 + _r.replace('_', ' ')
+                                _o += " ({})\n".format(
+                                    _p[_s][_a][_v][_md5][_r]["maintainer"]
+                                )
 
             logger_cli.info(_o)
 
-    def get_package_versions(self, name, mirantis=True, tagged=False):
+    @staticmethod
+    def get_apps(versions, name):
+        _all = True if name == '*' else False
+        _s_max = 0
+        _a_max = 0
+        _rows = []
+        for _p in versions.keys():
+            _vs = versions[_p]
+            for _v, _d1 in _vs.iteritems():
+                for _md5, _info in _d1.iteritems():
+                    if _all or name == _info['app']:
+                        _s_max = max(len(_info['section']), _s_max)
+                        _a_max = max(len(_info['app']), _a_max)
+                        _rows.append([
+                            _info['section'],
+                            _info['app'],
+                            _p
+                        ])
+        _fmt = "{:"+str(_s_max)+"} {:"+str(_a_max)+"} {}"
+        _rows = [_fmt.format(s, a, p) for s, a, p in _rows]
+        _rows.sort()
+        return _rows
+
+    def show_app(self, name):
+        c = 0
+        rows = self.get_apps(self._versions_mirantis, name)
+        if rows:
+            logger_cli.info("# Mirantis packages for '{}'".format(name))
+            logger_cli.info("\n".join(rows))
+            c += 1
+        rows = self.get_apps(self._versions_other, name)
+        if rows:
+            logger_cli.info("# Other packages for '{}'".format(name))
+            logger_cli.info("\n".join(rows))
+            c += 1
+        if c == 0:
+            logger_cli.info("\n# No app found for '{}'".format(name))
+
+    def get_mirantis_pkg_names(self):
+        # Mirantis maintainers only
+        return set(
+            self._versions_mirantis.keys()
+        ) - set(
+            self._versions_other.keys()
+        )
+
+    def get_other_pkg_names(self):
+        # Non-Mirantis maintainers
+        return set(
+            self._versions_other.keys()
+        ) - set(
+            self._versions_mirantis.keys()
+        )
+
+    def get_mixed_pkg_names(self):
+        # Mixed maintainers
+        return set(
+            self._versions_mirantis.keys()
+        ).intersection(set(
+            self._versions_other.keys()
+        ))
+
+    def is_mirantis(self, name, tag=None):
+        """Method checks if this package is mainteined
+        by mirantis in target tag repo
+        """
+        if name in self._versions_mirantis:
+            # check tag
+            if tag:
+                _pkg = self.get_package_versions(
+                    name,
+                    tagged=True
+                )
+                _tags = []
+                for s in _pkg.keys():
+                    for a in _pkg[s].keys():
+                        for t in _pkg[s][a].keys():
+                            _tags.append(t)
+                if any([t.startswith(tag) for t in _tags]):
+                    return True
+                else:
+                    return None
+            else:
+                return True
+        elif name in self._versions_other:
+            # check tag
+            if tag:
+                _pkg = self.get_package_versions(
+                    name,
+                    tagged=True
+                )
+                _tags = []
+                for s in _pkg.keys():
+                    for a in _pkg[s].keys():
+                        for t in _pkg[s][a].keys():
+                            _tags.append(t)
+                if any([t.startswith(tag) for t in _tags]):
+                    return False
+                else:
+                    return None
+            else:
+                return False
+        else:
+            logger.error(
+                "# ERROR: package '{}' not found "
+                "while determining maintainer".format(
+                    name
+                )
+            )
+            return None
+
+    def get_filtered_versions(
+        self,
+        name,
+        tag=None,
+        include=None,
+        exclude=None
+    ):
+        """Method gets all the versions for the package
+        and filters them using keys above
+        """
+        if tag:
+            tag = unicode(tag) if not isinstance(tag, unicode) else tag
+        _out = {}
+        _vs = self.get_package_versions(name, tagged=True)
+        # iterate to filter out keywords
+        for s, apps in _vs.iteritems():
+            for a, _tt in apps.iteritems():
+                for t, vs in _tt.iteritems():
+                    # filter tags
+                    if tag and t != tag and t.rsplit('.', 1)[0] != tag:
+                        continue
+                    # Skip hotfix tag
+                    if t == tag + ".hotfix":
+                        continue
+                    for v, rp in vs.iteritems():
+                        for h, p in rp.iteritems():
+                            # filter headers with all keywords matching
+                            _h = re.split(r"[\-\_]+", h)
+                            _included = all([kw in _h for kw in include])
+                            _excluded = any([kw in _h for kw in exclude])
+                            if not _included or _excluded:
+                                continue
+                            else:
+                                nested_set(_out, [s, a, v], [])
+                                _dat = {
+                                    "header": h
+                                }
+                                _dat.update(p)
+                                _out[s][a][v].append(_dat)
+        return _out
+
+    def get_package_versions(self, name, tagged=False):
         """Method builds package version structure
         with repository properties included
         """
         # get data
-        if mirantis and name in self._versions_mirantis:
-            _vs = self._versions_mirantis[name]
-        elif not mirantis and name in self._versions_other:
-            _vs = self._versions_other[name]
-        else:
-            return {}
+        _vs = {}
+
+        if name in self._versions_mirantis:
+            _vs.update(self._versions_mirantis[name])
+        if name in self._versions_other:
+            _vs.update(self._versions_other[name])
 
         # insert repo data, insert props into headers place
         _package = {}
         if tagged:
             for _v, _d1 in _vs.iteritems():
                 # use tag as a next step
-                for _md5, _indices in _d1.iteritems():
-                    for _pair in _indices:
+                for _md5, _info in _d1.iteritems():
+                    _s = _info['section']
+                    _a = _info['app']
+                    for _pair in _info['repo']:
+                        _rp = {}
                         # extract props for a repo
                         _r, _m = self._get_indexed_values(_pair)
                         # get tag
@@ -785,26 +1035,27 @@
                         # cut tag from the header
                         _cut_head = _r["header"].split("_", 1)[1]
                         # populate dict
+                        _rp["maintainer"] = _m
+                        _rp["md5"] = _md5
+                        _rp.update(_r["props"])
                         nested_set(
                             _package,
-                            [_tag, _v, _cut_head, _md5],
-                            {
-                                "repo": _r["props"],
-                                "mainteiner": _m
-                            }
+                            [_s, _a, _tag, _v, _cut_head],
+                            _rp
                         )
         else:
             for _v, _d1 in _vs.iteritems():
-                for _md5, _indices in _d1.iteritems():
-                    for _pair in _indices:
+                for _md5, _info in _d1.iteritems():
+                    _s = _info['section']
+                    _a = _info['app']
+                    for _pair in _info['repo']:
                         _r, _m = self._get_indexed_values(_pair)
+                        _info["maintainer"] = _m
+                        _info.update(_r["props"])
                         nested_set(
                             _package,
-                            [_v, _md5, _r["header"]],
-                            {
-                                "repo": _r["props"],
-                                "mainteiner": _m
-                            }
+                            [_s, _a, _v, _md5, _r["header"]],
+                            _info
                         )
 
         return _package
diff --git a/cfg_checker/modules/packages/versions.py b/cfg_checker/modules/packages/versions.py
index a2bd083..f12d0b7 100644
--- a/cfg_checker/modules/packages/versions.py
+++ b/cfg_checker/modules/packages/versions.py
@@ -1,5 +1,6 @@
 import csv
 import os
+import re
 
 from cfg_checker.common import config, const, logger_cli
 from cfg_checker.common.settings import pkg_dir
@@ -10,7 +11,7 @@
     _list = {}
 
     dummy_desc = {
-        "component": "unlisted",
+        "section": "unlisted",
         "app": "-",
         "repo": "-",
         "versions": {}
@@ -34,7 +35,7 @@
                 # package_name,component,application_or_service,repo,openstack_release,2018.4.0,2018.11.0,2019.2.0,2019.2.1,2019.2.2
                 # reassign for code readability
                 _pkg = row[0]
-                _component = row[1]
+                _section = row[1]
                 _app = row[2]
                 _repo = row[3]
                 # if release cell empty - use keyword 'any'
@@ -59,7 +60,7 @@
                     # update pkg data in list
                     self._list.update({
                         _pkg: {
-                            "component": _component,
+                            "section": _section,
                             "app": _app,
                             "repo": _repo,
                             "versions": {}
@@ -124,16 +125,16 @@
     def __init__(self, version_string):
         # save
         if len(version_string) < 1:
-            self.epoch = None
-            self.upstream = None
-            self.debian = None
+            self.epoch = "0"
+            self.upstream = "0"
+            self.debian = ''
             self.version = 'n/a'
             return
         else:
             # do parse the main versions
             _v = version_string
             # colon presence, means epoch present
-            _e = _v.split(':', 1)[0] if ':' in _v else ''
+            _e = _v.split(':', 1)[0] if ':' in _v else "0"
             # if epoch was there, upstream should be cut
             _m = _v if ':' not in _v else _v.split(':', 1)[1]
             # dash presence, means debian present
@@ -186,9 +187,25 @@
         return self._cmp_fragment(_lhf, _rhf)
 
     def _cmp_lex(self, lf, rf):
-        # cast each item into its ORD value
-        _lhf = [ord(n) for n in lf]
-        _rhf = [ord(n) for n in rf]
+        def split_rev(_s):
+            _out = []
+            _list = re.split(r'(\d+)', _s)
+            # iterate and cast into a number if possible
+            for idx in range(0, len(_list)):
+                try:
+                    # try to convert it to number
+                    _out.append(int(_list[idx]))
+                except ValueError:
+                    # not a number
+                    _ords = [ord(n) for n in _list[idx]]
+                    _out += _ords
+            return _out
+        # split string into letters and numbers
+        # and cast each item into its ORD value
+        _lhf = split_rev(lf)
+        _rhf = split_rev(rf)
+        # _lhf = [ord(n) for n in lf]
+        # _rhf = [ord(n) for n in rf]
 
         return self._cmp_fragment(_lhf, _rhf)
     # end of cmps
@@ -196,14 +213,20 @@
     # main part compared using splitted numbers
     # if equal, revision is compared using lexical comparizon
     def __lt__(self, v):
-        if self._cmp_num(self.epoch, v.epoch) < 0:
-            return True
-        elif self._cmp_num(self.upstream, v.upstream) < 0:
-            return True
-        elif self._cmp_lex(self.upstream_rev, v.upstream_rev) < 0:
-            return True
-        else:
-            return False
+        _e = self._cmp_num(self.epoch, v.epoch)
+        _u = self._cmp_num(self.upstream, v.upstream)
+        _ul = self._cmp_lex(self.upstream_rev, v.upstream_rev)
+        _d = self._cmp_num(self.debian, v.debian)
+        _dl = self._cmp_lex(self.debian_rev, v.debian_rev)
+        for n in [_e, _u, _ul, _d, _dl]:
+            if n == 0:
+                continue
+            elif n < 0:
+                return True
+            elif n > 0:
+                return False
+        # if all is equal, it is still false
+        return False
 
     def __eq__(self, v):
         # compare all portions
@@ -211,18 +234,26 @@
         _result.append(self._cmp_num(self.epoch, v.epoch))
         _result.append(self._cmp_num(self.upstream, v.upstream))
         _result.append(self._cmp_lex(self.upstream_rev, v.upstream_rev))
+        _result.append(self._cmp_num(self.debian, v.debian))
+        _result.append(self._cmp_lex(self.debian_rev, v.debian_rev))
         # if there is any non-zero, its not equal
         return not any(_result)
 
     def __gt__(self, v):
-        if self._cmp_num(self.epoch, v.epoch) > 0:
-            return True
-        elif self._cmp_num(self.upstream, v.upstream) > 0:
-            return True
-        elif self._cmp_lex(self.upstream_rev, v.upstream_rev) > 0:
-            return True
-        else:
-            return False
+        _e = self._cmp_num(self.epoch, v.epoch)
+        _u = self._cmp_num(self.upstream, v.upstream)
+        _ul = self._cmp_lex(self.upstream_rev, v.upstream_rev)
+        _d = self._cmp_num(self.debian, v.debian)
+        _dl = self._cmp_lex(self.debian_rev, v.debian_rev)
+        for n in [_e, _u, _ul, _d, _dl]:
+            if n == 0:
+                continue
+            elif n > 0:
+                return True
+            elif n < 0:
+                return False
+        # if all is equal, it is still false
+        return False
 
     def update_parts(self, target, status):
         # updating parts of version statuses
diff --git a/cfg_checker/nodes.py b/cfg_checker/nodes.py
index 9a1fd48..d752655 100644
--- a/cfg_checker/nodes.py
+++ b/cfg_checker/nodes.py
@@ -64,7 +64,6 @@
         # in case API not listed minions, we need all that answer ping
         _active = self.salt.get_active_nodes()
         logger_cli.info("-> nodes responded: {}".format(len(_active)))
-        # just inventory for faster interaction
         # iterate through all accepted nodes and create a dict for it
         self.nodes = {}
         self.skip_list = []
@@ -113,6 +112,14 @@
             self.salt.master_node,
             "_param:openstack_version"
         )[self.salt.master_node]
+        # Preload codenames
+        # do additional queries to get linux codename and arch for each node
+        self.get_specific_pillar_for_nodes("_param:linux_system_codename")
+        self.get_specific_pillar_for_nodes("_param:linux_system_architecture")
+        for _name in self.nodes.keys():
+            _p = self.nodes[_name]['pillars']['_param']
+            self.nodes[_name]['linux_codename'] = _p['linux_system_codename']
+            self.nodes[_name]['linux_arch'] = _p['linux_system_architecture']
 
     def skip_node(self, node):
         # Add node to skip list
@@ -298,7 +305,7 @@
         )
 
         # execute script
-        logger.debug("... running script")
+        logger_cli.debug("... running script")
         # handle results for each node
         _script_arguments = " ".join(args) if args else ""
         self.not_responded = []
diff --git a/cfg_checker/reports/reporter.py b/cfg_checker/reports/reporter.py
index 08015cf..04c9034 100644
--- a/cfg_checker/reports/reporter.py
+++ b/cfg_checker/reports/reporter.py
@@ -30,7 +30,7 @@
         return sorted(
             td.keys(),
             key=lambda k: (
-                td[k]['desc']['component'],
+                td[k]['desc']['section'],
                 td[k]['desc']['app'],
                 k
             )
@@ -78,6 +78,28 @@
     return const.all_statuses[sts]
 
 
+def make_repo_info(repos):
+    _text = ""
+    for r in repos:
+        # tag
+        _text += r['tag'] + ": "
+        # repo header
+        _text += " ".join([
+            r['subset'],
+            r['release'],
+            r['ubuntu-release'],
+            r['type'],
+            r['arch']
+        ]) + ", "
+        # maintainer w/o email
+        _m = r['maintainer'][:r['maintainer'].find('<')-1]
+        _m_ascii = _m.encode('ascii', errors="xmlcharrefreplace")
+        _text += _m_ascii
+        # newline
+        _text += "<br />"
+    return _text
+
+
 @six.add_metaclass(abc.ABCMeta)
 class _Base(object):
     def __init__(self):
@@ -126,6 +148,7 @@
         self.jinja2_env.filters['make_status_class'] = make_status_class
         self.jinja2_env.filters['make_action_label'] = make_action_label
         self.jinja2_env.filters['make_action_class'] = make_action_class
+        self.jinja2_env.filters['make_repo_info'] = make_repo_info
 
         # render!
         logger_cli.info("-> Using template: {}".format(self.tmpl))
diff --git a/scripts/ifs_data.py b/scripts/ifs_data.py
index e182fb1..119acdb 100644
--- a/scripts/ifs_data.py
+++ b/scripts/ifs_data.py
@@ -67,9 +67,9 @@
 def get_ifs_data():
     # Collect interface and IPs data
     # Compile regexps for detecting IPs
-    if_start = re.compile("^[0-9]+: .*: \<.*\> .*$")
-    if_link = re.compile("^\s{4}link\/ether\ .*$")
-    if_ipv4 = re.compile("^\s{4}inet\ .*$")
+    if_start = re.compile(r"^[0-9]+: .*: \<.*\> .*$")
+    if_link = re.compile(r"^\s{4}link\/ether\ .*$")
+    if_ipv4 = re.compile(r"^\s{4}inet\ .*$")
     # variable prototypes
     _ifs = {}
     _if_name = None
diff --git a/setup.py b/setup.py
index 2f93c71..9c32915 100644
--- a/setup.py
+++ b/setup.py
@@ -24,9 +24,9 @@
 entry_points = {
     "console_scripts": [
         "mcp-checker = cfg_checker.cfg_check:config_check_entrypoint",
-        "mcp-checker-package = cfg_checker.cli.packages:entrypoint",
-        "mcp-checker-network = cfg_checker.cli.network:entrypoint",
-        "mcp-checker-reclass = cfg_checker.cli.reclass:entrypoint"
+        "mcp-pkg = cfg_checker.cli.packages:entrypoint",
+        "mcp-net = cfg_checker.cli.network:entrypoint",
+        "cmp-reclass = cfg_checker.cli.reclass:entrypoint"
     ]
 }
 
diff --git a/templates/common_styles.j2 b/templates/common_styles.j2
index 604564b..66385be 100644
--- a/templates/common_styles.j2
+++ b/templates/common_styles.j2
@@ -73,7 +73,7 @@
         border-bottom: 1px dotted black;
     }
 
-    .tooltip .tooltiptext {
+    .tooltip .tooltiptext, .tooltip .repoinfotext {
         visibility: hidden;
         background-color: black;
         font-family: "Lucida Console", Monaco, monospace;
@@ -88,8 +88,16 @@
         z-index: 1;
     }
 
+    .tooltip .repoinfotext {
+        right: 0%;
+    }
+
     .tooltip:hover .tooltiptext {
         visibility: visible;
     }
 
+    .tooltip:hover .repoinfotext {
+        visibility: visible;
+    }
+
 </style>
diff --git a/templates/pkg_versions_html.j2 b/templates/pkg_versions_html.j2
index 7c52a1d..1bf216d 100644
--- a/templates/pkg_versions_html.j2
+++ b/templates/pkg_versions_html.j2
@@ -157,7 +157,7 @@
 {% macro render_package(pkg_name, dat, status_shown, action_shown, id_label) %}
         <tr onclick="toggleClassByID('{{ id_label }}_{{ pkg_name }}_{{ status_shown }}_{{ action_shown }}')" id="{{ id_label }}_{{ pkg_name }}_{{ status_shown }}_{{ action_shown }}_button">
             <td class="repo">{{ dat['desc']['repo'] }}</td>
-            <td class="component">{{ dat['desc']['component'] }}</td>
+            <td class="component">{{ dat['desc']['section'] }}</td>
             <td class="app">{{ dat['desc']['app'] }}</td>
             <td class="package_name">{{ pkg_name }}</td>
             <td class="status_container" colspan="3">
@@ -192,7 +192,12 @@
                 </div>
             </td>
             <td class="candidate">{{ nd['c'].version }}</td>
-            <td class="release">{{ dat['r'].version }}</td>
+            <td class="release">
+                <div class="tooltip">
+                    {{ dat['r'].version }}
+                    <pre class="repoinfotext">{{ dat['repos'] | make_repo_info }}</pre>
+                </div>
+            </td>
         </tr>
         {% endfor %}
         {% endfor %}