Migrating to Python v3
- support for Python v3.8.x
- support for Python v3.5.x
- new tag, 2019.2.8
- updates to class initialization and iterator usage (see the sketch
  after this list)
- unit tests updated, coverage >75%
- new coverage routines
- unit test profiling
- full fake data set for unit tests
- unit test run takes ~1.5 seconds
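
The recurring Python 3 changes in this patch are dict-view and iterator
handling. Below is a minimal sketch of those patterns; the sample dict and
names are illustrative only, not taken from the modules themselves.

    data = {"eth0": {"mtu": 1500}, "eth1": {"mtu": 9000}}

    # Python 2 used data.iteritems(); in Python 3, items() returns a view
    for if_name, if_data in data.items():
        pass

    # keys() is a view in Python 3, so materialize it before calling
    # list-only methods such as sort() or pop()
    levels = list(data.keys())
    levels.sort()

    # instead of popping from a key list, consume an iterator and
    # stop on StopIteration
    nets = iter(data.keys())
    while True:
        try:
            net = next(nets)
        except StopIteration:
            break
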
Bugfixes
- 34834: proper use of the 'sudo' option
- multiple fixes for proper iterator usage
- 37919: show a warning when installed and candidate versions
  are newer than the release version (see the sketch after this list)
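
The version-comparison change above reports a warning (with a repo check
action) instead of the previous silent "up" status when both the installed
and candidate versions are ahead of the release version. A simplified sketch
of that rule; the constants and tuples below are stand-ins for const.* and
DebianVersion, not the real implementation in packages/versions.py.

    VERSION_OK, VERSION_WARN = "ok", "warn"
    ACT_NA, ACT_REPO = "no action", "check repo settings"

    def status_for(installed, candidate, release):
        if installed == release:
            return VERSION_OK, ACT_NA
        if installed > release and candidate > release:
            # previously VERSION_UP / ACT_NA; now surfaced as a warning
            return VERSION_WARN, ACT_REPO
        return VERSION_OK, ACT_NA

    # tuples compare like simplified version numbers
    print(status_for((2, 1), (2, 2), (2, 0)))  # ('warn', 'check repo settings')
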
Change-Id: Idd6b889f7ce94ae0c832e2f0a0346e4fdc3264a3
Related-PROD: PROD-34834 PROD-34664 PROD-34919
diff --git a/cfg_checker/modules/network/mapper.py b/cfg_checker/modules/network/mapper.py
index 482bdfa..59f3781 100644
--- a/cfg_checker/modules/network/mapper.py
+++ b/cfg_checker/modules/network/mapper.py
@@ -124,7 +124,7 @@
continue
# build map based on IPs and save info too
- for if_name, _dat in _pillar.iteritems():
+ for if_name, _dat in _pillar.items():
# get proper IF name
_if_name = if_name if 'name' not in _dat else _dat['name']
# place it
@@ -195,11 +195,11 @@
logger_cli.info("-> mapping IPs")
# match interfaces by IP subnets
- for host, node_data in salt_master.nodes.iteritems():
+ for host, node_data in salt_master.nodes.items():
if not salt_master.is_node_available(host):
continue
- for net_name, net_data in node_data['networks'].iteritems():
+ for net_name, net_data in node_data['networks'].items():
# cut net name
_i = net_name.find('@')
_name = net_name if _i < 0 else net_name[:_i]
@@ -321,7 +321,7 @@
# debug, print built tree
# logger_cli.debug("# '{}'".format(_ifname))
- lvls = _tree.keys()
+ lvls = list(_tree.keys())
lvls.sort()
n = len(lvls)
m = max([len(_tree[k].keys()) for k in _tree.keys()])
@@ -330,11 +330,14 @@
while True:
_lv = lvls.pop(0)
# get all interfaces on this level
- nets = _tree[_lv].keys()
+ nets = iter(_tree[_lv].keys())
while True:
y = 0
# get next interface
- _net = nets.pop(0)
+ try:
+ _net = next(nets)
+ except StopIteration:
+ break
# all nets
_a = [_net]
# put current interface if this is only one left
diff --git a/cfg_checker/modules/network/network_errors.py b/cfg_checker/modules/network/network_errors.py
index 6c41021..7159a36 100644
--- a/cfg_checker/modules/network/network_errors.py
+++ b/cfg_checker/modules/network/network_errors.py
@@ -23,9 +23,9 @@
NET_PING_ERROR = next(_c)
NET_PING_NOT_RESOLVED = next(_c)
- def __init__(self):
- super(NetworkErrors, self).__init__("NET")
+ _initialized = False
+ def _add_types(self):
self.add_error_type(
self.NET_MTU_MISMATCH,
"MTU mismatch on runtime interface and in reclass"
@@ -82,6 +82,21 @@
self.NET_PING_NOT_RESOLVED,
"Host not resolved while conducting Ping"
)
+ self._initialized = True
+
+ def __init__(self, folder=None):
+ super(NetworkErrors, self).__init__("NET", folder=folder)
+
+ if not self._initialized:
+ self._add_types()
+ self._initialized = True
+
+ def __call__(self):
+ if not self._initialized:
+ self._add_types()
+ self._initialized = True
+
+ return self
del _c
diff --git a/cfg_checker/modules/network/pinger.py b/cfg_checker/modules/network/pinger.py
index 266727b..0500284 100644
--- a/cfg_checker/modules/network/pinger.py
+++ b/cfg_checker/modules/network/pinger.py
@@ -44,7 +44,7 @@
def ping_nodes(self, network_cidr_str):
# Conduct actual ping using network CIDR
logger_cli.info("# Collecting node pairs")
- _fake_if = ipaddress.IPv4Interface(unicode(network_cidr_str))
+ _fake_if = ipaddress.IPv4Interface(str(network_cidr_str))
_net = _fake_if.network
# collect nodes and ips from reclass
nodes = self._collect_node_addresses(_net)
@@ -69,7 +69,7 @@
"targets": {}
}
- for tgt_host, tgt_data in nodes.iteritems():
+ for tgt_host, tgt_data in nodes.items():
_t = _packets[src_host]["targets"]
for tgt_if in tgt_data:
tgt_if_name = tgt_if['name']
@@ -110,7 +110,7 @@
_progress = Progress(_count)
_progress_index = 0
_node_index = 0
- for src, src_data in _packets.iteritems():
+ for src, src_data in _packets.items():
_targets = src_data["targets"]
_node_index += 1
# create 'targets.json' on source host
@@ -154,7 +154,7 @@
)
continue
# Handle return codes
- for tgt_node, _tgt_ips in _result.iteritems():
+ for tgt_node, _tgt_ips in _result.items():
for _params in _tgt_ips:
_body = "{}({}) --{}--> {}({}@{})\n".format(
src,
diff --git a/cfg_checker/modules/packages/__init__.py b/cfg_checker/modules/packages/__init__.py
index 9d55c05..41dfca1 100644
--- a/cfg_checker/modules/packages/__init__.py
+++ b/cfg_checker/modules/packages/__init__.py
@@ -1,7 +1,7 @@
from cfg_checker.helpers import args_utils
from cfg_checker.modules.packages.repos import RepoManager
-import checker
+from . import checker
command_help = "Package versions check (Candidate vs Installed)"
diff --git a/cfg_checker/modules/packages/checker.py b/cfg_checker/modules/packages/checker.py
index 92d9e1c..8f30f3c 100644
--- a/cfg_checker/modules/packages/checker.py
+++ b/cfg_checker/modules/packages/checker.py
@@ -8,7 +8,7 @@
from cfg_checker.nodes import salt_master
from cfg_checker.reports import reporter
-from versions import DebianVersion, PkgVersions, VersionCmpResult
+from .versions import DebianVersion, PkgVersions, VersionCmpResult
class CloudPackageChecker(object):
@@ -78,6 +78,7 @@
# sort packages
_pn, _val = all_packages.popitem()
_c = _val['desc']['section']
+ _rkeys = _val['results'].keys()
if not full:
# Check if this packet has errors
@@ -125,9 +126,9 @@
_data['unlisted'].update({
_pn: _val
})
- _eu += _val['results'].keys().count(const.VERSION_ERR)
- _wu += _val['results'].keys().count(const.VERSION_WARN)
- _du += _val['results'].keys().count(const.VERSION_DOWN)
+ _eu += sum(x == const.VERSION_ERR for x in _rkeys)
+ _wu += sum(x == const.VERSION_WARN for x in _rkeys)
+ _du += sum(x == const.VERSION_DOWN for x in _rkeys)
# mirantis/critical
# elif len(_c) > 0 and _c != 'System':
elif _val['is_mirantis']:
@@ -135,25 +136,25 @@
_data['critical'].update({
_pn: _val
})
- _ec += _val['results'].keys().count(const.VERSION_ERR)
- _wc += _val['results'].keys().count(const.VERSION_WARN)
- _dc += _val['results'].keys().count(const.VERSION_DOWN)
+ _ec += sum(x == const.VERSION_ERR for x in _rkeys)
+ _wc += sum(x == const.VERSION_WARN for x in _rkeys)
+ _dc += sum(x == const.VERSION_DOWN for x in _rkeys)
# system
elif _c == 'System':
_data['system'].update({
_pn: _val
})
- _es += _val['results'].keys().count(const.VERSION_ERR)
- _ws += _val['results'].keys().count(const.VERSION_WARN)
- _ds += _val['results'].keys().count(const.VERSION_DOWN)
+ _es += sum(x == const.VERSION_ERR for x in _rkeys)
+ _ws += sum(x == const.VERSION_WARN for x in _rkeys)
+ _ds += sum(x == const.VERSION_DOWN for x in _rkeys)
# rest
else:
_data['other'].update({
_pn: _val
})
- _eo += _val['results'].keys().count(const.VERSION_ERR)
- _wo += _val['results'].keys().count(const.VERSION_WARN)
- _do += _val['results'].keys().count(const.VERSION_DOWN)
+ _eo += sum(x == const.VERSION_ERR for x in _rkeys)
+ _wo += sum(x == const.VERSION_WARN for x in _rkeys)
+ _do += sum(x == const.VERSION_DOWN for x in _rkeys)
_progress.end()
@@ -244,7 +245,7 @@
_total_processed = 0
# Collect packages from all of the nodes in flat dict
_all_packages = {}
- for node_name, node_value in salt_master.nodes.iteritems():
+ for node_name, node_value in salt_master.nodes.items():
_uniq_len = len(_all_packages.keys())
_progress_index += 1
# progress updates shown before next node only
@@ -256,7 +257,7 @@
_total_processed
)
)
- for _name, _value in node_value['packages'].iteritems():
+ for _name, _value in node_value['packages'].items():
_total_processed += 1
# Parse versions from nodes
_ver_ins = DebianVersion(_value['installed'])
@@ -309,9 +310,9 @@
_vs = {}
_sections = {}
_apps = {}
- for s, apps in _r.iteritems():
- for a, versions in apps.iteritems():
- for v, repos in versions.iteritems():
+ for s, apps in _r.items():
+ for a, versions in apps.items():
+ for v, repos in versions.items():
for repo in repos:
if v not in _vs:
_vs[v] = []
@@ -324,11 +325,13 @@
_apps[v].append(a)
# search for the newest version among filtered
_r_desc = []
- _vs_keys = _vs.keys()
- if _vs_keys:
- _newest = _newest = DebianVersion(_vs_keys.pop())
- else:
+ _vs_keys = iter(_vs.keys())
+ # get next version, if any
+ try:
+ _newest = DebianVersion(next(_vs_keys))
+ except StopIteration:
_newest = DebianVersion('')
+ # iterate others, if any
for v in _vs_keys:
_this = DebianVersion(v)
if _this > _newest:
diff --git a/cfg_checker/modules/packages/repos.py b/cfg_checker/modules/packages/repos.py
index 00c438f..57d8b9e 100644
--- a/cfg_checker/modules/packages/repos.py
+++ b/cfg_checker/modules/packages/repos.py
@@ -43,32 +43,39 @@
def _get_value_index(_di, value, header=None):
# Mainteiner names often uses specific chars
- # so make sure that value saved is unicode not str
- _val = unicode(value, 'utf-8') if isinstance(value, str) else value
+    # so make sure that value saved is str, not bytes
+ # Python2
+ # _val = str(value, 'utf-8') if isinstance(value, str) else value
+    # Python3 values are always utf-8 decoded str
+ _val = value
if header:
- if not filter(lambda i: _di[i]["header"] == header, _di):
- _index = unicode(len(_di.keys()) + 1)
+ try:
+ _ = next(filter(lambda i: _di[i]["header"] == header, _di))
+ # iterator not empty, find index
+ for _k, _v in _di.items():
+ if _v["header"] == header:
+ _index = _k
+ except StopIteration:
+ _index = str(len(_di.keys()) + 1)
_di[_index] = {
"header": header,
"props": _val
}
- else:
- for _k, _v in _di.iteritems():
- if _v["header"] == header:
- _index = _k
-
- return _index
+ finally:
+ return _index
else:
- if not filter(lambda i: _di[i] == _val, _di):
- _index = unicode(len(_di.keys()) + 1)
- # on save, cast it as unicode
- _di[_index] = _val
- else:
- for _k, _v in _di.iteritems():
+ try:
+ _ = next(filter(lambda i: _di[i] == _val, _di))
+ # iterator not empty, find index
+ for _k, _v in _di.items():
if _v == _val:
_index = _k
-
- return _index
+ except StopIteration:
+ _index = str(len(_di.keys()) + 1)
+ # on save, cast it as str
+ _di[_index] = _val
+ finally:
+ return _index
def _safe_load(_f, _a):
@@ -79,7 +86,7 @@
_f
)
)
- return json.loads(_a.get_file(_f))
+ return json.loads(_a.get_file(_f, decode=True))
else:
return {}
@@ -92,8 +99,33 @@
class ReposInfo(object):
- repos = []
- _repofile = os.path.join(pkg_dir, "versions", _repos_info_archive)
+ init_done = False
+
+ def _init_vars(self):
+ self.repos = []
+
+ def _init_folders(self, arch_folder=None):
+ if arch_folder:
+ self._arch_folder = arch_folder
+ self._repofile = os.path.join(arch_folder, _repos_info_archive)
+ else:
+ self._arch_folder = os.path.join(pkg_dir, "versions")
+ self._repofile = os.path.join(
+ self._arch_folder,
+ _repos_info_archive
+ )
+
+ def __init__(self, arch_folder=None):
+ # perform inits
+ self._init_vars()
+ self._init_folders(arch_folder)
+ self.init_done = True
+
+ def __call__(self, *args, **kwargs):
+ if self.init_done:
+ return self
+ else:
+            self.__init__(*args, **kwargs)
+            return self
@staticmethod
def _ls_repo_page(url):
@@ -189,8 +221,10 @@
else:
# gather all of them
_tags, _ = self._ls_repo_page(base_url)
- _tags.remove('hotfix')
- _tags.remove('update')
+ if "hotfix" in _tags:
+ _tags.remove('hotfix')
+ if "update" in _tags:
+ _tags.remove('update')
# search tags in subfolders
_h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
_u_tags, _ = self._ls_repo_page(base_url + 'update')
@@ -334,30 +368,46 @@
def get_repoinfo(self, tag):
_tgz = TGZFile(self._repofile)
- _buf = _tgz.get_file(tag + ext)
+ _buf = _tgz.get_file(tag + ext, decode=True)
return json.loads(_buf)
class RepoManager(object):
- # archives
- _arch_folder = os.path.join(pkg_dir, "versions")
- _versions_arch = os.path.join(_arch_folder, _repos_versions_archive)
- _desc_arch = os.path.join(_arch_folder, _pkg_desc_archive)
- _apps_filename = "apps.json"
+ init_done = False
- # repository index
- _repo_index = {}
- _mainteiners_index = {}
+ def _init_folders(self, arch_folder=None):
+ # overide arch folder if needed
+ if arch_folder:
+ self._arch_folder = arch_folder
+ else:
+ self._arch_folder = os.path.join(pkg_dir, "versions")
- _apps = {}
+ self._versions_arch = os.path.join(
+ self._arch_folder,
+ _repos_versions_archive
+ )
+ self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
- # init package versions storage
- _versions_mirantis = {}
- _versions_other = {}
+ def _init_vars(self, info_class):
+ # RepoInfo instance init
+ if info_class:
+ self._info_class = info_class
+ else:
+ self._info_class = ReposInfo()
+ # archives
+ self._apps_filename = "apps.json"
- def __init__(self):
- # Ensure that versions folder exists
- logger_cli.debug(ensure_folder_exists(self._arch_folder))
+ # repository index
+ self._repo_index = {}
+ self._mainteiners_index = {}
+
+ self._apps = {}
+
+ # init package versions storage
+ self._versions_mirantis = {}
+ self._versions_other = {}
+
+ def _init_archives(self):
# Init version files
self.versionstgz = TGZFile(
self._versions_arch,
@@ -394,6 +444,22 @@
self.versionstgz
)
+ def __init__(self, arch_folder=None, info_class=None):
+ # Perform inits
+ self._init_vars(info_class)
+ self._init_folders(arch_folder)
+ # Ensure that versions folder exists
+ logger_cli.debug(ensure_folder_exists(self._arch_folder))
+ # Preload/create archives
+ self._init_archives()
+ self.init_done = True
+
+ def __call__(self, *args, **kwargs):
+ if self.init_done:
+ return self
+ else:
+            self.__init__(*args, **kwargs)
+            return self
+
def _create_repo_header(self, p):
_header = "_".join([
p['tag'],
@@ -504,14 +570,14 @@
due to huge resulting file size and slow processing
"""
# init gzip and downloader
- _info = ReposInfo().get_repoinfo(tag)
+ _info = self._info_class.get_repoinfo(tag)
# calculate Packages.gz files to process
_baseurl = _info.pop("baseurl")
_total_components = len(_info.keys()) - 1
_ubuntu_package_repos = 0
_other_repos = 0
- for _c, _d in _info.iteritems():
- for _ur, _l in _d.iteritems():
+ for _c, _d in _info.items():
+ for _ur, _l in _d.items():
if _ur in ubuntu_releases:
_ubuntu_package_repos += len(_l)
elif _ur != 'url':
@@ -531,12 +597,12 @@
_index = 0
_processed = 0
_new = 0
- for _c, _d in _info.iteritems():
+ for _c, _d in _info.items():
# we do not need url here, just get rid of it
if 'url' in _d:
_d.pop('url')
# _url = if 'url' in _d else _baseurl + _c
- for _ur, _l in _d.iteritems():
+ for _ur, _l in _d.items():
# iterate package collections
for _p in _l:
# descriptions
@@ -564,6 +630,8 @@
)
)
continue
+ else:
+ _raw = _raw.decode("utf-8")
_progress.write_progress(
_index,
note="/ {} {} {} {} {}, {}/{}".format(
@@ -728,11 +796,9 @@
def build_repos(self, url, tag=None):
"""Builds versions data for selected tag, or for all of them
"""
- # Init the ReposInfo class and check if all files are present
- _repos = ReposInfo()
# recoursively walk the mirrors
# and gather all of the repos for 'tag' or all of the tags
- _repos.fetch_repos(url, tag=tag)
+ self._info_class.fetch_repos(url, tag=tag)
def _build_action(self, url, tags):
for t in tags:
@@ -741,7 +807,7 @@
def get_available_tags(self, tag=None):
# Populate action tags
- major, updates, hotfix = ReposInfo().list_tags(splitted=True)
+ major, updates, hotfix = self._info_class.list_tags(splitted=True)
_tags = []
if tag in major:
@@ -767,14 +833,14 @@
logger_cli.info("# No action set, nothing to do")
# See if this is a list action
if action == "list":
- _all = ReposInfo().list_tags()
+ _all = self._info_class.list_tags()
if _all:
# Print pretty list and exit
logger_cli.info("# Tags available at '{}':".format(url))
for t in _all:
_ri = self._repo_index
_isparsed = any(
- [k for k, v in _ri.iteritems()
+ [k for k, v in _ri.items()
if v['props']['tag'] == t]
)
if _isparsed:
@@ -862,8 +928,8 @@
_rows = []
for _p in versions.keys():
_vs = versions[_p]
- for _v, _d1 in _vs.iteritems():
- for _md5, _info in _d1.iteritems():
+ for _v, _d1 in _vs.items():
+ for _md5, _info in _d1.items():
if _all or name == _info['app']:
_s_max = max(len(_info['section']), _s_max)
_a_max = max(len(_info['app']), _a_max)
@@ -993,21 +1059,21 @@
and filters them using keys above
"""
if tag:
- tag = unicode(tag) if not isinstance(tag, unicode) else tag
+ tag = str(tag) if not isinstance(tag, str) else tag
_out = {}
_vs = self.get_package_versions(name, tagged=True)
# iterate to filter out keywords
- for s, apps in _vs.iteritems():
- for a, _tt in apps.iteritems():
- for t, vs in _tt.iteritems():
+ for s, apps in _vs.items():
+ for a, _tt in apps.items():
+ for t, vs in _tt.items():
# filter tags
if tag and t != tag and t.rsplit('.', 1)[0] != tag:
continue
# Skip hotfix tag
if t == tag + ".hotfix":
continue
- for v, rp in vs.iteritems():
- for h, p in rp.iteritems():
+ for v, rp in vs.items():
+ for h, p in rp.items():
# filter headers with all keywords matching
_h = re.split(r"[\-\_]+", h)
_included = all([kw in _h for kw in include])
@@ -1038,9 +1104,9 @@
# insert repo data, insert props into headers place
_package = {}
if tagged:
- for _v, _d1 in _vs.iteritems():
+ for _v, _d1 in _vs.items():
# use tag as a next step
- for _md5, _info in _d1.iteritems():
+ for _md5, _info in _d1.items():
_s = _info['section']
_a = _info['app']
for _pair in _info['repo']:
@@ -1061,8 +1127,8 @@
_rp
)
else:
- for _v, _d1 in _vs.iteritems():
- for _md5, _info in _d1.iteritems():
+ for _v, _d1 in _vs.items():
+ for _md5, _info in _d1.items():
_s = _info['section']
_a = _info['app']
for _pair in _info['repo']:
@@ -1079,7 +1145,7 @@
def parse_repos(self):
# all tags to check
- major, updates, hotfix = ReposInfo().list_tags(splitted=True)
+ major, updates, hotfix = self._info_class.list_tags(splitted=True)
# major tags
logger_cli.info("# Processing major tags")
diff --git a/cfg_checker/modules/packages/versions.py b/cfg_checker/modules/packages/versions.py
index 7fae9fc..542c0e4 100644
--- a/cfg_checker/modules/packages/versions.py
+++ b/cfg_checker/modules/packages/versions.py
@@ -351,8 +351,8 @@
self.target = r
elif i > r:
# both are newer, same target
- self.status = const.VERSION_UP
- self.action = const.ACT_NA
+ self.status = const.VERSION_WARN
+ self.action = const.ACT_REPO
elif i == r:
# all is ok
self.status = const.VERSION_OK
@@ -372,10 +372,3 @@
# and we need to update per-part status
self.source.update_parts(self.target, self.status)
-
- @staticmethod
- def deb_lower(_s, _t):
- if _t.debian and _t.debian > _s.debian:
- return True
- else:
- return False
diff --git a/cfg_checker/modules/reclass/__init__.py b/cfg_checker/modules/reclass/__init__.py
index 4b8b667..88b287e 100644
--- a/cfg_checker/modules/reclass/__init__.py
+++ b/cfg_checker/modules/reclass/__init__.py
@@ -4,9 +4,9 @@
from cfg_checker.helpers import args_utils
from cfg_checker.reports import reporter
-import comparer
+from . import comparer
-import validator
+from . import validator
command_help = "Reclass related checks and reports"
diff --git a/cfg_checker/modules/reclass/comparer.py b/cfg_checker/modules/reclass/comparer.py
index 8ef8894..47e4baf 100644
--- a/cfg_checker/modules/reclass/comparer.py
+++ b/cfg_checker/modules/reclass/comparer.py
@@ -5,8 +5,9 @@
import itertools
import os
+from functools import reduce
+
from cfg_checker.common import logger, logger_cli
-from cfg_checker.reports import reporter
import yaml
@@ -197,13 +198,13 @@
# use ifilterfalse to compare lists of dicts
try:
_removed = list(
- itertools.ifilterfalse(
+ itertools.filterfalse(
lambda x: x in dict2[k],
dict1[k]
)
)
_added = list(
- itertools.ifilterfalse(
+ itertools.filterfalse(
lambda x: x in dict1[k],
dict2[k]
)
@@ -271,9 +272,10 @@
except TypeError as e:
logger.warning(
"One of the values is not a dict: "
- "{}, {}".format(
+ "{}, {}; {}".format(
str(dict1),
- str(dict2)
+ str(dict2),
+ str(e)
))
match = False
if not match:
@@ -331,38 +333,3 @@
_diff_report["diff_names"] = [self.model_name_1, self.model_name_2]
return _diff_report
-
- def compare_models(self):
- # Do actual compare using model names from the class
- self.load_model_tree(
- self.model_name_1,
- self.model_path_1
- )
- self.load_model_tree(
- self.model_name_2,
- self.model_path_2
- )
- # Models should have similar structure to be compared
- # classes/system
- # classes/cluster
- # nodes
-
- diffs = self.generate_model_report_tree()
-
- report_file = \
- self.model_name_1 + "-vs-" + self.model_name_2 + ".html"
- # HTML report class is post-callable
- report = reporter.ReportToFile(
- reporter.HTMLModelCompare(),
- report_file
- )
- logger_cli.info("...generating report to {}".format(report_file))
- # report will have tabs for each of the comparable entities in diffs
- report({
- "nodes": {},
- "rc_diffs": diffs,
- })
- # with open("./gen_tree.json", "w+") as _out:
- # _out.write(json.dumps(mComparer.generate_model_report_tree))
-
- return