Package versions divided by maintainers
- Mirantis packages
- Other packages
Change-Id: I0bdbed7d03aa577c12afc504506896d887de7b16
Related-PROD: PROD-28199
diff --git a/cfg_checker/common/const.py b/cfg_checker/common/const.py
index 490d879..cbd0643 100644
--- a/cfg_checker/common/const.py
+++ b/cfg_checker/common/const.py
@@ -84,4 +84,6 @@
_pkg_desc_archive = "pkg.descriptions.tgz"
_repos_index_filename = "repoindex.json"
-_repos_versions_filename = "versions.json"
+_mainteiners_index_filename = "mainteiners.json"
+_mirantis_versions_filename = "mirantis_v.json"
+_other_versions_filename = "other_v.json"
diff --git a/cfg_checker/modules/packages/repos.py b/cfg_checker/modules/packages/repos.py
index e592915..c0aaaee 100644
--- a/cfg_checker/modules/packages/repos.py
+++ b/cfg_checker/modules/packages/repos.py
@@ -3,11 +3,13 @@
from copy import deepcopy
from cfg_checker.common import logger, logger_cli, nested_set
+from cfg_checker.common.const import _mainteiners_index_filename
+from cfg_checker.common.const import _mirantis_versions_filename
+from cfg_checker.common.const import _other_versions_filename
from cfg_checker.common.const import _pkg_desc_archive
from cfg_checker.common.const import _repos_index_filename
from cfg_checker.common.const import _repos_info_archive
from cfg_checker.common.const import _repos_versions_archive
-from cfg_checker.common.const import _repos_versions_filename
from cfg_checker.common.const import ubuntu_releases
from cfg_checker.common.file_utils import get_gzipped_file
from cfg_checker.common.settings import pkg_dir
@@ -20,18 +22,62 @@
ext = ".json"
-def get_tag_label(_tag):
+def get_tag_label(_tag, parsed=False):
# prettify the tag for printing
- _label = ""
+ if parsed:
+ _label = "+ "
+ else:
+ _label = " "
+
if _tag.endswith(".update"):
_label += "[updates] " + _tag.rsplit('.', 1)[0]
elif _tag.endswith(".hotfix"):
_label += " [hotfix] " + _tag.rsplit('.', 1)[0]
else:
_label += " "*10 + _tag
+
return _label
+def _get_value_index(_di, value, header=None):
+ if header:
+ if not filter(lambda i: _di[i]["header"] == header, _di):
+ _index = str(len(_di.keys()) + 1)
+ _di[_index] = {
+ "header": header,
+ "props": value
+ }
+ else:
+ for _k, _v in _di.iteritems():
+ if _v["header"] == header:
+ _index = _k
+
+ return _index
+ else:
+ if not filter(lambda i: _di[i] == value, _di):
+ _index = str(len(_di.keys()) + 1)
+ _di[_index] = value
+ else:
+ for _k, _v in _di.iteritems():
+ if _v == value:
+ _index = _k
+
+ return _index
+
+
+def _safe_load(_f, _a):
+ if _f in _a.list_files():
+ logger_cli.info(
+ "# Loading '{}':'{}'".format(
+ _a.basefile,
+ _f
+ )
+ )
+ return json.loads(_a.get_file(_f))
+ else:
+ return {}
+
+
def _n_url(url):
if url[-1] == '/':
return url
@@ -287,18 +333,17 @@
class RepoManager(object):
- # files in archive
- _repoindexfile = _repos_index_filename
- _versionsfile = _repos_versions_filename
# archives
_versions_arch = os.path.join(pkg_dir, "versions", _repos_versions_archive)
_desc_arch = os.path.join(pkg_dir, "versions", _pkg_desc_archive)
# repository index
_repo_index = {}
+ _mainteiners_index = {}
# init package versions storage
- _versions = {}
+ _versions_mirantis = {}
+ _versions_other = {}
def __init__(self):
# Init version files
@@ -310,24 +355,25 @@
self._desc_arch,
label="MCP Configuration Checker: Package descriptions archive"
)
+ # indices
+ self._repo_index = _safe_load(
+ _repos_index_filename,
+ self.versionstgz
+ )
+ self._mainteiners_index = _safe_load(
+ _mainteiners_index_filename,
+ self.versionstgz
+ )
- if self._versionsfile in self.versionstgz.list_files():
- logger_cli.info(
- "# Loading versions '{}':'{}'".format(
- self._versions_arch,
- self._versionsfile
- )
- )
- self._versions = json.loads(
- self.versionstgz.get_file(self._versionsfile)
- )
-
- if self._repoindexfile in self.versionstgz.list_files():
- self._repo_index = json.loads(
- self.versionstgz.get_file(
- self._repoindexfile
- )
- )
+ # versions
+ self._versions_mirantis = _safe_load(
+ _mirantis_versions_filename,
+ self.versionstgz
+ )
+ self._versions_other = _safe_load(
+ _other_versions_filename,
+ self.versionstgz
+ )
def _create_repo_header(self, p):
_header = "_".join([
@@ -338,80 +384,66 @@
p['type'],
p['arch']
])
- if not filter(
- lambda i: self._repo_index[i]["header"] == _header,
- self._repo_index
- ):
- _index = str(len(self._repo_index.keys()) + 1)
- self._repo_index[_index] = {
- "header": _header,
- "props": p
- }
- else:
- for _k, _v in self._repo_index.iteritems():
- if _v["header"] == _header:
- _index = _k
+ return _get_value_index(self._repo_index, p, header=_header)
- return _index
+ def _get_indexed_values(self, pair):
+ _h, _m = pair.split('-')
+ return self._repo_index[_h], self._mainteiners_index[_m]
- def _get_repo_header(self, index):
- return self._repo_index[index]
-
- def _update_pkg_version(self, n, v, md5, header_index):
+ def _update_pkg_version(self, _d, n, v, md5, h_index, m_index):
"""Method updates package version record in global dict
"""
# 'if'*4 operation is pretty expensive when using it 100k in a row
# so try/except is a better way to go, even faster than 'reduce'
- vs = self._versions
+ _pair = "-".join([h_index, m_index])
try:
# try to load list
- _list = vs[n][v][md5]
+ _list = _d[n][v][md5]
# cast it as set() and union()
- _list = set(_list).union([header_index])
+ _list = set(_list).union([_pair])
# cast back as set() is not serializeable
- vs[n][v][md5] = list(_list)
+ _d[n][v][md5] = list(_list)
return False
except KeyError:
# ok, this is fresh pkg. Do it slow way.
- if n in vs:
+ if n in _d:
# there is such pkg already
- if v in vs[n]:
+ if v in _d[n]:
# there is such version, check md5
- if md5 in vs[n][v]:
+ if md5 in _d[n][v]:
# just add new repo header
- if header_index not in vs[n][v][md5]:
- vs[n][v][md5].append(header_index)
+ if _pair not in _d[n][v][md5]:
+ _d[n][v][md5].append(_pair)
else:
# check if such index is here...
_existing = filter(
- lambda i: header_index in vs[n][v][i],
- vs[n][v]
+ lambda i: _pair in _d[n][v][i],
+ _d[n][v]
)
if _existing:
# Yuck! Same version had different MD5
+ _r, _m = self._get_indexed_values(_pair)
logger_cli.error(
"# ERROR: Package version has multiple MD5s "
"in '{}': {}:{}:{}".format(
- self._get_repo_header(
- header_index
- )["header"],
+ _r,
n,
v,
md5
)
)
- vs[n][v][md5] = [header_index]
+ _d[n][v][md5] = [_pair]
else:
# this is new version for existing package
- vs[n][v] = {
- md5: [header_index]
+ _d[n][v] = {
+ md5: [_pair]
}
return False
else:
+            # this is a new package
- vs[n] = {
+ _d[n] = {
v: {
- md5: [header_index]
+ md5: [_pair]
}
}
return True
@@ -517,14 +549,37 @@
_name = _desc['package']
_md5 = _desc['md5sum']
_version = _desc['version']
- # update version for a package
- if self._update_pkg_version(
- _name,
- _version,
- _md5,
- self._create_repo_header(_pkg)
- ):
- _new += 1
+ _mainteiner = _desc['maintainer']
+
+            # Check if the maintainer is Mirantis
+ if _mainteiner.endswith("@mirantis.com>"):
+ # update mirantis versions
+ if self._update_pkg_version(
+ self._versions_mirantis,
+ _name,
+ _version,
+ _md5,
+ self._create_repo_header(_pkg),
+ _get_value_index(
+ self._mainteiners_index,
+ _mainteiner
+ )
+ ):
+ _new += 1
+ else:
+ # update other versions
+ if self._update_pkg_version(
+ self._versions_other,
+ _name,
+ _version,
+ _md5,
+ self._create_repo_header(_pkg),
+ _get_value_index(
+ self._mainteiners_index,
+ _mainteiner
+ )
+ ):
+ _new += 1
if descriptions:
_d_new = {
@@ -554,10 +609,15 @@
_progress.end()
# backup headers to disk
self.versionstgz.add_file(
- self._repoindexfile,
+ _repos_index_filename,
json.dumps(self._repo_index),
replace=True
)
+ self.versionstgz.add_file(
+ _mainteiners_index_filename,
+ json.dumps(self._mainteiners_index),
+ replace=True
+ )
return
def fetch_versions(self, tag, descriptions=False):
@@ -569,11 +629,19 @@
"consumes huge amount of disk space\n\n"
)
# if there is no such tag, parse it from repoinfo
- _f = self._versionsfile
logger_cli.info("# Fetching versions for {}".format(tag))
self.parse_tag(tag, descriptions=descriptions)
- logger_cli.info("-> saving updated versions to {}".format(_f))
- self.versionstgz.add_file(_f, json.dumps(self._versions), replace=True)
+ logger_cli.info("-> saving updated versions")
+ self.versionstgz.add_file(
+ _mirantis_versions_filename,
+ json.dumps(self._versions_mirantis),
+ replace=True
+ )
+ self.versionstgz.add_file(
+ _other_versions_filename,
+ json.dumps(self._versions_other),
+ replace=True
+ )
def build_repos(self, url, tag=None):
"""Builds versions data for selected tag, or for all of them
@@ -611,7 +679,14 @@
# Print pretty list and exit
logger_cli.info("# Tags available at '{}':".format(url))
for t in _all:
- logger_cli.info(get_tag_label(t))
+ _ri = self._repo_index
+ _isparsed = any(
+ [k for k, v in _ri.iteritems() if v['props']['tag'] == t]
+ )
+ if _isparsed:
+ logger_cli.info(get_tag_label(t, parsed=True))
+ else:
+ logger_cli.info(get_tag_label(t))
# exit
return
@@ -675,47 +750,57 @@
_rr = _p[_v][_md5].keys()
_rr.sort()
for _r in _rr:
- _o += " "*24 + _r.replace('_', ' ') + "\n"
+ _o += " "*24 + _r.replace('_', ' ')
+ _o += " ({})\n".format(_p[_v][_md5][_r]["mainteiner"])
logger_cli.info(_o)
- def get_package_versions(self, name, tagged=False):
+ def get_package_versions(self, name, mirantis=True, tagged=False):
"""Method builds package version structure
with repository properties included
"""
# get data
- if name in self._versions:
- _vs = self._versions[name]
+ if mirantis and name in self._versions_mirantis:
+ _vs = self._versions_mirantis[name]
+ elif not mirantis and name in self._versions_other:
+ _vs = self._versions_other[name]
else:
return {}
+
# insert repo data, insert props into headers place
_package = {}
if tagged:
for _v, _d1 in _vs.iteritems():
# use tag as a next step
- for _md5, _repos in _d1.iteritems():
- for _index in _repos:
+ for _md5, _indices in _d1.iteritems():
+ for _pair in _indices:
# extract props for a repo
- _repo_props = self._repo_index[_index]
+ _r, _m = self._get_indexed_values(_pair)
# get tag
- _tag = _repo_props["props"]["tag"]
+ _tag = _r["props"]["tag"]
# cut tag from the header
- _cut_head = _repo_props["header"].split("_", 1)[1]
+ _cut_head = _r["header"].split("_", 1)[1]
# populate dict
nested_set(
_package,
[_tag, _v, _cut_head, _md5],
- _repo_props["props"]
+ {
+ "repo": _r["props"],
+ "mainteiner": _m
+ }
)
else:
for _v, _d1 in _vs.iteritems():
- for _md5, _repos in _d1.iteritems():
- for _index in _repos:
- _repo_props = self._repo_index[_index]
+ for _md5, _indices in _d1.iteritems():
+ for _pair in _indices:
+ _r, _m = self._get_indexed_values(_pair)
nested_set(
_package,
- [_v, _md5, _repo_props["header"]],
- _repo_props["props"]
+ [_v, _md5, _r["header"]],
+ {
+ "repo": _r["props"],
+ "mainteiner": _m
+ }
)
return _package