import json
import os
from copy import deepcopy

from cfg_checker.common import logger, logger_cli, nested_set
from cfg_checker.common.const import _mainteiners_index_filename
from cfg_checker.common.const import _mirantis_versions_filename
from cfg_checker.common.const import _other_versions_filename
from cfg_checker.common.const import _pkg_desc_archive
from cfg_checker.common.const import _repos_index_filename
from cfg_checker.common.const import _repos_info_archive
from cfg_checker.common.const import _repos_versions_archive
from cfg_checker.common.const import ubuntu_releases
from cfg_checker.common.file_utils import get_gzipped_file
from cfg_checker.common.settings import pkg_dir
from cfg_checker.helpers.console_utils import Progress
from cfg_checker.helpers.tgz import TGZFile

import requests
from requests.exceptions import ConnectionError

ext = ".json"


def get_tag_label(_tag, parsed=False):
    # prettify the tag for printing
    if parsed:
        _label = "+ "
    else:
        _label = "  "

    if _tag.endswith(".update"):
        _label += "[updates] " + _tag.rsplit('.', 1)[0]
    elif _tag.endswith(".hotfix"):
        _label += " [hotfix] " + _tag.rsplit('.', 1)[0]
    else:
        _label += " "*10 + _tag

    return _label

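# A hedged usage sketch for get_tag_label (tag values are hypothetical;
# assumes the unparsed prefix is two spaces wide to align with "+ "):
#   get_tag_label("2019.2.6.update", parsed=True) -> "+ [updates] 2019.2.6"
#   get_tag_label("2019.2.6.hotfix")              -> "   [hotfix] 2019.2.6"
#   get_tag_label("2019.2.6")                     -> "            2019.2.6"
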

def _get_value_index(_di, value, header=None):
    if header:
        if not filter(lambda i: _di[i]["header"] == header, _di):
            _index = str(len(_di.keys()) + 1)
            _di[_index] = {
                "header": header,
                "props": value
            }
        else:
            for _k, _v in _di.iteritems():
                if _v["header"] == header:
                    _index = _k

        return _index
    else:
        if not filter(lambda i: _di[i] == value, _di):
            _index = str(len(_di.keys()) + 1)
            _di[_index] = value
        else:
            for _k, _v in _di.iteritems():
                if _v == value:
                    _index = _k

        return _index

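# A sketch of the reverse index kept by _get_value_index (values are
# hypothetical): records live under stringified numeric keys and the key
# is returned, so large values can be referenced by a short index:
#   {"1": {"header": "tag_subset_release_...", "props": {...}},
#    "2": {"header": "...", "props": {...}}}
# Without 'header', the value itself is stored:
#   {"1": "John Doe <jdoe@example.com>"}
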

def _safe_load(_f, _a):
    if _f in _a.list_files():
        logger_cli.info(
            "# Loading '{}':'{}'".format(
                _a.basefile,
                _f
            )
        )
        return json.loads(_a.get_file(_f))
    else:
        return {}


def _n_url(url):
    if url[-1] == '/':
        return url
    else:
        return url + '/'


class ReposInfo(object):
    repos = []
    _repofile = os.path.join(pkg_dir, "versions", _repos_info_archive)

    @staticmethod
    def _ls_repo_page(url):
        # Yes, this is ugly. But it works ok for small HTMLs.
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            logger_cli.error("# ERROR: {}".format(e.message))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs: anchor lines that end with '-'
        _dirs = [l[l.index(_s)+6:l.index(_e)-1]
                 for l in a if l.startswith(_a) and l.endswith('-')]
        # Comprehension for files: anchor lines that end with the file size
        _files = [l[l.index(_s)+6:l.index(_e)]
                  for l in a if l.startswith(_a) and not l.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        # recursive method to walk the dists tree
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

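    # A worked example for _map_repo (the path is hypothetical): for
    #   ".../2019.2.0/dists/xenial/main/binary-amd64/Packages.gz"
    # the reversed tail after 'dists' is
    #   ['Packages.gz', 'binary-amd64', 'main', 'xenial'],
    # which maps to arch='amd64' (the 'binary-' prefix is cut),
    # type='main' and ubuntu-release='xenial'.
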
    def _find_tag(self, _t, _u, label=""):
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'Packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            _tags.remove('hotfix')
            _tags.remove('update')
            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _tags.extend([t for t in _h_tags if t not in _tags])
            _tags.extend([t for t in _u_tags if t not in _tags])
            _progress = Progress(len(_tags))
            _index = 0
            for _tag in _tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ".json"
            if _repotgz.has_file(_name):
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start the search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases:
                    # if stag is the release, this is all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(_name, buf=json.dumps(_repo, indent=2))
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

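    # After fetch_repos() the archive holds one '<tag>.json' per tag; a
    # sketch of a payload, with hypothetical URLs, subtags and releases:
    #   {"baseurl": "http://mirror.example/2019.2.0/",
    #    "all": {"url": "...",
    #            "xenial": [{"arch": "amd64", "type": "main",
    #                        "ubuntu-release": "xenial",
    #                        "filepath": ".../Packages.gz"}, ...]},
    #    "extra": {"url": "...", "xenial": [...]}}
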
    def list_tags(self, splitted=False):
        _files = TGZFile(self._repofile).list_files()
        # all files in archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that end with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that end with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # dynamic import
            import re
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall('\\d+', x)[:3])
            )

            return _dig + _lex

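    # Sorting sketch for splitted=False (hypothetical tags):
    #   ['nightly', '2019.2.10', '2019.2.2'] comes back as
    #   ['2019.2.2', '2019.2.10', 'nightly'] -- numeric tags are ordered by
    # up to their first three number groups, lexical tags go last.
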
    def get_repoinfo(self, tag):
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext)
        return json.loads(_buf)


class RepoManager(object):
    # archives
    _versions_arch = os.path.join(pkg_dir, "versions", _repos_versions_archive)
    _desc_arch = os.path.join(pkg_dir, "versions", _pkg_desc_archive)

    # repository index
    _repo_index = {}
    _mainteiners_index = {}

    # init package versions storage
    _versions_mirantis = {}
    _versions_other = {}

    def __init__(self):
        # Init version files
        self.versionstgz = TGZFile(
            self._versions_arch,
            label="MCP Configuration Checker: Package versions archive"
        )
        self.desctgz = TGZFile(
            self._desc_arch,
            label="MCP Configuration Checker: Package descriptions archive"
        )
        # indices
        self._repo_index = _safe_load(
            _repos_index_filename,
            self.versionstgz
        )
        self._mainteiners_index = _safe_load(
            _mainteiners_index_filename,
            self.versionstgz
        )

        # versions
        self._versions_mirantis = _safe_load(
            _mirantis_versions_filename,
            self.versionstgz
        )
        self._versions_other = _safe_load(
            _other_versions_filename,
            self.versionstgz
        )

    def _create_repo_header(self, p):
        _header = "_".join([
            p['tag'],
            p['subset'],
            p['release'],
            p['ubuntu-release'],
            p['type'],
            p['arch']
        ])
        return _get_value_index(self._repo_index, p, header=_header)
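
    # A hypothetical header built above could look like
    #   "2019.2.0_extra_xenial_xenial_main_amd64"
    # (tag, subset, release, ubuntu-release, type, arch); the returned
    # value is the short index of that header in self._repo_index.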

    def _get_indexed_values(self, pair):
        _h, _m = pair.split('-')
        return self._repo_index[_h], self._mainteiners_index[_m]

    def _update_pkg_version(self, _d, n, v, md5, h_index, m_index):
        """Method updates package version record in global dict
        """
        # 'if'*4 operation is pretty expensive when using it 100k in a row
        # so try/except is a better way to go, even faster than 'reduce'
        _pair = "-".join([h_index, m_index])
        try:
            # try to load list
            _list = _d[n][v][md5]
            # cast it as set() and union()
            _list = set(_list).union([_pair])
            # cast back as set() is not serializable
            _d[n][v][md5] = list(_list)
            return False
        except KeyError:
            # ok, this is a fresh pkg. Do it the slow way.
            if n in _d:
                # there is such pkg already
                if v in _d[n]:
                    # there is such version, check md5
                    if md5 in _d[n][v]:
                        # just add new repo header
                        if _pair not in _d[n][v][md5]:
                            _d[n][v][md5].append(_pair)
                    else:
                        # check if such index is here...
                        _existing = filter(
                            lambda i: _pair in _d[n][v][i],
                            _d[n][v]
                        )
                        if _existing:
                            # Yuck! Same version had different MD5
                            _r, _m = self._get_indexed_values(_pair)
                            logger_cli.error(
                                "# ERROR: Package version has multiple MD5s "
                                "in '{}': {}:{}:{}".format(
                                    _r,
                                    n,
                                    v,
                                    md5
                                )
                            )
                        _d[n][v][md5] = [_pair]
                else:
                    # this is a new version for an existing package
                    _d[n][v] = {
                        md5: [_pair]
                    }
                return False
            else:
                # this is a new package
                _d[n] = {
                    v: {
                        md5: [_pair]
                    }
                }
                return True
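
    # A sketch of the dict maintained above (name and hash hypothetical):
    #   _d["nova-common"]["2:17.0.0-1"]["<md5>"] = ["3-1", "7-1", ...]
    # where each "h-m" pair packs a repo-header index and a maintainer
    # index, unpacked later by _get_indexed_values().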

    def _save_repo_descriptions(self, repo_props, desc):
        # form the filename for the repo and save it
        self.desctgz.add_file(
            self._create_repo_header(repo_props),
            json.dumps(desc)
        )

    # def get_description(self, repo_props, name, md5=None):
    #     """Gets target description
    #     """
    #     _filename = self._create_repo_header(repo_props)
    #     # check if it is present in cache
    #     if _filename in self._desc_cache:
    #         _descs = self._desc_cache[_filename]
    #     else:
    #         # load data
    #         _descs = self.desctgz.get_file(_filename)
    #         # Serialize it
    #         _descs = json.loads(_descs)
    #         self._desc_cache[_filename] = _descs
    #     # return target desc
    #     if name in _descs and md5 in _descs[name]:
    #         return _descs[name][md5]
    #     else:
    #         return None

    def parse_tag(self, tag, descriptions=False):
        """Download and parse Packages.gz files for a specific tag.
        By default, descriptions are not saved
        due to huge resulting file size and slow processing
        """
        # init gzip and downloader
        _info = ReposInfo().get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        for _c, _d in _info.iteritems():
            for _ur, _l in _d.iteritems():
                if _ur in ubuntu_releases:
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            "  '{}', {} components, {} ubuntu repos, {} other/unknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.iteritems():
            # we do not need the url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url = if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.iteritems():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    _lines = _raw.splitlines()
                    _index += 1
                    # break the lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
                    for _line in _lines:
                        if not _line:
                            # if the line is empty, process pkg data gathered
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            _mainteiner = _desc['maintainer']

                            # Check if the maintainer is Mirantis
                            if _mainteiner.endswith("@mirantis.com>"):
                                # update mirantis versions
                                if self._update_pkg_version(
                                    self._versions_mirantis,
                                    _name,
                                    _version,
                                    _md5,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1
                            else:
                                # update other versions
                                if self._update_pkg_version(
                                    self._versions_other,
                                    _name,
                                    _version,
                                    _md5,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for the next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            _key, _value = _line.split(': ', 1)
                            _key = _key.lower()

                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            _repos_index_filename,
            json.dumps(self._repo_index),
            replace=True
        )
        self.versionstgz.add_file(
            _mainteiners_index_filename,
            json.dumps(self._mainteiners_index),
            replace=True
        )
        return
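
    # parse_tag() expects the standard Debian Packages stanza layout,
    # e.g. (an abridged, hypothetical record):
    #   Package: nova-common
    #   Version: 2:17.0.0-1
    #   Maintainer: Some One <someone@mirantis.com>
    #   MD5sum: 0123456789abcdef0123456789abcdef
    # with continuation lines starting with a space and an empty line
    # terminating each record.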

    def fetch_versions(self, tag, descriptions=False):
        """Executes parsing for specific tag
        """
        if descriptions:
            logger_cli.warning(
                "\n\n# !!! WARNING: Saving repo descriptions "
                "consumes huge amount of disk space\n\n"
            )
        # if there is no such tag, parse it from repoinfo
        logger_cli.info("# Fetching versions for {}".format(tag))
        self.parse_tag(tag, descriptions=descriptions)
        logger_cli.info("-> saving updated versions")
        self.versionstgz.add_file(
            _mirantis_versions_filename,
            json.dumps(self._versions_mirantis),
            replace=True
        )
        self.versionstgz.add_file(
            _other_versions_filename,
            json.dumps(self._versions_other),
            replace=True
        )

    def build_repos(self, url, tag=None):
        """Builds versions data for the selected tag, or for all of them
        """
        # Init the ReposInfo class and check if all files are present
        _repos = ReposInfo()
        # recursively walk the mirrors
        # and gather all of the repos for 'tag' or all of the tags
        _repos.fetch_repos(url, tag=tag)

    def _build_action(self, url, tags):
        for t in tags:
            logger_cli.info(
                "# Building repo info for '{}/{}'".format(
                    url,
                    t
                )
            )
            self.build_repos(url, tag=t)

    def action_for_tag(
        self,
        url,
        tag,
        action=None,
        descriptions=None
    ):
        """Executes action for every tag from all collections
        """
        if not action:
            logger_cli.info("# No action set, nothing to do")
            return
        # See if this is a list action
        if action == "list":
            _all = ReposInfo().list_tags()
            # Print pretty list and exit
            logger_cli.info("# Tags available at '{}':".format(url))
            for t in _all:
                _ri = self._repo_index
                _isparsed = any(
                    [k for k, v in _ri.iteritems() if v['props']['tag'] == t]
                )
                if _isparsed:
                    logger_cli.info(get_tag_label(t, parsed=True))
                else:
                    logger_cli.info(get_tag_label(t))
            # exit
            return

        # Populate action tags
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
        _action_tags = []
        if tag in major:
            _action_tags.append(tag)
        if tag in updates:
            _action_tags.append(tag + ".update")
        if tag in hotfix:
            _action_tags.append(tag + ".hotfix")
        # Check if any tags were collected
        if not _action_tags:
            logger_cli.info(
                "# Tag of '{}' not found. "
                "Consider rebuilding repos info.".format(tag)
            )
        else:
            logger_cli.info(
                "-> tags to process: {}".format(
                    ", ".join(_action_tags)
                )
            )
            # Execute actions
            if action == "build":
                self._build_action(url, _action_tags)
            elif action == "fetch":
                for t in _action_tags:
                    self.fetch_versions(t, descriptions=descriptions)

        logger_cli.info("# Done.")

    def show_package(self, name):
        # get the package data
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            #       \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            logger_cli.info("\n# Package: {}".format(name))
            _o = ""
            # get and sort versions
            _vs = _p.keys()
            _vs.sort()
            for _v in _vs:
                _o += "\n" + " "*8 + _v + ':\n'
                # get and sort md5 sums
                _mds = _p[_v].keys()
                _mds.sort()
                for _md5 in _mds:
                    _o += " "*16 + _md5 + "\n"
                    # get and sort repo headers
                    _rr = _p[_v][_md5].keys()
                    _rr.sort()
                    for _r in _rr:
                        _o += " "*24 + _r.replace('_', ' ')
                        _o += " ({})\n".format(_p[_v][_md5][_r]["mainteiner"])

            logger_cli.info(_o)

    def get_package_versions(self, name, mirantis=True, tagged=False):
        """Method builds package version structure
        with repository properties included
        """
        # get data
        if mirantis and name in self._versions_mirantis:
            _vs = self._versions_mirantis[name]
        elif not mirantis and name in self._versions_other:
            _vs = self._versions_other[name]
        else:
            return {}

        # insert repo data, insert props into headers place
        _package = {}
        if tagged:
            for _v, _d1 in _vs.iteritems():
                # use tag as a next step
                for _md5, _indices in _d1.iteritems():
                    for _pair in _indices:
                        # extract props for a repo
                        _r, _m = self._get_indexed_values(_pair)
                        # get tag
                        _tag = _r["props"]["tag"]
                        # cut tag from the header
                        _cut_head = _r["header"].split("_", 1)[1]
                        # populate dict
                        nested_set(
                            _package,
                            [_tag, _v, _cut_head, _md5],
                            {
                                "repo": _r["props"],
                                "mainteiner": _m
                            }
                        )
        else:
            for _v, _d1 in _vs.iteritems():
                for _md5, _indices in _d1.iteritems():
                    for _pair in _indices:
                        _r, _m = self._get_indexed_values(_pair)
                        nested_set(
                            _package,
                            [_v, _md5, _r["header"]],
                            {
                                "repo": _r["props"],
                                "mainteiner": _m
                            }
                        )

        return _package
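
    # A sketch of the structure returned above (keys are hypothetical):
    #   tagged=False:
    #     {version: {md5: {header: {"repo": {...}, "mainteiner": "..."}}}}
    #   tagged=True (tag pulled out, header shortened by cutting the tag):
    #     {tag: {version: {header_sans_tag: {md5: {"repo": {...},
    #                                              "mainteiner": "..."}}}}}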

    def parse_repos(self):
        # all tags to check
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)

        # major tags
        logger_cli.info("# Processing major tags")
        for _tag in major:
            self.fetch_versions(_tag)

        # updates tags
        logger_cli.info("# Processing update tags")
        for _tag in updates:
            self.fetch_versions(_tag + ".update")

        # hotfix tags
        logger_cli.info("# Processing hotfix tags")
        for _tag in hotfix:
            self.fetch_versions(_tag + ".hotfix")
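
# A minimal usage sketch, assuming the entry points above (the URL, tag
# and package name are hypothetical):
#   rm = RepoManager()
#   rm.action_for_tag("http://mirror.example/", "2019.2.0", action="build")
#   rm.action_for_tag("http://mirror.example/", "2019.2.0", action="fetch")
#   rm.show_package("nova-common")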