import json
import os
from copy import deepcopy

from cfg_checker.common import logger, logger_cli, nested_set
from cfg_checker.common.const import _mainteiners_index_filename
from cfg_checker.common.const import _mirantis_versions_filename
from cfg_checker.common.const import _other_versions_filename
from cfg_checker.common.const import _pkg_desc_archive
from cfg_checker.common.const import _repos_index_filename
from cfg_checker.common.const import _repos_info_archive
from cfg_checker.common.const import _repos_versions_archive
from cfg_checker.common.const import ubuntu_releases
from cfg_checker.common.file_utils import get_gzipped_file
from cfg_checker.common.settings import pkg_dir
from cfg_checker.helpers.console_utils import Progress
from cfg_checker.helpers.tgz import TGZFile

import requests
from requests.exceptions import ConnectionError

ext = ".json"


def get_tag_label(_tag, parsed=False):
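    """Return a printable label for a tag; a leading '+' marks it parsed.

    Illustrative example (tag value is a placeholder):
        get_tag_label("2019.2.0.update", parsed=True) -> '+ [updates] 2019.2.0'
    """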
    # prettify the tag for printing
    if parsed:
        _label = "+ "
    else:
        _label = " "

    if _tag.endswith(".update"):
        _label += "[updates] " + _tag.rsplit('.', 1)[0]
    elif _tag.endswith(".hotfix"):
        _label += " [hotfix] " + _tag.rsplit('.', 1)[0]
    else:
        _label += " "*10 + _tag

    return _label


def _get_value_index(_di, value, header=None):
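    """Return the index key of 'value' in the _di index dict, adding the
    value under a new key when it is not there yet. Keys are ordinals
    cast to unicode; with 'header' given, entries are stored as
    {_index: {"header": header, "props": value}} instead of {_index: value}.
    """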
    # Maintainer names often use specific chars
    # so make sure the value saved is unicode, not str
    _val = unicode(value, 'utf-8') if isinstance(value, str) else value
    if header:
        if not filter(lambda i: _di[i]["header"] == header, _di):
            _index = unicode(len(_di.keys()) + 1)
            _di[_index] = {
                "header": header,
                "props": _val
            }
        else:
            for _k, _v in _di.iteritems():
                if _v["header"] == header:
                    _index = _k

        return _index
    else:
        if not filter(lambda i: _di[i] == _val, _di):
            _index = unicode(len(_di.keys()) + 1)
            # on save, cast it as unicode
            _di[_index] = _val
        else:
            for _k, _v in _di.iteritems():
                if _v == _val:
                    _index = _k

        return _index


def _safe_load(_f, _a):
    if _f in _a.list_files():
        logger_cli.info(
            "# Loading '{}':'{}'".format(
                _a.basefile,
                _f
            )
        )
        return json.loads(_a.get_file(_f))
    else:
        return {}


def _n_url(url):
    if url[-1] == '/':
        return url
    else:
        return url + '/'


class ReposInfo(object):
    repos = []
    _repofile = os.path.join(pkg_dir, "versions", _repos_info_archive)

    @staticmethod
    def _ls_repo_page(url):
        # Yes, this is ugly. But it works ok for small HTMLs.
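        # Assumed anchor line shape in the listing (illustrative):
        #   <a href="xenial/">xenial/</a>  07-May-2019 12:00    -
        # dir rows end with '-' in the size column, file rows with a size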
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            logger_cli.error("# ERROR: {}".format(e.message))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs. Dir anchor lines end with '-'
        _dirs = [l[l.index(_s)+6:l.index(_e)-1]
                 for l in a if l.startswith(_a) and l.endswith('-')]
        # Comprehension for files. File anchor lines end with a size
        _files = [l[l.index(_s)+6:l.index(_e)]
                  for l in a if l.startswith(_a) and not l.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        # recursive method to walk the dists tree
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
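            # e.g. '<base>/dists/xenial/main/binary-amd64/Packages.gz'
            # reversed gives ['Packages.gz', 'binary-amd64', 'main', 'xenial'],
            # i.e. arch='amd64', type='main', ubuntu-release='xenial'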
            _repo_item = {
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

    def _find_tag(self, _t, _u, label=""):
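        """Check if tag '_t' is listed at '_u', optionally under a
        'label' subfolder ('hotfix' or 'update'). Returns a stub like
        {"<tag>[.<label>]": {"baseurl": ..., "all": {}}}, or {} if absent.
        """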
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'Packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            _tags.remove('hotfix')
            _tags.remove('update')
            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _tags.extend([t for t in _h_tags if t not in _tags])
            _tags.extend([t for t in _u_tags if t not in _tags])
            _progress = Progress(len(_tags))
            _index = 0
            for _tag in _tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ext
            if _repotgz.has_file(_name):
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases:
                    # if stag is the release, this is all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(_name, buf=json.dumps(_repo, indent=2))
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

    def list_tags(self, splitted=False):
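        """List tags stored in the repo info archive.

        With splitted=True returns (major, updates, hotfix) collections;
        otherwise a single list sorted numerically first, then lexically.
        """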
        _files = TGZFile(self._repofile).list_files()
        # all files in archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that end with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that end with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # dynamic import
            import re
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall('\\d+', x)[:3])
            )

            return _dig + _lex

    def get_repoinfo(self, tag):
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext)
        return json.loads(_buf)


class RepoManager(object):
    # archives
    _versions_arch = os.path.join(pkg_dir, "versions", _repos_versions_archive)
    _desc_arch = os.path.join(pkg_dir, "versions", _pkg_desc_archive)

    # repository index
    _repo_index = {}
    _mainteiners_index = {}

    # init package versions storage
    _versions_mirantis = {}
    _versions_other = {}

    def __init__(self):
        # Init version files
        self.versionstgz = TGZFile(
            self._versions_arch,
            label="MCP Configuration Checker: Package versions archive"
        )
        self.desctgz = TGZFile(
            self._desc_arch,
            label="MCP Configuration Checker: Package descriptions archive"
        )
        # indices
        self._repo_index = _safe_load(
            _repos_index_filename,
            self.versionstgz
        )
        self._mainteiners_index = _safe_load(
            _mainteiners_index_filename,
            self.versionstgz
        )

        # versions
        self._versions_mirantis = _safe_load(
            _mirantis_versions_filename,
            self.versionstgz
        )
        self._versions_other = _safe_load(
            _other_versions_filename,
            self.versionstgz
        )

    def _create_repo_header(self, p):
        _header = "_".join([
            p['tag'],
            p['subset'],
            p['release'],
            p['ubuntu-release'],
            p['type'],
            p['arch']
        ])
        return _get_value_index(self._repo_index, p, header=_header)

    def _get_indexed_values(self, pair):
        _h, _m = pair.split('-')
        return self._repo_index[_h], self._mainteiners_index[_m]

    def _update_pkg_version(self, _d, n, v, md5, h_index, m_index):
        """Method updates package version record in global dict
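
        Assumed record shape, as maintained by the code below:
            _d[name][version][md5] = ["<repo_index>-<mainteiner_index>", ...]
        Returns True only when a completely new package is added.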
        """
        # 'if'*4 operation is pretty expensive when using it 100k in a row
        # so try/except is a better way to go, even faster than 'reduce'
        _pair = "-".join([h_index, m_index])
        try:
            # try to load list
            _list = _d[n][v][md5]
            # cast it as set() and union()
            _list = set(_list).union([_pair])
            # cast back as set() is not serializable
            _d[n][v][md5] = list(_list)
            return False
        except KeyError:
            # ok, this is a fresh pkg. Do it the slow way.
            if n in _d:
                # there is such pkg already
                if v in _d[n]:
                    # there is such version, check md5
                    if md5 in _d[n][v]:
                        # just add new repo header
                        if _pair not in _d[n][v][md5]:
                            _d[n][v][md5].append(_pair)
                    else:
                        # check if such index is here...
                        _existing = filter(
                            lambda i: _pair in _d[n][v][i],
                            _d[n][v]
                        )
                        if _existing:
                            # Yuck! Same version had different MD5
                            _r, _m = self._get_indexed_values(_pair)
                            logger_cli.error(
                                "# ERROR: Package version has multiple MD5s "
                                "in '{}': {}:{}:{}".format(
                                    _r,
                                    n,
                                    v,
                                    md5
                                )
                            )
                        _d[n][v][md5] = [_pair]
                else:
                    # this is a new version for an existing package
                    _d[n][v] = {
                        md5: [_pair]
                    }
                return False
            else:
                # this is a new package
                _d[n] = {
                    v: {
                        md5: [_pair]
                    }
                }
                return True

    def _save_repo_descriptions(self, repo_props, desc):
        # form the filename for the repo and save it
        self.desctgz.add_file(
            self._create_repo_header(repo_props),
            json.dumps(desc)
        )

    # def get_description(self, repo_props, name, md5=None):
    #     """Gets target description
    #     """
    #     _filename = self._create_repo_header(repo_props)
    #     # check if it is present in cache
    #     if _filename in self._desc_cache:
    #         _descs = self._desc_cache[_filename]
    #     else:
    #         # load data
    #         _descs = self.desctgz.get_file(_filename)
    #         # Serialize it
    #         _descs = json.loads(_descs)
    #         self._desc_cache[_filename] = _descs
    #     # return target desc
    #     if name in _descs and md5 in _descs[name]:
    #         return _descs[name][md5]
    #     else:
    #         return None

    def parse_tag(self, tag, descriptions=False):
        """Download and parse Packages.gz files for a specific tag
        By default, descriptions are not saved
        due to huge resulting file size and slow processing
        """
        # init gzip and downloader
        _info = ReposInfo().get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        for _c, _d in _info.iteritems():
            for _ur, _l in _d.iteritems():
                if _ur in ubuntu_releases:
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            "   '{}', {} components, {} ubuntu repos, {} other/unknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.iteritems():
            # we do not need url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url = if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.iteritems():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    _lines = _raw.splitlines()
                    _index += 1
                    # break lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
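                    # Packages.gz holds 'Key: value' stanzas separated by
                    # empty lines; continuation lines start with a space.
                    # Illustrative stanza (values are placeholders):
                    #   Package: nova-common
                    #   Version: 2:17.0.0-1
                    #   MD5sum: 0123456789abcdef0123456789abcdef
                    #   Maintainer: Jane Doe <jdoe@mirantis.com>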
                    for _line in _lines:
                        if not _line:
                            # if the line is empty, process pkg data gathered
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            _mainteiner = _desc['maintainer']

                            # Check if the maintainer is Mirantis
                            if _mainteiner.endswith("@mirantis.com>"):
                                # update mirantis versions
                                if self._update_pkg_version(
                                    self._versions_mirantis,
                                    _name,
                                    _version,
                                    _md5,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1
                            else:
                                # update other versions
                                if self._update_pkg_version(
                                    self._versions_other,
                                    _name,
                                    _version,
                                    _md5,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            _key, _value = _line.split(': ', 1)
                            _key = _key.lower()

                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            _repos_index_filename,
            json.dumps(self._repo_index),
            replace=True
        )
        self.versionstgz.add_file(
            _mainteiners_index_filename,
            json.dumps(self._mainteiners_index),
            replace=True
        )
        return

    def fetch_versions(self, tag, descriptions=False):
        """Executes parsing for a specific tag
        """
        if descriptions:
            logger_cli.warning(
                "\n\n# !!! WARNING: Saving repo descriptions "
                "consumes a huge amount of disk space\n\n"
            )
        # if there is no such tag, parse it from repoinfo
        logger_cli.info("# Fetching versions for {}".format(tag))
        self.parse_tag(tag, descriptions=descriptions)
        logger_cli.info("-> saving updated versions")
        self.versionstgz.add_file(
            _mirantis_versions_filename,
            json.dumps(self._versions_mirantis),
            replace=True
        )
        self.versionstgz.add_file(
            _other_versions_filename,
            json.dumps(self._versions_other),
            replace=True
        )

    def build_repos(self, url, tag=None):
        """Builds versions data for the selected tag, or for all of them
        """
        # Init the ReposInfo class and check if all files are present
        _repos = ReposInfo()
        # recursively walk the mirrors
        # and gather all of the repos for 'tag' or all of the tags
        _repos.fetch_repos(url, tag=tag)

    def _build_action(self, url, tags):
        for t in tags:
            logger_cli.info(
                "# Building repo info for '{}/{}'".format(
                    url,
                    t
                )
            )
            self.build_repos(url, tag=t)

    def action_for_tag(
        self,
        url,
        tag,
        action=None,
        descriptions=None
    ):
        """Executes action for every tag from all collections
        """
        if not action:
            logger_cli.info("# No action set, nothing to do")
            return
        # See if this is a list action
        if action == "list":
            _all = ReposInfo().list_tags()
            # Print pretty list and exit
            logger_cli.info("# Tags available at '{}':".format(url))
            for t in _all:
                _ri = self._repo_index
                _isparsed = any(
                    [k for k, v in _ri.iteritems() if v['props']['tag'] == t]
                )
                if _isparsed:
                    logger_cli.info(get_tag_label(t, parsed=True))
                else:
                    logger_cli.info(get_tag_label(t))
            # exit
            return

        # Populate action tags
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
        _action_tags = []
        if tag in major:
            _action_tags.append(tag)
        if tag in updates:
            _action_tags.append(tag + ".update")
        if tag in hotfix:
            _action_tags.append(tag + ".hotfix")
        # Check if any tags were collected
        if not _action_tags:
            logger_cli.info(
                "# Tag of '{}' not found. "
                "Consider rebuilding repos info.".format(tag)
            )
        else:
            logger_cli.info(
                "-> tags to process: {}".format(
                    ", ".join(_action_tags)
                )
            )
            # Execute actions
            if action == "build":
                self._build_action(url, _action_tags)
            elif action == "fetch":
                for t in _action_tags:
                    self.fetch_versions(t, descriptions=descriptions)

        logger_cli.info("# Done.")

    def show_package(self, name):
        # get the package data
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            #       \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            logger_cli.info("\n# Package: {}".format(name))
            _o = ""
            # get and sort versions
            _vs = _p.keys()
            _vs.sort()
            for _v in _vs:
                _o += "\n" + " "*8 + _v + ':\n'
                # get and sort md5 sums
                _mds = _p[_v].keys()
                _mds.sort()
                for _md5 in _mds:
                    _o += " "*16 + _md5 + "\n"
                    # get and sort repo headers
                    _rr = _p[_v][_md5].keys()
                    _rr.sort()
                    for _r in _rr:
                        _o += " "*24 + _r.replace('_', ' ')
                        _o += " ({})\n".format(_p[_v][_md5][_r]["mainteiner"])

            logger_cli.info(_o)

    def get_package_versions(self, name, mirantis=True, tagged=False):
        """Method builds package version structure
        with repository properties included
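
        Returned shape (tagged=False), as assembled below:
            {version: {md5: {repo_header: {"repo": ..., "mainteiner": ...}}}}
        With tagged=True the tag becomes the top-level key:
            {tag: {version: {header_without_tag: {md5: {...}}}}}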
        """
        # get data
        if mirantis and name in self._versions_mirantis:
            _vs = self._versions_mirantis[name]
        elif not mirantis and name in self._versions_other:
            _vs = self._versions_other[name]
        else:
            return {}

        # insert repo data, insert props into headers place
        _package = {}
        if tagged:
            for _v, _d1 in _vs.iteritems():
                # use tag as a next step
                for _md5, _indices in _d1.iteritems():
                    for _pair in _indices:
                        # extract props for a repo
                        _r, _m = self._get_indexed_values(_pair)
                        # get tag
                        _tag = _r["props"]["tag"]
                        # cut tag from the header
                        _cut_head = _r["header"].split("_", 1)[1]
                        # populate dict
                        nested_set(
                            _package,
                            [_tag, _v, _cut_head, _md5],
                            {
                                "repo": _r["props"],
                                "mainteiner": _m
                            }
                        )
        else:
            for _v, _d1 in _vs.iteritems():
                for _md5, _indices in _d1.iteritems():
                    for _pair in _indices:
                        _r, _m = self._get_indexed_values(_pair)
                        nested_set(
                            _package,
                            [_v, _md5, _r["header"]],
                            {
                                "repo": _r["props"],
                                "mainteiner": _m
                            }
                        )

        return _package

    def parse_repos(self):
        # all tags to check
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)

        # major tags
        logger_cli.info("# Processing major tags")
        for _tag in major:
            self.fetch_versions(_tag)

        # updates tags
        logger_cli.info("# Processing update tags")
        for _tag in updates:
            self.fetch_versions(_tag + ".update")

        # hotfix tags
        logger_cli.info("# Processing hotfix tags")
        for _tag in hotfix:
            self.fetch_versions(_tag + ".hotfix")
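

# Typical usage sketch (illustrative; the URL and tag are placeholders):
#   _rm = RepoManager()
#   _rm.action_for_tag("http://mirror.example/", "2019.2.0", action="build")
#   _rm.action_for_tag("http://mirror.example/", "2019.2.0", action="fetch")
#   _rm.show_package("nova-common")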