Coverage for cfg_checker/modules/packages/repos.py : 12%

import json
import os
import re
from copy import deepcopy

from cfg_checker.common import logger, logger_cli, nested_set
from cfg_checker.common.const import _mainteiners_index_filename
from cfg_checker.common.const import _mirantis_versions_filename
from cfg_checker.common.const import _other_versions_filename
from cfg_checker.common.const import _pkg_desc_archive
from cfg_checker.common.const import _repos_index_filename
from cfg_checker.common.const import _repos_info_archive
from cfg_checker.common.const import _repos_versions_archive
from cfg_checker.common.const import ubuntu_releases
from cfg_checker.common.file_utils import ensure_folder_exists
from cfg_checker.common.file_utils import get_gzipped_file
from cfg_checker.common.settings import pkg_dir
from cfg_checker.helpers.console_utils import Progress
from cfg_checker.helpers.tgz import TGZFile

import requests
from requests.exceptions import ConnectionError

ext = ".json"


def get_tag_label(_tag, parsed=False):
    # prettify the tag for printing
    if parsed:
        _label = "+ "
    else:
        _label = "  "

    if _tag.endswith(".update"):
        _label += "[updates] " + _tag.rsplit('.', 1)[0]
    elif _tag.endswith(".hotfix"):
        _label += " [hotfix] " + _tag.rsplit('.', 1)[0]
    else:
        _label += " "*10 + _tag

    return _label
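
# Example of the resulting labels (assuming a hypothetical tag "2019.2.8"):
#   "+ [updates] 2019.2.8"    for a parsed "2019.2.8.update"
#   "   [hotfix] 2019.2.8"    for an unparsed "2019.2.8.hotfix"
#   "+           2019.2.8"    for a parsed major tag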


def _get_value_index(_di, value, header=None):
    # Maintainer names often use specific chars,
    # so make sure the saved value is str, not bytes
    _val = value.decode('utf-8') if isinstance(value, bytes) else value
    if header:
        if not any(_di[i]["header"] == header for i in _di):
            _index = str(len(_di.keys()) + 1)
            _di[_index] = {
                "header": header,
                "props": _val
            }
        else:
            for _k, _v in _di.items():
                if _v["header"] == header:
                    _index = _k

        return _index
    else:
        if not any(_di[i] == _val for i in _di):
            _index = str(len(_di.keys()) + 1)
            # on save, cast it as str
            _di[_index] = _val
        else:
            for _k, _v in _di.items():
                if _v == _val:
                    _index = _k

        return _index
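
# Sketch of the intended de-duplication (hypothetical values):
#   _idx = {}
#   _get_value_index(_idx, "John <j@example.com>")  # -> "1", stored
#   _get_value_index(_idx, "John <j@example.com>")  # -> "1", reused
# so a repeated maintainer or repo header maps to one stable string index.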


def _safe_load(_f, _a):
    if _f in _a.list_files():
        logger_cli.debug(
            "... loading '{}':'{}'".format(
                _a.basefile,
                _f
            )
        )
        return json.loads(_a.get_file(_f))
    else:
        return {}


def _n_url(url):
    if url[-1] == '/':
        return url
    else:
        return url + '/'


class ReposInfo(object):
    repos = []
    _repofile = os.path.join(pkg_dir, "versions", _repos_info_archive)

    @staticmethod
    def _ls_repo_page(url):
        # Yes, this is ugly. But it works ok for small HTMLs.
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            logger_cli.error("# ERROR: {}".format(e))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs. Anchor lines for dirs end with '-'
        _dirs = [_l[_l.index(_s)+6:_l.index(_e)-1]
                 for _l in a if _l.startswith(_a) and _l.endswith('-')]
        # Comprehension for files. Anchor lines for files end with the size
        _files = [_l[_l.index(_s)+6:_l.index(_e)]
                  for _l in a if _l.startswith(_a) and not _l.endswith('-')]

        return _dirs, _files
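
    # The parser above assumes Apache-style index lines, e.g. (hypothetical):
    #   <a href="dists/">dists/</a>         01-Jan-2020 00:00    -
    #   <a href="Packages.gz">Packages.gz</a>  01-Jan-2020 00:00  1234
    # i.e. directory rows end with '-' where file rows end with a size.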

    def search_pkg(self, url, _list):
        # recursive method to walk the dists tree
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)
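
    # For a hypothetical path ".../dists/xenial/main/binary-amd64/Packages.gz"
    # the reversed keywords are
    #   ["Packages.gz", "binary-amd64", "main", "xenial"],
    # which maps to:
    #   {"arch": "amd64", "type": "main", "ubuntu-release": "xenial",
    #    "filepath": ".../dists/xenial/main/binary-amd64/Packages.gz"}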

    def _find_tag(self, _t, _u, label=""):
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'Packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            # the label folders are not tags themselves
            if 'hotfix' in _tags:
                _tags.remove('hotfix')
            if 'update' in _tags:
                _tags.remove('update')
            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _tags.extend([t for t in _h_tags if t not in _tags])
            _tags.extend([t for t in _u_tags if t not in _tags])
            _progress = Progress(len(_tags))
            _index = 0
            for _tag in _tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ext
            if _repotgz.has_file(_name):
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start the search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases:
                    # if the subtag is a release, it holds all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )
                else:
                    # each subtag might hold any ubuntu release,
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(_name, buf=json.dumps(_repo, indent=2))
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

    def list_tags(self, splitted=False):
        _files = TGZFile(self._repofile).list_files()
        # all files in the archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that end with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that end with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove update and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut the update and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall(r"\d+", x)[:3])
            )

            return _dig + _lex
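
    # The numeric sort key above turns, e.g., a hypothetical "2019.2.11"
    # into the tuple (2019, 2, 11), so "2019.2.11" sorts after "2019.2.2"
    # instead of before it, as a plain lexical sort would order them.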

    def get_repoinfo(self, tag):
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext)
        return json.loads(_buf)


class RepoManager(object):
    # archives
    _arch_folder = os.path.join(pkg_dir, "versions")
    _versions_arch = os.path.join(_arch_folder, _repos_versions_archive)
    _desc_arch = os.path.join(_arch_folder, _pkg_desc_archive)
    _apps_filename = "apps.json"

    # repository index
    _repo_index = {}
    _mainteiners_index = {}

    _apps = {}

    # init package versions storage
    _versions_mirantis = {}
    _versions_other = {}

    def __init__(self):
        # Ensure that the versions folder exists
        logger_cli.debug(ensure_folder_exists(self._arch_folder))
        # Init version files
        self.versionstgz = TGZFile(
            self._versions_arch,
            label="MCP Configuration Checker: Package versions archive"
        )
        self.desctgz = TGZFile(
            self._desc_arch,
            label="MCP Configuration Checker: Package descriptions archive"
        )

        # section / app
        self._apps = _safe_load(
            self._apps_filename,
            self.desctgz
        )

        # indices
        self._repo_index = _safe_load(
            _repos_index_filename,
            self.versionstgz
        )
        self._mainteiners_index = _safe_load(
            _mainteiners_index_filename,
            self.versionstgz
        )

        # versions
        self._versions_mirantis = _safe_load(
            _mirantis_versions_filename,
            self.versionstgz
        )
        self._versions_other = _safe_load(
            _other_versions_filename,
            self.versionstgz
        )

    def _create_repo_header(self, p):
        _header = "_".join([
            p['tag'],
            p['subset'],
            p['release'],
            p['ubuntu-release'],
            p['type'],
            p['arch']
        ])
        return _get_value_index(self._repo_index, p, header=_header)

    def _get_indexed_values(self, pair):
        _h, _m = pair.split('-')
        return self._repo_index[_h], self._mainteiners_index[_m]

    def _update_pkg_version(self, _d, n, v, md5, s, a, h_index, m_index):
        """Updates a package version record in the global dict
        """
        # an 'if'*4 chain is pretty expensive when run 100k times in a row,
        # so try/except is a better way to go, even faster than 'reduce'
        _pair = "-".join([h_index, m_index])
        _info = {
            'repo': [_pair],
            'section': s,
            'app': a
        }
        try:
            # try to load the list
            _list = _d[n][v][md5]['repo']
            # cast it as set() and union()
            _list = set(_list).union([_pair])
            # cast back, as set() is not serializable
            _d[n][v][md5]['repo'] = list(_list)
            return False
        except KeyError:
            # ok, this is a fresh pkg. Do it the slow way.
            if n in _d:
                # there is such a pkg already
                if v in _d[n]:
                    # there is such a version, check md5
                    if md5 in _d[n][v]:
                        # just add the new repo header
                        if _pair not in _d[n][v][md5]['repo']:
                            _d[n][v][md5]['repo'].append(_pair)
                    else:
                        # check if such an index is here...
                        _existing = any(
                            _pair in _d[n][v][i]['repo']
                            for i in _d[n][v]
                        )
                        if _existing:
                            # Yuck! The same version had different MD5s
                            _r, _m = self._get_indexed_values(_pair)
                            logger_cli.error(
                                "# ERROR: Package version has multiple MD5s "
                                "in '{}': {}:{}:{}".format(
                                    _r,
                                    n,
                                    v,
                                    md5
                                )
                            )
                        _d[n][v][md5] = _info
                else:
                    # this is a new version for an existing package
                    _d[n][v] = {
                        md5: _info
                    }
                return False
            else:
                # this is a new package
                _d[n] = {
                    v: {
                        md5: _info
                    }
                }
                return True
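
    # The resulting structure is name -> version -> md5 -> info, e.g.
    # (hypothetical values):
    #   _d["nova"]["2:17.0.0"]["<md5>"] = {
    #       "repo": ["3-12"],    # "<header index>-<maintainer index>" pairs
    #       "section": "net",
    #       "app": "nova"
    #   }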

    def _save_repo_descriptions(self, repo_props, desc):
        # form the filename for the repo and save it
        self.desctgz.add_file(
            self._create_repo_header(repo_props),
            json.dumps(desc)
        )

    # def get_description(self, repo_props, name, md5=None):
    #     """Gets target description
    #     """
    #     _filename = self._create_repo_header(repo_props)
    #     # check if it is present in cache
    #     if _filename in self._desc_cache:
    #         _descs = self._desc_cache[_filename]
    #     else:
    #         # load data
    #         _descs = self.desctgz.get_file(_filename)
    #         # Serialize it
    #         _descs = json.loads(_descs)
    #         self._desc_cache[_filename] = _descs
    #     # return target desc
    #     if name in _descs and md5 in _descs[name]:
    #         return _descs[name][md5]
    #     else:
    #         return None

    def parse_tag(self, tag, descriptions=False, apps=False):
        """Download and parse Packages.gz files for a specific tag.
        By default, descriptions are not saved
        due to the huge resulting file size and slow processing
        """
        # init gzip and downloader
        _info = ReposInfo().get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        for _c, _d in _info.items():
            for _ur, _l in _d.items():
                if _ur in ubuntu_releases:
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            " '{}', {} components, {} ubuntu repos, {} other/unknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.items():
            # we do not need the url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url = _d['url'] if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.items():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _index += 1
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, GET 'Packages.gz'".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch']
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    if not _raw:
                        # empty repo...
                        _progress.clearline()
                        logger_cli.warning(
                            "# WARNING: Empty file: '{}'".format(
                                _p['filepath']
                            )
                        )
                        continue
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _lines = _raw.splitlines()
                    # break the lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
                    # if there is no empty line at the end, add it
                    if _lines[-1] != '':
                        _lines.append('')
                    # Process lines
                    for _line in _lines:
                        if not _line:
                            # the line is empty: process the gathered pkg data
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            _mainteiner = _desc['maintainer']

                            if 'source' in _desc:
                                _ap = _desc['source'].lower()
                            else:
                                _ap = "-"

                            if apps:
                                # insert app
                                _sc = _desc['section'].lower()

                                try:
                                    _tmp = set(self._apps[_sc][_ap][_name])
                                    _tmp.add(_desc['architecture'])
                                    self._apps[_sc][_ap][_name] = list(_tmp)
                                except KeyError:
                                    nested_set(
                                        self._apps,
                                        [_sc, _ap, _name],
                                        [_desc['architecture']]
                                    )

                            # Check if the maintainer is Mirantis
                            if _mainteiner.endswith("@mirantis.com>"):
                                # update mirantis versions
                                if self._update_pkg_version(
                                    self._versions_mirantis,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1
                            else:
                                # update other versions
                                if self._update_pkg_version(
                                    self._versions_other,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for the next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            _key, _value = _line.split(': ', 1)
                            _key = _key.lower()

                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            _repos_index_filename,
            json.dumps(self._repo_index),
            replace=True
        )
        self.versionstgz.add_file(
            _mainteiners_index_filename,
            json.dumps(self._mainteiners_index),
            replace=True
        )
        if apps:
            self.desctgz.add_file(
                self._apps_filename,
                json.dumps(self._apps),
                replace=True
            )

        return
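
    # parse_tag() consumes the standard Debian Packages index format:
    # "Key: value" stanzas separated by empty lines, with continuation
    # lines indented by a space, e.g. (hypothetical values):
    #   Package: nova-common
    #   Version: 2:17.0.0-1
    #   Maintainer: Jane Doe <jane@mirantis.com>
    #   MD5sum: 0123456789abcdef0123456789abcdef
    #   Description: OpenStack Compute
    #    continuation line of the description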

    def fetch_versions(self, tag, descriptions=False, apps=False):
        """Executes parsing for a specific tag
        """
        if descriptions:
            logger_cli.warning(
                "\n\n# !!! WARNING: Saving repo descriptions "
                "consumes a huge amount of disk space\n\n"
            )
        # if there is no such tag, parse it from the repoinfo
        logger_cli.info("# Fetching versions for {}".format(tag))
        self.parse_tag(tag, descriptions=descriptions, apps=apps)
        logger_cli.info("-> saving updated versions")
        self.versionstgz.add_file(
            _mirantis_versions_filename,
            json.dumps(self._versions_mirantis),
            replace=True
        )
        self.versionstgz.add_file(
            _other_versions_filename,
            json.dumps(self._versions_other),
            replace=True
        )

    def build_repos(self, url, tag=None):
        """Builds versions data for the selected tag, or for all of them
        """
        # Init the ReposInfo class and check if all files are present
        _repos = ReposInfo()
        # recursively walk the mirrors
        # and gather all of the repos for 'tag' or all of the tags
        _repos.fetch_repos(url, tag=tag)

    def _build_action(self, url, tags):
        for t in tags:
            logger_cli.info("# Building repo info for '{}'".format(t))
            self.build_repos(url, tag=t)

    def get_available_tags(self, tag=None):
        # Populate action tags
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)

        _tags = []
        if tag in major:
            _tags.append(tag)
        if tag in updates:
            _tags.append(tag + ".update")
        if tag in hotfix:
            _tags.append(tag + ".hotfix")

        return _tags

    def action_for_tag(
        self,
        url,
        tag,
        action=None,
        descriptions=None,
        apps=None
    ):
        """Executes an action for every tag from all collections
        """
        if not action:
            logger_cli.info("# No action set, nothing to do")
            return
        # See if this is a list action
        if action == "list":
            _all = ReposInfo().list_tags()
            if _all:
                # Print a pretty list and exit
                logger_cli.info("# Tags available at '{}':".format(url))
                for t in _all:
                    _ri = self._repo_index
                    _isparsed = any(
                        [k for k, v in _ri.items()
                         if v['props']['tag'] == t]
                    )
                    if _isparsed:
                        logger_cli.info(get_tag_label(t, parsed=True))
                    else:
                        logger_cli.info(get_tag_label(t))
            else:
                logger_cli.info("# No tags parsed yet for '{}'".format(url))

            # exit
            return

        if action == "build":
            self._build_action(url, [tag])

        # Populate action tags
        _action_tags = self.get_available_tags(tag)

        if not _action_tags:
            logger_cli.info(
                "# Tag of '{}' not found. "
                "Consider rebuilding repos info.".format(tag)
            )
        else:
            logger_cli.info(
                "-> tags to process: {}".format(
                    ", ".join(_action_tags)
                )
            )
        # Execute actions
        if action == "fetch":
            for t in _action_tags:
                self.fetch_versions(t, descriptions=descriptions, apps=apps)

        logger_cli.info("# Done.")

    def show_package(self, name):
        # get the package data
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            #       \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            # section
            for _s in sorted(_p):
                # app
                for _a in sorted(_p[_s]):
                    _o = ""
                    _mm = []
                    # get and sort versions
                    for _v in sorted(_p[_s][_a]):
                        _o += "\n" + " "*8 + _v + ':\n'
                        # get and sort md5s
                        for _md5 in sorted(_p[_s][_a][_v]):
                            _o += " "*16 + _md5 + "\n"
                            # get and sort repo headers
                            for _r in sorted(_p[_s][_a][_v][_md5]):
                                _o += " "*24 + _r.replace('_', ' ') + '\n'
                                _m = _p[_s][_a][_v][_md5][_r]["maintainer"]
                                if _m not in _mm:
                                    _mm.append(_m)

                    logger_cli.info(
                        "\n# Package: {}/{}/{}\nMaintainers: {}".format(
                            _s,
                            _a,
                            name,
                            ", ".join(_mm)
                        )
                    )

                    logger_cli.info(_o)

    @staticmethod
    def get_apps(versions, name):
        _all = name == '*'
        _s_max = _a_max = _p_max = _v_max = 0
        _rows = []
        for _p in versions.keys():
            _vs = versions[_p]
            for _v, _d1 in _vs.items():
                for _md5, _info in _d1.items():
                    if _all or name == _info['app']:
                        _s_max = max(len(_info['section']), _s_max)
                        _a_max = max(len(_info['app']), _a_max)
                        _p_max = max(len(_p), _p_max)
                        _v_max = max(len(_v), _v_max)
                        _rows.append([
                            _info['section'],
                            _info['app'],
                            _p,
                            _v,
                            _md5,
                            len(_info['repo'])
                        ])
        # format columns
        # section
        _fmt = "{:" + str(_s_max) + "} "
        # app
        _fmt += "{:" + str(_a_max) + "} "
        # package name
        _fmt += "{:" + str(_p_max) + "} "
        # version
        _fmt += "{:" + str(_v_max) + "} "
        # md5 and number of repos are fixed width
        _fmt += "{} in {} repos"

        # fill rows
        _rows = [_fmt.format(s, a, p, v, m, n) for s, a, p, v, m, n in _rows]
        _rows.sort()
        return _rows

    def show_app(self, name):
        c = 0
        rows = self.get_apps(self._versions_mirantis, name)
        if rows:
            logger_cli.info("\n# Mirantis packages for '{}'".format(name))
            logger_cli.info("\n".join(rows))
            c += 1
        rows = self.get_apps(self._versions_other, name)
        if rows:
            logger_cli.info("\n# Other packages for '{}'".format(name))
            logger_cli.info("\n".join(rows))
            c += 1
        if c == 0:
            logger_cli.info("\n# No app found for '{}'".format(name))

    def get_mirantis_pkg_names(self):
        # Mirantis maintainers only
        return set(
            self._versions_mirantis.keys()
        ) - set(
            self._versions_other.keys()
        )

    def get_other_pkg_names(self):
        # Non-Mirantis maintainers
        return set(
            self._versions_other.keys()
        ) - set(
            self._versions_mirantis.keys()
        )

    def get_mixed_pkg_names(self):
        # Mixed maintainers
        return set(
            self._versions_mirantis.keys()
        ).intersection(set(
            self._versions_other.keys()
        ))

    def is_mirantis(self, name, tag=None):
        """Checks if this package is maintained
        by Mirantis in the target tag repo
        """
        if name in self._versions_mirantis:
            # check tag
            if tag:
                _pkg = self.get_package_versions(
                    name,
                    tagged=True
                )
                _tags = []
                for s in _pkg.keys():
                    for a in _pkg[s].keys():
                        for t in _pkg[s][a].keys():
                            _tags.append(t)
                if any([t.startswith(tag) for t in _tags]):
                    return True
                else:
                    return None
            else:
                return True
        elif name in self._versions_other:
            # check tag
            if tag:
                _pkg = self.get_package_versions(
                    name,
                    tagged=True
                )
                _tags = []
                for s in _pkg.keys():
                    for a in _pkg[s].keys():
                        for t in _pkg[s][a].keys():
                            _tags.append(t)
                if any([t.startswith(tag) for t in _tags]):
                    return False
                else:
                    return None
            else:
                return False
        else:
            logger.error(
                "# ERROR: package '{}' not found "
                "while determining the maintainer".format(
                    name
                )
            )
            return None
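
    # Note the tri-state result: True (Mirantis-maintained), False
    # (maintained by someone else) and None (package or tag not found),
    # so callers should test with 'is True' / 'is False' rather than
    # relying on truthiness alone.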

    def get_filtered_versions(
        self,
        name,
        tag=None,
        include=None,
        exclude=None
    ):
        """Gets all the versions for the package
        and filters them using the keys above
        """
        if tag:
            tag = str(tag) if not isinstance(tag, str) else tag
        _out = {}
        _vs = self.get_package_versions(name, tagged=True)
        # iterate to filter out keywords
        for s, apps in _vs.items():
            for a, _tt in apps.items():
                for t, vs in _tt.items():
                    # filter tags
                    if tag and t != tag and t.rsplit('.', 1)[0] != tag:
                        continue
                    # Skip the hotfix tag
                    if tag and t == tag + ".hotfix":
                        continue
                    for v, rp in vs.items():
                        for h, p in rp.items():
                            # filter headers; all keywords must match
                            _h = re.split(r"[\-\_]+", h)
                            _included = all(
                                kw in _h for kw in include
                            ) if include else True
                            _excluded = any(
                                kw in _h for kw in exclude
                            ) if exclude else False
                            if not _included or _excluded:
                                continue
                            else:
                                nested_set(_out, [s, a, v], [])
                                _dat = {
                                    "header": h
                                }
                                _dat.update(p)
                                _out[s][a][v].append(_dat)
        return _out
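
    # Keyword filtering splits the repo header on '-' and '_', so for a
    # hypothetical header "all_xenial_main_binary-amd64" the tokens are
    # ["all", "xenial", "main", "binary", "amd64"]; include=["amd64"]
    # keeps it, while exclude=["xenial"] drops it.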

    def get_package_versions(self, name, tagged=False):
        """Builds the package version structure
        with repository properties included
        """
        # get data
        _vs = {}

        if name in self._versions_mirantis:
            _vs.update(self._versions_mirantis[name])
        if name in self._versions_other:
            _vs.update(self._versions_other[name])

        # insert repo data, insert props into the headers' place
        _package = {}
        if tagged:
            for _v, _d1 in _vs.items():
                # use the tag as the next level
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        _rp = {}
                        # extract props for a repo
                        _r, _m = self._get_indexed_values(_pair)
                        # get the tag
                        _tag = _r["props"]["tag"]
                        # cut the tag from the header
                        _cut_head = _r["header"].split("_", 1)[1]
                        # populate the dict
                        _rp["maintainer"] = _m
                        _rp["md5"] = _md5
                        _rp.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _tag, _v, _cut_head],
                            _rp
                        )
        else:
            for _v, _d1 in _vs.items():
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        _r, _m = self._get_indexed_values(_pair)
                        _info["maintainer"] = _m
                        _info.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _v, _md5, _r["header"]],
                            _info
                        )

        return _package

    def parse_repos(self):
        # all tags to check
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)

        # major tags
        logger_cli.info("# Processing major tags")
        for _tag in major:
            self.fetch_versions(_tag)

        # updates tags
        logger_cli.info("# Processing update tags")
        for _tag in updates:
            self.fetch_versions(_tag + ".update")

        # hotfix tags
        logger_cli.info("# Processing hotfix tags")
        for _tag in hotfix:
            self.fetch_versions(_tag + ".hotfix")
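

# A minimal usage sketch (hypothetical mirror URL and tag; assumes the
# archives under pkg_dir/versions are writable):
#   rm = RepoManager()
#   rm.action_for_tag("http://mirror.example.com/", "2019.2.8",
#                     action="build")
#   rm.action_for_tag("http://mirror.example.com/", "2019.2.8",
#                     action="fetch")
#   rm.show_package("nova-common")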