blob: 4c5d0a49a48ecbc0f9f89ae04b5ec7e7d4410456 [file] [log] [blame]
Alex0989ecf2022-03-29 13:43:21 -05001# Author: Alex Savatieiev (osavatieiev@mirantis.com; a.savex@gmail.com)
2# Copyright 2019-2022 Mirantis, Inc.
Alexd9fd85e2019-05-16 16:58:24 -05003import json
4import os
Alexd0391d42019-05-21 18:48:55 -05005import re
Alexd9fd85e2019-05-16 16:58:24 -05006from copy import deepcopy
7
Alex74dc1352019-05-17 13:18:24 -05008from cfg_checker.common import logger, logger_cli, nested_set
Alex0ed4f762019-05-17 17:55:33 -05009from cfg_checker.common.const import _mainteiners_index_filename
10from cfg_checker.common.const import _mirantis_versions_filename
11from cfg_checker.common.const import _other_versions_filename
Alexd9fd85e2019-05-16 16:58:24 -050012from cfg_checker.common.const import _pkg_desc_archive
13from cfg_checker.common.const import _repos_index_filename
14from cfg_checker.common.const import _repos_info_archive
15from cfg_checker.common.const import _repos_versions_archive
Alexd9fd85e2019-05-16 16:58:24 -050016from cfg_checker.common.const import ubuntu_releases
Alexeffa0682021-06-04 12:18:33 -050017from cfg_checker.common.const import kaas_ubuntu_active
18from cfg_checker.common.const import mcp_active_tags as active_tags
Alex7f69a6a2019-05-31 16:53:35 -050019from cfg_checker.common.file_utils import ensure_folder_exists
Alexd9fd85e2019-05-16 16:58:24 -050020from cfg_checker.common.file_utils import get_gzipped_file
21from cfg_checker.common.settings import pkg_dir
22from cfg_checker.helpers.console_utils import Progress
23from cfg_checker.helpers.tgz import TGZFile
24
25import requests
26from requests.exceptions import ConnectionError
27
28ext = ".json"
29
30
def get_tag_label(_tag, parsed=False):
    """Return a console-friendly label for a repo tag.

    Tags ending in '.update'/'.hotfix' are shown with a bracketed
    marker and the suffix stripped; a leading '+' marks parsed tags.
    """
    _label = "+ " if parsed else "  "
    if _tag.endswith(".update"):
        _label += "[updates] " + _tag.rsplit('.', 1)[0]
    elif _tag.endswith(".hotfix"):
        _label += " [hotfix] " + _tag.rsplit('.', 1)[0]
    else:
        # plain tag: pad to keep columns aligned with marked ones
        _label += " "*10 + _tag
    return _label
46
47
Alex0ed4f762019-05-17 17:55:33 -050048def _get_value_index(_di, value, header=None):
Alex29ee76f2019-05-17 18:52:29 -050049 # Mainteiner names often uses specific chars
Alex3bc95f62020-03-05 17:00:04 -060050 # so make sure that value saved is str not str
51 # Python2
52 # _val = str(value, 'utf-8') if isinstance(value, str) else value
53 # Python3 has always utf-8 decoded value
54 _val = value
Alex0ed4f762019-05-17 17:55:33 -050055 if header:
Alex3bc95f62020-03-05 17:00:04 -060056 try:
Alexccb72e02021-01-20 16:38:03 -060057 _index = next(i for i in _di if header in _di[i]['header'])
Alex3bc95f62020-03-05 17:00:04 -060058 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060059 _index = str(len(_di) + 1)
Alex0ed4f762019-05-17 17:55:33 -050060 _di[_index] = {
61 "header": header,
Alex29ee76f2019-05-17 18:52:29 -050062 "props": _val
Alex0ed4f762019-05-17 17:55:33 -050063 }
Alex3bc95f62020-03-05 17:00:04 -060064 finally:
65 return _index
Alex0ed4f762019-05-17 17:55:33 -050066 else:
Alex3bc95f62020-03-05 17:00:04 -060067 try:
Alexccb72e02021-01-20 16:38:03 -060068 _index = next(i for i in _di if _val in _di[i])
Alex3bc95f62020-03-05 17:00:04 -060069 # iterator not empty, find index
Alex3bc95f62020-03-05 17:00:04 -060070 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060071 _index = str(len(_di) + 1)
Alex3bc95f62020-03-05 17:00:04 -060072 # on save, cast it as str
73 _di[_index] = _val
74 finally:
75 return _index
Alex0ed4f762019-05-17 17:55:33 -050076
77
78def _safe_load(_f, _a):
79 if _f in _a.list_files():
Alexd0391d42019-05-21 18:48:55 -050080 logger_cli.debug(
81 "... loading '{}':'{}'".format(
Alex0ed4f762019-05-17 17:55:33 -050082 _a.basefile,
83 _f
84 )
85 )
Alex3bc95f62020-03-05 17:00:04 -060086 return json.loads(_a.get_file(_f, decode=True))
Alex0ed4f762019-05-17 17:55:33 -050087 else:
88 return {}
89
90
Alexd9fd85e2019-05-16 16:58:24 -050091def _n_url(url):
92 if url[-1] == '/':
93 return url
94 else:
95 return url + '/'
96
97
class ReposInfo(object):
    """Walks a Debian/Ubuntu mirror tree and archives links to all
    'Packages.gz' files, one JSON file per tag, inside the repo info
    archive (TGZFile).

    Fixes applied:
    - `_ls_repo_page` formatted `e.message`, which does not exist on
      requests' ConnectionError in Python 3 (AttributeError);
    - `__call__` passed `self` as a positional argument to `__init__`
      (binding it to `arch_folder`) and returned None;
    - `list_tags` re-imported `re` dynamically although it is already
      imported at module level.
    """
    # guards against repeated initialization through __call__
    init_done = False

    def _init_vars(self):
        # collected repo records
        self.repos = []

    def _init_folders(self, arch_folder=None):
        # archive folder may be overridden; default is pkg_dir/versions
        if arch_folder:
            self._arch_folder = arch_folder
            self._repofile = os.path.join(arch_folder, _repos_info_archive)
        else:
            self._arch_folder = os.path.join(pkg_dir, "versions")
            self._repofile = os.path.join(
                self._arch_folder,
                _repos_info_archive
            )

    def __init__(self, arch_folder=None):
        # perform inits
        self._init_vars()
        self._init_folders(arch_folder)
        self.init_done = True

    def __call__(self, *args, **kwargs):
        # Fix: re-initialize in place when needed and always return
        # the instance (original forwarded `self` into arch_folder
        # and returned __init__'s None)
        if not self.init_done:
            self.__init__(*args, **kwargs)
        return self

    @staticmethod
    def _ls_repo_page(url):
        """Scrape a mirror HTML index page.

        Returns a (dirs, files) tuple of anchor targets; on connection
        failure logs the error and returns ([], []).
        Yes, this is ugly. But it works ok for small HTMLs.
        """
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            # Fix: ConnectionError has no '.message' in Python 3;
            # format the exception object itself
            logger_cli.error("# ERROR: {}".format(e))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs. Anchors for dirs end with '-'
        _dirs = [ll[ll.index(_s)+6:ll.index(_e)-1]
                 for ll in a if ll.startswith(_a) and ll.endswith('-')]
        # Comprehension for files. Anchors for files end with size
        _files = [ll[ll.index(_s)+6:ll.index(_e)]
                  for ll in a if ll.startswith(_a) and not ll.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        """Recursively walk the 'dists' subtree under *url*, appending
        every 'Packages.gz' link found to *_list* (also returned).
        """
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        """Map each 'Packages.gz' path onto a repo record and append
        it to *_r*. Paths are expected to look like
        .../dists/<release>/<type>/binary-<arch>/Packages.gz
        """
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                # strip the 'binary-' prefix when present
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

    def _find_tag(self, _t, _u, label=""):
        """Probe *_u* (optionally under a *label* subfolder) for tag
        *_t*; return its repo stub dict, or {} when absent.
        """
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        """Gather 'Packages.gz' links for one tag (or all active tags)
        at mirror *url* and store one JSON per tag in the archive.
        """
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            if "hotfix" in _tags:
                _tags.remove('hotfix')
            if "update" in _tags:
                _tags.remove('update')
            # Filter out not active tags
            logger_cli.info("Active tags for mcp: {}".format(
                ", ".join(active_tags)
            ))
            logger_cli.info("Active kaas ubuntu repos: {}".format(
                ", ".join(kaas_ubuntu_active)
            ))
            _active_tags = [t for t in _tags if t in active_tags]

            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _active_tags.extend(
                [t for t in _h_tags if t not in _tags and t in active_tags]
            )
            _active_tags.extend(
                [t for t in _u_tags if t not in _tags and t in active_tags]
            )
            _progress = Progress(len(_active_tags))
            _index = 0
            for _tag in _active_tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ".json"
            # skip tags already archived unless a single tag was forced
            if _repotgz.has_file(_name) and not tag:
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger_cli.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger_cli.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases or _stag in kaas_ubuntu_active:
                    # if stag is the release, this is all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger_cli.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger_cli.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger_cli.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(
                _name,
                buf=json.dumps(_repo, indent=2),
                replace=True
            )
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

    def list_tags(self, splitted=False):
        """List tags stored in the repo info archive.

        With splitted=True returns (major, updates, hotfix) sets/lists;
        otherwise a single list sorted numerically then lexically.
        """
        _files = TGZFile(self._repofile).list_files()
        # all files in archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that ends with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that ends with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # 're' is imported at module level; no dynamic import needed
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits, sorted by up to three numeric components
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall(r"\d+", x)[:3])
            )

            return _dig + _lex

    def get_repoinfo(self, tag):
        """Load and return the archived repo info JSON for *tag*."""
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext, decode=True)
        return json.loads(_buf)
387
388
389class RepoManager(object):
Alex3bc95f62020-03-05 17:00:04 -0600390 init_done = False
Alexd9fd85e2019-05-16 16:58:24 -0500391
Alex3bc95f62020-03-05 17:00:04 -0600392 def _init_folders(self, arch_folder=None):
Alex9a4ad212020-10-01 18:04:25 -0500393 logger_cli.info("# Loading package versions data")
Alex3bc95f62020-03-05 17:00:04 -0600394 # overide arch folder if needed
395 if arch_folder:
396 self._arch_folder = arch_folder
397 else:
398 self._arch_folder = os.path.join(pkg_dir, "versions")
Alexd9fd85e2019-05-16 16:58:24 -0500399
Alex3bc95f62020-03-05 17:00:04 -0600400 self._versions_arch = os.path.join(
401 self._arch_folder,
402 _repos_versions_archive
403 )
404 self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
Alexd0391d42019-05-21 18:48:55 -0500405
Alex3bc95f62020-03-05 17:00:04 -0600406 def _init_vars(self, info_class):
407 # RepoInfo instance init
408 if info_class:
409 self._info_class = info_class
410 else:
411 self._info_class = ReposInfo()
412 # archives
413 self._apps_filename = "apps.json"
Alexd9fd85e2019-05-16 16:58:24 -0500414
Alex3bc95f62020-03-05 17:00:04 -0600415 # repository index
416 self._repo_index = {}
417 self._mainteiners_index = {}
418
419 self._apps = {}
420
421 # init package versions storage
422 self._versions_mirantis = {}
423 self._versions_other = {}
424
425 def _init_archives(self):
Alexd9fd85e2019-05-16 16:58:24 -0500426 # Init version files
427 self.versionstgz = TGZFile(
428 self._versions_arch,
429 label="MCP Configuration Checker: Package versions archive"
430 )
431 self.desctgz = TGZFile(
432 self._desc_arch,
433 label="MCP Configuration Checker: Package descriptions archive"
434 )
Alexd0391d42019-05-21 18:48:55 -0500435
436 # section / app
437 self._apps = _safe_load(
438 self._apps_filename,
439 self.desctgz
440 )
441
Alex0ed4f762019-05-17 17:55:33 -0500442 # indices
443 self._repo_index = _safe_load(
444 _repos_index_filename,
445 self.versionstgz
446 )
447 self._mainteiners_index = _safe_load(
448 _mainteiners_index_filename,
449 self.versionstgz
450 )
Alexd9fd85e2019-05-16 16:58:24 -0500451
Alex0ed4f762019-05-17 17:55:33 -0500452 # versions
453 self._versions_mirantis = _safe_load(
454 _mirantis_versions_filename,
455 self.versionstgz
456 )
457 self._versions_other = _safe_load(
458 _other_versions_filename,
459 self.versionstgz
460 )
Alexd9fd85e2019-05-16 16:58:24 -0500461
    def __init__(self, arch_folder=None, info_class=None):
        """Create the repo manager and preload its archives.

        :param arch_folder: optional override for the archives folder
        :param info_class: optional ReposInfo-like instance to reuse
        """
        # Perform inits
        self._init_vars(info_class)
        self._init_folders(arch_folder)
        # Ensure that versions folder exists
        logger_cli.debug(ensure_folder_exists(self._arch_folder))
        # Preload/create archives
        self._init_archives()
        self.init_done = True
471
472 def __call__(self, *args, **kwargs):
473 if self.init_done:
474 return self
475 else:
476 return self.__init__(self, *args, **kwargs)
477
Alexd9fd85e2019-05-16 16:58:24 -0500478 def _create_repo_header(self, p):
479 _header = "_".join([
480 p['tag'],
481 p['subset'],
482 p['release'],
483 p['ubuntu-release'],
484 p['type'],
485 p['arch']
486 ])
Alex0ed4f762019-05-17 17:55:33 -0500487 return _get_value_index(self._repo_index, p, header=_header)
Alexd9fd85e2019-05-16 16:58:24 -0500488
Alex0ed4f762019-05-17 17:55:33 -0500489 def _get_indexed_values(self, pair):
490 _h, _m = pair.split('-')
491 return self._repo_index[_h], self._mainteiners_index[_m]
Alexd9fd85e2019-05-16 16:58:24 -0500492
Alexd0391d42019-05-21 18:48:55 -0500493 def _update_pkg_version(self, _d, n, v, md5, s, a, h_index, m_index):
Alexd9fd85e2019-05-16 16:58:24 -0500494 """Method updates package version record in global dict
495 """
496 # 'if'*4 operation is pretty expensive when using it 100k in a row
497 # so try/except is a better way to go, even faster than 'reduce'
Alex0ed4f762019-05-17 17:55:33 -0500498 _pair = "-".join([h_index, m_index])
Alexd0391d42019-05-21 18:48:55 -0500499 _info = {
500 'repo': [_pair],
501 'section': s,
502 'app': a
503 }
Alexd9fd85e2019-05-16 16:58:24 -0500504 try:
505 # try to load list
Alexd0391d42019-05-21 18:48:55 -0500506 _list = _d[n][v][md5]['repo']
Alexd9fd85e2019-05-16 16:58:24 -0500507 # cast it as set() and union()
Alex0ed4f762019-05-17 17:55:33 -0500508 _list = set(_list).union([_pair])
Alexd9fd85e2019-05-16 16:58:24 -0500509 # cast back as set() is not serializeable
Alexd0391d42019-05-21 18:48:55 -0500510 _d[n][v][md5]['repo'] = list(_list)
Alexd9fd85e2019-05-16 16:58:24 -0500511 return False
512 except KeyError:
513 # ok, this is fresh pkg. Do it slow way.
Alex0ed4f762019-05-17 17:55:33 -0500514 if n in _d:
Alexd9fd85e2019-05-16 16:58:24 -0500515 # there is such pkg already
Alex0ed4f762019-05-17 17:55:33 -0500516 if v in _d[n]:
Alexd9fd85e2019-05-16 16:58:24 -0500517 # there is such version, check md5
Alex0ed4f762019-05-17 17:55:33 -0500518 if md5 in _d[n][v]:
Alexd9fd85e2019-05-16 16:58:24 -0500519 # just add new repo header
Alexd0391d42019-05-21 18:48:55 -0500520 if _pair not in _d[n][v][md5]['repo']:
521 _d[n][v][md5]['repo'].append(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500522 else:
523 # check if such index is here...
524 _existing = filter(
Alexd0391d42019-05-21 18:48:55 -0500525 lambda i: _pair in _d[n][v][i]['repo'],
Alex0ed4f762019-05-17 17:55:33 -0500526 _d[n][v]
Alexd9fd85e2019-05-16 16:58:24 -0500527 )
528 if _existing:
529 # Yuck! Same version had different MD5
Alex0ed4f762019-05-17 17:55:33 -0500530 _r, _m = self._get_indexed_values(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500531 logger_cli.error(
532 "# ERROR: Package version has multiple MD5s "
533 "in '{}': {}:{}:{}".format(
Alex0ed4f762019-05-17 17:55:33 -0500534 _r,
Alexd9fd85e2019-05-16 16:58:24 -0500535 n,
536 v,
537 md5
538 )
539 )
Alexd0391d42019-05-21 18:48:55 -0500540 _d[n][v][md5] = _info
Alexd9fd85e2019-05-16 16:58:24 -0500541 else:
542 # this is new version for existing package
Alex0ed4f762019-05-17 17:55:33 -0500543 _d[n][v] = {
Alexd0391d42019-05-21 18:48:55 -0500544 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500545 }
546 return False
547 else:
548 # this is new pakcage
Alex0ed4f762019-05-17 17:55:33 -0500549 _d[n] = {
Alexd9fd85e2019-05-16 16:58:24 -0500550 v: {
Alexd0391d42019-05-21 18:48:55 -0500551 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500552 }
553 }
554 return True
555
556 def _save_repo_descriptions(self, repo_props, desc):
557 # form the filename for the repo and save it
558 self.desctgz.add_file(
559 self._create_repo_header(repo_props),
560 json.dumps(desc)
561 )
562
563 # def get_description(self, repo_props, name, md5=None):
564 # """Gets target description
565 # """
566 # _filename = self._create_repo_header(repo_props)
567 # # check if it is present in cache
568 # if _filename in self._desc_cache:
569 # _descs = self._desc_cache[_filename]
570 # else:
571 # # load data
572 # _descs = self.desctgz.get_file(_filename)
573 # # Serialize it
574 # _descs = json.loads(_descs)
575 # self._desc_cache[_filename] = _descs
576 # # return target desc
577 # if name in _descs and md5 in _descs[name]:
578 # return _descs[name][md5]
579 # else:
580 # return None
581
    def parse_tag(self, tag, descriptions=False, apps=False):
        """Download and parse Package.gz files for specific tag
        By default, descriptions not saved
        due to huge resulting file size and slow processing

        :param tag: tag whose archived repo info is processed
        :param descriptions: also save full package descriptions
        :param apps: also maintain the section/app/package map
        """
        # init gzip and downloader
        _info = self._info_class.get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        # NOTE(review): the '-1' presumably discounts a non-component
        # key left in _info -- confirm against get_repoinfo output
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        # count repos per ubuntu release vs everything else
        for _c, _d in _info.items():
            for _ur, _l in _d.items():
                if _ur in ubuntu_releases or _ur in kaas_ubuntu_active:
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            "  '{}', {} components, {} ubuntu repos, {} other/uknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.items():
            # we do not need url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url = if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.items():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _index += 1
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, GET 'Packages.gz'".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch']
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    if not _raw:
                        # empty repo...
                        _progress.clearline()
                        logger_cli.warning(
                            "# WARNING: Empty file: '{}'".format(
                                _p['filepath']
                            )
                        )
                        continue
                    else:
                        _raw = _raw.decode("utf-8")
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _lines = _raw.splitlines()
                    # break lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
                    # if there is no empty line at end, add it
                    if _lines[-1] != '':
                        _lines.append('')
                    # Process lines
                    for _line in _lines:
                        if not _line:
                            # if the line is empty, process pkg data gathered
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            _mainteiner = _desc['maintainer']

                            # app name defaults to '-' when no source field
                            if 'source' in _desc:
                                _ap = _desc['source'].lower()
                            else:
                                _ap = "-"

                            if apps:
                                # insert app
                                _sc = _desc['section'].lower()
                                if 'source' in _desc:
                                    _ap = _desc['source'].lower()
                                else:
                                    _ap = "-"

                                # add architecture to the app map entry,
                                # creating the nested path on first sight
                                try:
                                    _tmp = set(self._apps[_sc][_ap][_name])
                                    _tmp.add(_desc['architecture'])
                                    self._apps[_sc][_ap][_name] = list(_tmp)
                                except KeyError:
                                    nested_set(
                                        self._apps,
                                        [_sc, _ap, _name],
                                        [_desc['architecture']]
                                    )

                            # Check is mainteiner is Mirantis
                            if _mainteiner.endswith("@mirantis.com>"):
                                # update mirantis versions
                                if self._update_pkg_version(
                                    self._versions_mirantis,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1
                            else:
                                # update other versions
                                if self._update_pkg_version(
                                    self._versions_other,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            # continuation line: append to previous field
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            # 'key:' with empty value or 'key: value'
                            if _line.endswith(":"):
                                _key = _line[:-1]
                                _value = ""
                            else:
                                _key, _value = _line.split(": ", 1)
                            _key = _key.lower()
                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            _repos_index_filename,
            json.dumps(self._repo_index),
            replace=True
        )
        self.versionstgz.add_file(
            _mainteiners_index_filename,
            json.dumps(self._mainteiners_index),
            replace=True
        )
        if apps:
            self.desctgz.add_file(
                self._apps_filename,
                json.dumps(self._apps),
                replace=True
            )

        return
790
Alexd0391d42019-05-21 18:48:55 -0500791 def fetch_versions(self, tag, descriptions=False, apps=False):
Alexd9fd85e2019-05-16 16:58:24 -0500792 """Executes parsing for specific tag
793 """
794 if descriptions:
795 logger_cli.warning(
796 "\n\n# !!! WARNING: Saving repo descriptions "
797 "consumes huge amount of disk space\n\n"
798 )
799 # if there is no such tag, parse it from repoinfo
Alexd9fd85e2019-05-16 16:58:24 -0500800 logger_cli.info("# Fetching versions for {}".format(tag))
Alexd0391d42019-05-21 18:48:55 -0500801 self.parse_tag(tag, descriptions=descriptions, apps=apps)
Alex0ed4f762019-05-17 17:55:33 -0500802 logger_cli.info("-> saving updated versions")
803 self.versionstgz.add_file(
804 _mirantis_versions_filename,
805 json.dumps(self._versions_mirantis),
806 replace=True
807 )
808 self.versionstgz.add_file(
809 _other_versions_filename,
810 json.dumps(self._versions_other),
811 replace=True
812 )
Alexd9fd85e2019-05-16 16:58:24 -0500813
814 def build_repos(self, url, tag=None):
815 """Builds versions data for selected tag, or for all of them
816 """
Alexd9fd85e2019-05-16 16:58:24 -0500817 # recoursively walk the mirrors
818 # and gather all of the repos for 'tag' or all of the tags
Alex3bc95f62020-03-05 17:00:04 -0600819 self._info_class.fetch_repos(url, tag=tag)
Alexd9fd85e2019-05-16 16:58:24 -0500820
Alex74dc1352019-05-17 13:18:24 -0500821 def _build_action(self, url, tags):
822 for t in tags:
Alex6df29ad2019-05-31 17:55:32 -0500823 logger_cli.info("# Building repo info for '{}'".format(t))
Alex74dc1352019-05-17 13:18:24 -0500824 self.build_repos(url, tag=t)
825
Alexd0391d42019-05-21 18:48:55 -0500826 def get_available_tags(self, tag=None):
827 # Populate action tags
Alex3bc95f62020-03-05 17:00:04 -0600828 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd0391d42019-05-21 18:48:55 -0500829
830 _tags = []
831 if tag in major:
832 _tags.append(tag)
833 if tag in updates:
834 _tags.append(tag + ".update")
835 if tag in hotfix:
836 _tags.append(tag + ".hotfix")
837
838 return _tags
839
Alexd9fd85e2019-05-16 16:58:24 -0500840 def action_for_tag(
841 self,
842 url,
843 tag,
844 action=None,
Alexd0391d42019-05-21 18:48:55 -0500845 descriptions=None,
846 apps=None
Alexd9fd85e2019-05-16 16:58:24 -0500847 ):
848 """Executes action for every tag from all collections
849 """
850 if not action:
851 logger_cli.info("# No action set, nothing to do")
Alex74dc1352019-05-17 13:18:24 -0500852 # See if this is a list action
Alexd9fd85e2019-05-16 16:58:24 -0500853 if action == "list":
Alex3bc95f62020-03-05 17:00:04 -0600854 _all = self._info_class.list_tags()
Alex6df29ad2019-05-31 17:55:32 -0500855 if _all:
856 # Print pretty list and exit
857 logger_cli.info("# Tags available at '{}':".format(url))
858 for t in _all:
859 _ri = self._repo_index
860 _isparsed = any(
Alex3bc95f62020-03-05 17:00:04 -0600861 [k for k, v in _ri.items()
Alex6df29ad2019-05-31 17:55:32 -0500862 if v['props']['tag'] == t]
863 )
864 if _isparsed:
865 logger_cli.info(get_tag_label(t, parsed=True))
866 else:
867 logger_cli.info(get_tag_label(t))
868 else:
869 logger_cli.info("# Not tags parsed yet for '{}':".format(url))
870
Alex74dc1352019-05-17 13:18:24 -0500871 # exit
Alexd9fd85e2019-05-16 16:58:24 -0500872 return
Alex74dc1352019-05-17 13:18:24 -0500873
Alex6df29ad2019-05-31 17:55:32 -0500874 if action == "build":
875 self._build_action(url, [tag])
876
Alexd0391d42019-05-21 18:48:55 -0500877 # Populate action tags
878 _action_tags = self.get_available_tags(tag)
879
Alexd9fd85e2019-05-16 16:58:24 -0500880 if not _action_tags:
881 logger_cli.info(
882 "# Tag of '{}' not found. "
883 "Consider rebuilding repos info.".format(tag)
884 )
Alex74dc1352019-05-17 13:18:24 -0500885 else:
Alexd9fd85e2019-05-16 16:58:24 -0500886 logger_cli.info(
Alex74dc1352019-05-17 13:18:24 -0500887 "-> tags to process: {}".format(
Alexd9fd85e2019-05-16 16:58:24 -0500888 ", ".join(_action_tags)
889 )
890 )
Alex74dc1352019-05-17 13:18:24 -0500891 # Execute actions
Alex6df29ad2019-05-31 17:55:32 -0500892 if action == "fetch":
Alexd9fd85e2019-05-16 16:58:24 -0500893 for t in _action_tags:
Alexd0391d42019-05-21 18:48:55 -0500894 self.fetch_versions(t, descriptions=descriptions, apps=apps)
Alexd9fd85e2019-05-16 16:58:24 -0500895
896 logger_cli.info("# Done.")
897
    def show_package(self, name):
        """Pretty-print all known versions of package *name*,
        grouped by section/app, with md5s, repo headers and the
        collected maintainer list.
        """
        # get the package data
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            # \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            # section
            for _s in sorted(_p):
                # app
                for _a in sorted(_p[_s]):
                    _o = ""
                    _mm = []
                    # get and sort versions
                    for _v in sorted(_p[_s][_a]):
                        _o += "\n" + " "*8 + _v + ':\n'
                        # get and sort md5s
                        for _md5 in sorted(_p[_s][_a][_v]):
                            _o += " "*16 + _md5 + "\n"
                            # get and sort repo headers
                            for _r in sorted(_p[_s][_a][_v][_md5]):
                                _o += " "*24 + _r.replace('_', ' ') + '\n'
                                # collect distinct maintainers
                                _m = _p[_s][_a][_v][_md5][_r]["maintainer"]
                                if _m not in _mm:
                                    _mm.append(_m)

                    logger_cli.info(
                        "\n# Package: {}/{}/{}\nMaintainers: {}".format(
                            _s,
                            _a,
                            name,
                            ", ".join(_mm)
                        )
                    )

                    logger_cli.info(_o)
Alex74dc1352019-05-17 13:18:24 -0500941
Alexd0391d42019-05-21 18:48:55 -0500942 @staticmethod
943 def get_apps(versions, name):
944 _all = True if name == '*' else False
Alexcf91b182019-05-31 11:57:07 -0500945 _s_max = _a_max = _p_max = _v_max = 0
Alexd0391d42019-05-21 18:48:55 -0500946 _rows = []
947 for _p in versions.keys():
948 _vs = versions[_p]
Alex3bc95f62020-03-05 17:00:04 -0600949 for _v, _d1 in _vs.items():
950 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -0500951 if _all or name == _info['app']:
952 _s_max = max(len(_info['section']), _s_max)
953 _a_max = max(len(_info['app']), _a_max)
Alexcf91b182019-05-31 11:57:07 -0500954 _p_max = max(len(_p), _p_max)
955 _v_max = max(len(_v), _v_max)
Alexd0391d42019-05-21 18:48:55 -0500956 _rows.append([
957 _info['section'],
958 _info['app'],
Alexcf91b182019-05-31 11:57:07 -0500959 _p,
960 _v,
961 _md5,
962 len(_info['repo'])
Alexd0391d42019-05-21 18:48:55 -0500963 ])
Alexcf91b182019-05-31 11:57:07 -0500964 # format columns
965 # section
966 _fmt = "{:"+str(_s_max)+"} "
967 # app
968 _fmt += "{:"+str(_a_max)+"} "
969 # package name
970 _fmt += "{:"+str(_p_max)+"} "
971 # version
972 _fmt += "{:"+str(_v_max)+"} "
973 # md5 and number of repos is fixed
974 _fmt += "{} in {} repos"
975
976 # fill rows
977 _rows = [_fmt.format(s, a, p, v, m, l) for s, a, p, v, m, l in _rows]
Alexd0391d42019-05-21 18:48:55 -0500978 _rows.sort()
979 return _rows
980
981 def show_app(self, name):
982 c = 0
983 rows = self.get_apps(self._versions_mirantis, name)
984 if rows:
Alexcf91b182019-05-31 11:57:07 -0500985 logger_cli.info("\n# Mirantis packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500986 logger_cli.info("\n".join(rows))
987 c += 1
988 rows = self.get_apps(self._versions_other, name)
989 if rows:
Alexcf91b182019-05-31 11:57:07 -0500990 logger_cli.info("\n# Other packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500991 logger_cli.info("\n".join(rows))
992 c += 1
993 if c == 0:
994 logger_cli.info("\n# No app found for '{}'".format(name))
995
996 def get_mirantis_pkg_names(self):
997 # Mirantis maintainers only
998 return set(
999 self._versions_mirantis.keys()
1000 ) - set(
1001 self._versions_other.keys()
1002 )
1003
1004 def get_other_pkg_names(self):
1005 # Non-mirantis Maintainers
1006 return set(
1007 self._versions_other.keys()
1008 ) - set(
1009 self._versions_mirantis.keys()
1010 )
1011
1012 def get_mixed_pkg_names(self):
1013 # Mixed maintainers
1014 return set(
1015 self._versions_mirantis.keys()
1016 ).intersection(set(
1017 self._versions_other.keys()
1018 ))
1019
1020 def is_mirantis(self, name, tag=None):
1021 """Method checks if this package is mainteined
1022 by mirantis in target tag repo
1023 """
1024 if name in self._versions_mirantis:
1025 # check tag
1026 if tag:
1027 _pkg = self.get_package_versions(
1028 name,
1029 tagged=True
1030 )
1031 _tags = []
1032 for s in _pkg.keys():
1033 for a in _pkg[s].keys():
1034 for t in _pkg[s][a].keys():
1035 _tags.append(t)
1036 if any([t.startswith(tag) for t in _tags]):
1037 return True
1038 else:
1039 return None
1040 else:
1041 return True
1042 elif name in self._versions_other:
1043 # check tag
1044 if tag:
1045 _pkg = self.get_package_versions(
1046 name,
1047 tagged=True
1048 )
1049 _tags = []
1050 for s in _pkg.keys():
1051 for a in _pkg[s].keys():
1052 for t in _pkg[s][a].keys():
1053 _tags.append(t)
1054 if any([t.startswith(tag) for t in _tags]):
1055 return False
1056 else:
1057 return None
1058 else:
1059 return False
1060 else:
1061 logger.error(
1062 "# ERROR: package '{}' not found "
1063 "while determining maintainer".format(
1064 name
1065 )
1066 )
1067 return None
1068
1069 def get_filtered_versions(
1070 self,
1071 name,
1072 tag=None,
1073 include=None,
1074 exclude=None
1075 ):
1076 """Method gets all the versions for the package
1077 and filters them using keys above
1078 """
1079 if tag:
Alex3bc95f62020-03-05 17:00:04 -06001080 tag = str(tag) if not isinstance(tag, str) else tag
Alexd0391d42019-05-21 18:48:55 -05001081 _out = {}
1082 _vs = self.get_package_versions(name, tagged=True)
1083 # iterate to filter out keywords
Alex3bc95f62020-03-05 17:00:04 -06001084 for s, apps in _vs.items():
1085 for a, _tt in apps.items():
1086 for t, vs in _tt.items():
Alexd0391d42019-05-21 18:48:55 -05001087 # filter tags
1088 if tag and t != tag and t.rsplit('.', 1)[0] != tag:
1089 continue
1090 # Skip hotfix tag
1091 if t == tag + ".hotfix":
1092 continue
Alex3bc95f62020-03-05 17:00:04 -06001093 for v, rp in vs.items():
1094 for h, p in rp.items():
Alexd0391d42019-05-21 18:48:55 -05001095 # filter headers with all keywords matching
1096 _h = re.split(r"[\-\_]+", h)
1097 _included = all([kw in _h for kw in include])
1098 _excluded = any([kw in _h for kw in exclude])
1099 if not _included or _excluded:
1100 continue
1101 else:
1102 nested_set(_out, [s, a, v], [])
1103 _dat = {
1104 "header": h
1105 }
1106 _dat.update(p)
1107 _out[s][a][v].append(_dat)
1108 return _out
1109
    def get_package_versions(self, name, tagged=False):
        """Build the full version structure for a single package,
        with repository properties resolved and included.

        Merges entries from both maintainer indexes
        (``_versions_mirantis`` and ``_versions_other``).

        :param name: package name to look up
        :param tagged: when True, group by repo tag:
            section -> app -> tag -> version -> header-without-tag;
            when False, group by version:
            section -> app -> version -> md5 -> full header
        :return: nested dict as described above; empty if name is unknown
        """
        # get data from both indexes; on overlap, 'other' wins
        _vs = {}

        if name in self._versions_mirantis:
            _vs.update(self._versions_mirantis[name])
        if name in self._versions_other:
            _vs.update(self._versions_other[name])

        # insert repo data, insert props into headers place
        _package = {}
        if tagged:
            for _v, _d1 in _vs.items():
                # use tag as a next step
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        _rp = {}
                        # extract props for a repo
                        _r, _m = self._get_indexed_values(_pair)
                        # get tag
                        _tag = _r["props"]["tag"]
                        # cut tag from the header
                        _cut_head = _r["header"].split("_", 1)[1]
                        # populate dict
                        _rp["maintainer"] = _m
                        _rp["md5"] = _md5
                        _rp.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _tag, _v, _cut_head],
                            _rp
                        )
        else:
            for _v, _d1 in _vs.items():
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        _r, _m = self._get_indexed_values(_pair)
                        # NOTE(review): unlike the tagged branch, _info is
                        # mutated in place and the SAME dict object is
                        # stored under every header; later pairs overwrite
                        # maintainer/props for all of them - confirm this
                        # aliasing is intended
                        _info["maintainer"] = _m
                        _info.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _v, _md5, _r["header"]],
                            _info
                        )

        return _package
1163
Alexd9fd85e2019-05-16 16:58:24 -05001164 def parse_repos(self):
1165 # all tags to check
Alex3bc95f62020-03-05 17:00:04 -06001166 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd9fd85e2019-05-16 16:58:24 -05001167
1168 # major tags
1169 logger_cli.info("# Processing major tags")
1170 for _tag in major:
1171 self.fetch_versions(_tag)
1172
1173 # updates tags
1174 logger_cli.info("# Processing update tags")
1175 for _tag in updates:
1176 self.fetch_versions(_tag + ".update")
1177
1178 # hotfix tags
1179 logger_cli.info("# Processing hotfix tags")
1180 for _tag in hotfix:
1181 self.fetch_versions(_tag + ".hotfix")