blob: a308864d02e4c9534ac4e4c016bed9c990d2ccb5 [file] [log] [blame]
Alexd9fd85e2019-05-16 16:58:24 -05001import json
2import os
Alexd0391d42019-05-21 18:48:55 -05003import re
Alexd9fd85e2019-05-16 16:58:24 -05004from copy import deepcopy
5
Alex74dc1352019-05-17 13:18:24 -05006from cfg_checker.common import logger, logger_cli, nested_set
Alex0ed4f762019-05-17 17:55:33 -05007from cfg_checker.common.const import _mainteiners_index_filename
8from cfg_checker.common.const import _mirantis_versions_filename
9from cfg_checker.common.const import _other_versions_filename
Alexd9fd85e2019-05-16 16:58:24 -050010from cfg_checker.common.const import _pkg_desc_archive
11from cfg_checker.common.const import _repos_index_filename
12from cfg_checker.common.const import _repos_info_archive
13from cfg_checker.common.const import _repos_versions_archive
Alexd9fd85e2019-05-16 16:58:24 -050014from cfg_checker.common.const import ubuntu_releases
Alexeffa0682021-06-04 12:18:33 -050015from cfg_checker.common.const import kaas_ubuntu_active
16from cfg_checker.common.const import mcp_active_tags as active_tags
Alex7f69a6a2019-05-31 16:53:35 -050017from cfg_checker.common.file_utils import ensure_folder_exists
Alexd9fd85e2019-05-16 16:58:24 -050018from cfg_checker.common.file_utils import get_gzipped_file
19from cfg_checker.common.settings import pkg_dir
20from cfg_checker.helpers.console_utils import Progress
21from cfg_checker.helpers.tgz import TGZFile
22
23import requests
24from requests.exceptions import ConnectionError
25
# File extension used for per-tag JSON members stored inside the tgz archives
ext = ".json"
27
28
def get_tag_label(_tag, parsed=False):
    """Return a printable label for a repo tag.

    Parsed tags get a '+ ' prefix, unparsed ones two spaces; the
    '.update' / '.hotfix' suffixes are rendered as bracketed markers
    so all labels line up in a column.
    """
    prefix = "+ " if parsed else "  "
    if _tag.endswith(".update"):
        body = "[updates] " + _tag.rsplit('.', 1)[0]
    elif _tag.endswith(".hotfix"):
        body = " [hotfix] " + _tag.rsplit('.', 1)[0]
    else:
        # plain tag: pad to align with the bracketed variants
        body = " " * 10 + _tag
    return prefix + body
44
45
Alex0ed4f762019-05-17 17:55:33 -050046def _get_value_index(_di, value, header=None):
Alex29ee76f2019-05-17 18:52:29 -050047 # Mainteiner names often uses specific chars
Alex3bc95f62020-03-05 17:00:04 -060048 # so make sure that value saved is str not str
49 # Python2
50 # _val = str(value, 'utf-8') if isinstance(value, str) else value
51 # Python3 has always utf-8 decoded value
52 _val = value
Alex0ed4f762019-05-17 17:55:33 -050053 if header:
Alex3bc95f62020-03-05 17:00:04 -060054 try:
Alexccb72e02021-01-20 16:38:03 -060055 _index = next(i for i in _di if header in _di[i]['header'])
Alex3bc95f62020-03-05 17:00:04 -060056 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060057 _index = str(len(_di) + 1)
Alex0ed4f762019-05-17 17:55:33 -050058 _di[_index] = {
59 "header": header,
Alex29ee76f2019-05-17 18:52:29 -050060 "props": _val
Alex0ed4f762019-05-17 17:55:33 -050061 }
Alex3bc95f62020-03-05 17:00:04 -060062 finally:
63 return _index
Alex0ed4f762019-05-17 17:55:33 -050064 else:
Alex3bc95f62020-03-05 17:00:04 -060065 try:
Alexccb72e02021-01-20 16:38:03 -060066 _index = next(i for i in _di if _val in _di[i])
Alex3bc95f62020-03-05 17:00:04 -060067 # iterator not empty, find index
Alex3bc95f62020-03-05 17:00:04 -060068 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060069 _index = str(len(_di) + 1)
Alex3bc95f62020-03-05 17:00:04 -060070 # on save, cast it as str
71 _di[_index] = _val
72 finally:
73 return _index
Alex0ed4f762019-05-17 17:55:33 -050074
75
def _safe_load(_f, _a):
    """Load JSON member *_f* from tgz archive *_a*; return {} when absent."""
    if _f not in _a.list_files():
        return {}
    logger_cli.debug(
        "... loading '{}':'{}'".format(
            _a.basefile,
            _f
        )
    )
    return json.loads(_a.get_file(_f, decode=True))
87
88
Alexd9fd85e2019-05-16 16:58:24 -050089def _n_url(url):
90 if url[-1] == '/':
91 return url
92 else:
93 return url + '/'
94
95
class ReposInfo(object):
    """Collects links to 'Packages.gz' files from an http mirror tree
    and archives the per-tag repository layout as JSON members of a
    tgz archive (_repos_info_archive).
    """
    # guard used by __call__ to avoid re-running __init__
    init_done = False

    def _init_vars(self):
        # accumulated repo descriptors
        self.repos = []

    def _init_folders(self, arch_folder=None):
        # archive folder override; defaults to '<pkg_dir>/versions'
        if arch_folder:
            self._arch_folder = arch_folder
            self._repofile = os.path.join(arch_folder, _repos_info_archive)
        else:
            self._arch_folder = os.path.join(pkg_dir, "versions")
            self._repofile = os.path.join(
                self._arch_folder,
                _repos_info_archive
            )

    def __init__(self, arch_folder=None):
        # perform inits
        self._init_vars()
        self._init_folders(arch_folder)
        self.init_done = True

    def __call__(self, *args, **kwargs):
        # return the already-initialized instance, else re-init
        if self.init_done:
            return self
        else:
            # NOTE(review): bound __init__ already receives self, so this
            # passes self a second time (it becomes arch_folder), and
            # __init__ returns None — confirm intended semantics
            return self.__init__(self, *args, **kwargs)

    @staticmethod
    def _ls_repo_page(url):
        """Scrape an autoindex HTML page at *url*.

        Returns ([directory names], [file names]) extracted from anchor
        tags; directories are the anchors whose line ends with '-'.
        """
        # Yes, this is ugly. But it works ok for small HTMLs.
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            # NOTE(review): py3 exceptions have no '.message' attribute;
            # this line would raise AttributeError if ever hit — verify
            logger_cli.error("# ERROR: {}".format(e.message))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs. Anchors for ends with '-'
        _dirs = [ll[ll.index(_s)+6:ll.index(_e)-1]
                 for ll in a if ll.startswith(_a) and ll.endswith('-')]
        # Comprehension for files. Anchors ends with size
        _files = [ll[ll.index(_s)+6:ll.index(_e)]
                  for ll in a if ll.startswith(_a) and not ll.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        """Recursively walk the 'dists' subtree of *url*, appending every
        'Packages.gz' URL found to *_list* (returned for convenience).
        """
        # recoursive method to walk dists tree
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        """Convert each Packages.gz path into a repo descriptor dict
        appended to *_r* (arch/type/ubuntu-release/filepath).
        """
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            # path segments after 'dists', reversed:
            # [filename, arch-dir, type, release, ...]
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

    def _find_tag(self, _t, _u, label=""):
        """Probe *_u* (or its *label* subfolder) for tag *_t*.

        Returns {label: {"baseurl": ..., "all": {}}} when found, else {}.
        """
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        """Walk the mirror at *url* and archive repo layouts per tag.

        With *tag* set, only that tag (and its hotfix/update variants)
        is processed and its archive member is refreshed; otherwise all
        active tags are gathered, skipping already archived ones.
        """
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            if "hotfix" in _tags:
                _tags.remove('hotfix')
            if "update" in _tags:
                _tags.remove('update')
            # Filter out not active tags
            logger_cli.info("Active tags for mcp: {}".format(
                ", ".join(active_tags)
            ))
            logger_cli.info("Active kaas ubuntu repos: {}".format(
                ", ".join(kaas_ubuntu_active)
            ))
            _active_tags = [t for t in _tags if t in active_tags]

            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _active_tags.extend(
                [t for t in _h_tags if t not in _tags and t in active_tags]
            )
            _active_tags.extend(
                [t for t in _u_tags if t not in _tags and t in active_tags]
            )
            _progress = Progress(len(_active_tags))
            _index = 0
            for _tag in _active_tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ".json"
            if _repotgz.has_file(_name) and not tag:
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger_cli.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger_cli.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases or _stag in kaas_ubuntu_active:
                    # if stag is the release, this is all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger_cli.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger_cli.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger_cli.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(_name, buf=json.dumps(_repo, indent=2))
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

    def list_tags(self, splitted=False):
        """List archived tags.

        With splitted=True returns (major, updates, hotfix); otherwise a
        single list with numeric tags first, sorted by version tuple.
        """
        _files = TGZFile(self._repofile).list_files()
        # all files in archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that ends with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that ends with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # dynamic import
            # NOTE(review): 're' is already imported at module level
            import re
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall(r"\d+", x)[:3])
            )

            return _dig + _lex

    def get_repoinfo(self, tag):
        """Load and deserialize the archived JSON repo layout for *tag*."""
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext, decode=True)
        return json.loads(_buf)
381
382
383class RepoManager(object):
    # Re-init guard: set to True once __init__ completes (checked by __call__)
    init_done = False
Alexd9fd85e2019-05-16 16:58:24 -0500385
Alex3bc95f62020-03-05 17:00:04 -0600386 def _init_folders(self, arch_folder=None):
Alex9a4ad212020-10-01 18:04:25 -0500387 logger_cli.info("# Loading package versions data")
Alex3bc95f62020-03-05 17:00:04 -0600388 # overide arch folder if needed
389 if arch_folder:
390 self._arch_folder = arch_folder
391 else:
392 self._arch_folder = os.path.join(pkg_dir, "versions")
Alexd9fd85e2019-05-16 16:58:24 -0500393
Alex3bc95f62020-03-05 17:00:04 -0600394 self._versions_arch = os.path.join(
395 self._arch_folder,
396 _repos_versions_archive
397 )
398 self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
Alexd0391d42019-05-21 18:48:55 -0500399
Alex3bc95f62020-03-05 17:00:04 -0600400 def _init_vars(self, info_class):
401 # RepoInfo instance init
402 if info_class:
403 self._info_class = info_class
404 else:
405 self._info_class = ReposInfo()
406 # archives
407 self._apps_filename = "apps.json"
Alexd9fd85e2019-05-16 16:58:24 -0500408
Alex3bc95f62020-03-05 17:00:04 -0600409 # repository index
410 self._repo_index = {}
411 self._mainteiners_index = {}
412
413 self._apps = {}
414
415 # init package versions storage
416 self._versions_mirantis = {}
417 self._versions_other = {}
418
419 def _init_archives(self):
Alexd9fd85e2019-05-16 16:58:24 -0500420 # Init version files
421 self.versionstgz = TGZFile(
422 self._versions_arch,
423 label="MCP Configuration Checker: Package versions archive"
424 )
425 self.desctgz = TGZFile(
426 self._desc_arch,
427 label="MCP Configuration Checker: Package descriptions archive"
428 )
Alexd0391d42019-05-21 18:48:55 -0500429
430 # section / app
431 self._apps = _safe_load(
432 self._apps_filename,
433 self.desctgz
434 )
435
Alex0ed4f762019-05-17 17:55:33 -0500436 # indices
437 self._repo_index = _safe_load(
438 _repos_index_filename,
439 self.versionstgz
440 )
441 self._mainteiners_index = _safe_load(
442 _mainteiners_index_filename,
443 self.versionstgz
444 )
Alexd9fd85e2019-05-16 16:58:24 -0500445
Alex0ed4f762019-05-17 17:55:33 -0500446 # versions
447 self._versions_mirantis = _safe_load(
448 _mirantis_versions_filename,
449 self.versionstgz
450 )
451 self._versions_other = _safe_load(
452 _other_versions_filename,
453 self.versionstgz
454 )
Alexd9fd85e2019-05-16 16:58:24 -0500455
    def __init__(self, arch_folder=None, info_class=None):
        """Create the manager; optionally override the archive folder
        and/or the ReposInfo-like source (order of inits matters:
        vars before folders before archives).
        """
        # Perform inits
        self._init_vars(info_class)
        self._init_folders(arch_folder)
        # Ensure that versions folder exists
        logger_cli.debug(ensure_folder_exists(self._arch_folder))
        # Preload/create archives
        self._init_archives()
        self.init_done = True
465
466 def __call__(self, *args, **kwargs):
467 if self.init_done:
468 return self
469 else:
470 return self.__init__(self, *args, **kwargs)
471
Alexd9fd85e2019-05-16 16:58:24 -0500472 def _create_repo_header(self, p):
473 _header = "_".join([
474 p['tag'],
475 p['subset'],
476 p['release'],
477 p['ubuntu-release'],
478 p['type'],
479 p['arch']
480 ])
Alex0ed4f762019-05-17 17:55:33 -0500481 return _get_value_index(self._repo_index, p, header=_header)
Alexd9fd85e2019-05-16 16:58:24 -0500482
Alex0ed4f762019-05-17 17:55:33 -0500483 def _get_indexed_values(self, pair):
484 _h, _m = pair.split('-')
485 return self._repo_index[_h], self._mainteiners_index[_m]
Alexd9fd85e2019-05-16 16:58:24 -0500486
Alexd0391d42019-05-21 18:48:55 -0500487 def _update_pkg_version(self, _d, n, v, md5, s, a, h_index, m_index):
Alexd9fd85e2019-05-16 16:58:24 -0500488 """Method updates package version record in global dict
489 """
490 # 'if'*4 operation is pretty expensive when using it 100k in a row
491 # so try/except is a better way to go, even faster than 'reduce'
Alex0ed4f762019-05-17 17:55:33 -0500492 _pair = "-".join([h_index, m_index])
Alexd0391d42019-05-21 18:48:55 -0500493 _info = {
494 'repo': [_pair],
495 'section': s,
496 'app': a
497 }
Alexd9fd85e2019-05-16 16:58:24 -0500498 try:
499 # try to load list
Alexd0391d42019-05-21 18:48:55 -0500500 _list = _d[n][v][md5]['repo']
Alexd9fd85e2019-05-16 16:58:24 -0500501 # cast it as set() and union()
Alex0ed4f762019-05-17 17:55:33 -0500502 _list = set(_list).union([_pair])
Alexd9fd85e2019-05-16 16:58:24 -0500503 # cast back as set() is not serializeable
Alexd0391d42019-05-21 18:48:55 -0500504 _d[n][v][md5]['repo'] = list(_list)
Alexd9fd85e2019-05-16 16:58:24 -0500505 return False
506 except KeyError:
507 # ok, this is fresh pkg. Do it slow way.
Alex0ed4f762019-05-17 17:55:33 -0500508 if n in _d:
Alexd9fd85e2019-05-16 16:58:24 -0500509 # there is such pkg already
Alex0ed4f762019-05-17 17:55:33 -0500510 if v in _d[n]:
Alexd9fd85e2019-05-16 16:58:24 -0500511 # there is such version, check md5
Alex0ed4f762019-05-17 17:55:33 -0500512 if md5 in _d[n][v]:
Alexd9fd85e2019-05-16 16:58:24 -0500513 # just add new repo header
Alexd0391d42019-05-21 18:48:55 -0500514 if _pair not in _d[n][v][md5]['repo']:
515 _d[n][v][md5]['repo'].append(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500516 else:
517 # check if such index is here...
518 _existing = filter(
Alexd0391d42019-05-21 18:48:55 -0500519 lambda i: _pair in _d[n][v][i]['repo'],
Alex0ed4f762019-05-17 17:55:33 -0500520 _d[n][v]
Alexd9fd85e2019-05-16 16:58:24 -0500521 )
522 if _existing:
523 # Yuck! Same version had different MD5
Alex0ed4f762019-05-17 17:55:33 -0500524 _r, _m = self._get_indexed_values(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500525 logger_cli.error(
526 "# ERROR: Package version has multiple MD5s "
527 "in '{}': {}:{}:{}".format(
Alex0ed4f762019-05-17 17:55:33 -0500528 _r,
Alexd9fd85e2019-05-16 16:58:24 -0500529 n,
530 v,
531 md5
532 )
533 )
Alexd0391d42019-05-21 18:48:55 -0500534 _d[n][v][md5] = _info
Alexd9fd85e2019-05-16 16:58:24 -0500535 else:
536 # this is new version for existing package
Alex0ed4f762019-05-17 17:55:33 -0500537 _d[n][v] = {
Alexd0391d42019-05-21 18:48:55 -0500538 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500539 }
540 return False
541 else:
542 # this is new pakcage
Alex0ed4f762019-05-17 17:55:33 -0500543 _d[n] = {
Alexd9fd85e2019-05-16 16:58:24 -0500544 v: {
Alexd0391d42019-05-21 18:48:55 -0500545 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500546 }
547 }
548 return True
549
550 def _save_repo_descriptions(self, repo_props, desc):
551 # form the filename for the repo and save it
552 self.desctgz.add_file(
553 self._create_repo_header(repo_props),
554 json.dumps(desc)
555 )
556
557 # def get_description(self, repo_props, name, md5=None):
558 # """Gets target description
559 # """
560 # _filename = self._create_repo_header(repo_props)
561 # # check if it is present in cache
562 # if _filename in self._desc_cache:
563 # _descs = self._desc_cache[_filename]
564 # else:
565 # # load data
566 # _descs = self.desctgz.get_file(_filename)
567 # # Serialize it
568 # _descs = json.loads(_descs)
569 # self._desc_cache[_filename] = _descs
570 # # return target desc
571 # if name in _descs and md5 in _descs[name]:
572 # return _descs[name][md5]
573 # else:
574 # return None
575
    def parse_tag(self, tag, descriptions=False, apps=False):
        """Download and parse Package.gz files for specific tag
        By default, descriptions not saved
        due to huge resulting file size and slow processing

        Updates self._versions_mirantis / self._versions_other (split by
        maintainer email domain) and persists the indices; optionally
        also saves per-repo descriptions and the section/app mapping.
        """
        # init gzip and downloader
        _info = self._info_class.get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        # count repos per kind up-front to size the progress bar
        for _c, _d in _info.items():
            for _ur, _l in _d.items():
                if _ur in ubuntu_releases or _ur in kaas_ubuntu_active:
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            "  '{}', {} components, {} ubuntu repos, {} other/uknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.items():
            # we do not need url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url = if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.items():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _index += 1
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, GET 'Packages.gz'".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch']
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    if not _raw:
                        # empty repo...
                        _progress.clearline()
                        logger_cli.warning(
                            "# WARNING: Empty file: '{}'".format(
                                _p['filepath']
                            )
                        )
                        continue
                    else:
                        _raw = _raw.decode("utf-8")
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _lines = _raw.splitlines()
                    # break lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
                    # if there is no empty line at end, add it
                    if _lines[-1] != '':
                        _lines.append('')
                    # Process lines (Debian control format: 'key: value',
                    # leading-space continuation lines, blank line = record end)
                    for _line in _lines:
                        if not _line:
                            # if the line is empty, process pkg data gathered
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            _mainteiner = _desc['maintainer']

                            # 'source' names the app this package belongs to
                            if 'source' in _desc:
                                _ap = _desc['source'].lower()
                            else:
                                _ap = "-"

                            if apps:
                                # insert app
                                _sc = _desc['section'].lower()
                                if 'source' in _desc:
                                    _ap = _desc['source'].lower()
                                else:
                                    _ap = "-"

                                try:
                                    _tmp = set(self._apps[_sc][_ap][_name])
                                    _tmp.add(_desc['architecture'])
                                    self._apps[_sc][_ap][_name] = list(_tmp)
                                except KeyError:
                                    nested_set(
                                        self._apps,
                                        [_sc, _ap, _name],
                                        [_desc['architecture']]
                                    )

                            # Check is mainteiner is Mirantis
                            if _mainteiner.endswith("@mirantis.com>"):
                                # update mirantis versions
                                if self._update_pkg_version(
                                    self._versions_mirantis,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1
                            else:
                                # update other versions
                                if self._update_pkg_version(
                                    self._versions_other,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            # continuation of a multi-line field
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            if _line.endswith(":"):
                                _key = _line[:-1]
                                _value = ""
                            else:
                                _key, _value = _line.split(": ", 1)
                            _key = _key.lower()
                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            _repos_index_filename,
            json.dumps(self._repo_index),
            replace=True
        )
        self.versionstgz.add_file(
            _mainteiners_index_filename,
            json.dumps(self._mainteiners_index),
            replace=True
        )
        if apps:
            self.desctgz.add_file(
                self._apps_filename,
                json.dumps(self._apps),
                replace=True
            )

        return
784
Alexd0391d42019-05-21 18:48:55 -0500785 def fetch_versions(self, tag, descriptions=False, apps=False):
Alexd9fd85e2019-05-16 16:58:24 -0500786 """Executes parsing for specific tag
787 """
788 if descriptions:
789 logger_cli.warning(
790 "\n\n# !!! WARNING: Saving repo descriptions "
791 "consumes huge amount of disk space\n\n"
792 )
793 # if there is no such tag, parse it from repoinfo
Alexd9fd85e2019-05-16 16:58:24 -0500794 logger_cli.info("# Fetching versions for {}".format(tag))
Alexd0391d42019-05-21 18:48:55 -0500795 self.parse_tag(tag, descriptions=descriptions, apps=apps)
Alex0ed4f762019-05-17 17:55:33 -0500796 logger_cli.info("-> saving updated versions")
797 self.versionstgz.add_file(
798 _mirantis_versions_filename,
799 json.dumps(self._versions_mirantis),
800 replace=True
801 )
802 self.versionstgz.add_file(
803 _other_versions_filename,
804 json.dumps(self._versions_other),
805 replace=True
806 )
Alexd9fd85e2019-05-16 16:58:24 -0500807
808 def build_repos(self, url, tag=None):
809 """Builds versions data for selected tag, or for all of them
810 """
Alexd9fd85e2019-05-16 16:58:24 -0500811 # recoursively walk the mirrors
812 # and gather all of the repos for 'tag' or all of the tags
Alex3bc95f62020-03-05 17:00:04 -0600813 self._info_class.fetch_repos(url, tag=tag)
Alexd9fd85e2019-05-16 16:58:24 -0500814
Alex74dc1352019-05-17 13:18:24 -0500815 def _build_action(self, url, tags):
816 for t in tags:
Alex6df29ad2019-05-31 17:55:32 -0500817 logger_cli.info("# Building repo info for '{}'".format(t))
Alex74dc1352019-05-17 13:18:24 -0500818 self.build_repos(url, tag=t)
819
Alexd0391d42019-05-21 18:48:55 -0500820 def get_available_tags(self, tag=None):
821 # Populate action tags
Alex3bc95f62020-03-05 17:00:04 -0600822 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd0391d42019-05-21 18:48:55 -0500823
824 _tags = []
825 if tag in major:
826 _tags.append(tag)
827 if tag in updates:
828 _tags.append(tag + ".update")
829 if tag in hotfix:
830 _tags.append(tag + ".hotfix")
831
832 return _tags
833
    def action_for_tag(
        self,
        url,
        tag,
        action=None,
        descriptions=None,
        apps=None
    ):
        """Executes action for every tag from all collections

        Supported actions: 'list' (print available tags and return),
        'build' (rebuild repo info for *tag*), 'fetch' (parse versions
        for every available variant of *tag*).
        """
        if not action:
            # NOTE(review): no 'return' here — execution falls through to
            # the tag lookup below even with no action set; confirm intent
            logger_cli.info("# No action set, nothing to do")
        # See if this is a list action
        if action == "list":
            _all = self._info_class.list_tags()
            if _all:
                # Print pretty list and exit
                logger_cli.info("# Tags available at '{}':".format(url))
                for t in _all:
                    _ri = self._repo_index
                    # tag counts as parsed when any indexed repo carries it
                    _isparsed = any(
                        [k for k, v in _ri.items()
                         if v['props']['tag'] == t]
                    )
                    if _isparsed:
                        logger_cli.info(get_tag_label(t, parsed=True))
                    else:
                        logger_cli.info(get_tag_label(t))
            else:
                logger_cli.info("# Not tags parsed yet for '{}':".format(url))

            # exit
            return

        if action == "build":
            self._build_action(url, [tag])

        # Populate action tags
        _action_tags = self.get_available_tags(tag)

        if not _action_tags:
            logger_cli.info(
                "# Tag of '{}' not found. "
                "Consider rebuilding repos info.".format(tag)
            )
        else:
            logger_cli.info(
                "-> tags to process: {}".format(
                    ", ".join(_action_tags)
                )
            )
            # Execute actions
            if action == "fetch":
                for t in _action_tags:
                    self.fetch_versions(t, descriptions=descriptions, apps=apps)

        logger_cli.info("# Done.")
891
    def show_package(self, name):
        """Print every known section/app/version/md5/repo of package *name*.

        NOTE(review): relies on self.get_package_versions, which is
        defined elsewhere in this module.
        """
        # get the package data
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            # \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            # section
            for _s in sorted(_p):
                # app
                for _a in sorted(_p[_s]):
                    _o = ""
                    _mm = []
                    # get and sort tags
                    for _v in sorted(_p[_s][_a]):
                        _o += "\n" + " "*8 + _v + ':\n'
                        # get and sort tags
                        for _md5 in sorted(_p[_s][_a][_v]):
                            _o += " "*16 + _md5 + "\n"
                            # get and sort repo headers
                            for _r in sorted(_p[_s][_a][_v][_md5]):
                                _o += " "*24 + _r.replace('_', ' ') + '\n'
                                # collect distinct maintainers for the summary
                                _m = _p[_s][_a][_v][_md5][_r]["maintainer"]
                                if _m not in _mm:
                                    _mm.append(_m)

                    logger_cli.info(
                        "\n# Package: {}/{}/{}\nMaintainers: {}".format(
                            _s,
                            _a,
                            name,
                            ", ".join(_mm)
                        )
                    )

                    logger_cli.info(_o)
Alex74dc1352019-05-17 13:18:24 -0500935
Alexd0391d42019-05-21 18:48:55 -0500936 @staticmethod
937 def get_apps(versions, name):
938 _all = True if name == '*' else False
Alexcf91b182019-05-31 11:57:07 -0500939 _s_max = _a_max = _p_max = _v_max = 0
Alexd0391d42019-05-21 18:48:55 -0500940 _rows = []
941 for _p in versions.keys():
942 _vs = versions[_p]
Alex3bc95f62020-03-05 17:00:04 -0600943 for _v, _d1 in _vs.items():
944 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -0500945 if _all or name == _info['app']:
946 _s_max = max(len(_info['section']), _s_max)
947 _a_max = max(len(_info['app']), _a_max)
Alexcf91b182019-05-31 11:57:07 -0500948 _p_max = max(len(_p), _p_max)
949 _v_max = max(len(_v), _v_max)
Alexd0391d42019-05-21 18:48:55 -0500950 _rows.append([
951 _info['section'],
952 _info['app'],
Alexcf91b182019-05-31 11:57:07 -0500953 _p,
954 _v,
955 _md5,
956 len(_info['repo'])
Alexd0391d42019-05-21 18:48:55 -0500957 ])
Alexcf91b182019-05-31 11:57:07 -0500958 # format columns
959 # section
960 _fmt = "{:"+str(_s_max)+"} "
961 # app
962 _fmt += "{:"+str(_a_max)+"} "
963 # package name
964 _fmt += "{:"+str(_p_max)+"} "
965 # version
966 _fmt += "{:"+str(_v_max)+"} "
967 # md5 and number of repos is fixed
968 _fmt += "{} in {} repos"
969
970 # fill rows
971 _rows = [_fmt.format(s, a, p, v, m, l) for s, a, p, v, m, l in _rows]
Alexd0391d42019-05-21 18:48:55 -0500972 _rows.sort()
973 return _rows
974
975 def show_app(self, name):
976 c = 0
977 rows = self.get_apps(self._versions_mirantis, name)
978 if rows:
Alexcf91b182019-05-31 11:57:07 -0500979 logger_cli.info("\n# Mirantis packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500980 logger_cli.info("\n".join(rows))
981 c += 1
982 rows = self.get_apps(self._versions_other, name)
983 if rows:
Alexcf91b182019-05-31 11:57:07 -0500984 logger_cli.info("\n# Other packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500985 logger_cli.info("\n".join(rows))
986 c += 1
987 if c == 0:
988 logger_cli.info("\n# No app found for '{}'".format(name))
989
990 def get_mirantis_pkg_names(self):
991 # Mirantis maintainers only
992 return set(
993 self._versions_mirantis.keys()
994 ) - set(
995 self._versions_other.keys()
996 )
997
998 def get_other_pkg_names(self):
999 # Non-mirantis Maintainers
1000 return set(
1001 self._versions_other.keys()
1002 ) - set(
1003 self._versions_mirantis.keys()
1004 )
1005
1006 def get_mixed_pkg_names(self):
1007 # Mixed maintainers
1008 return set(
1009 self._versions_mirantis.keys()
1010 ).intersection(set(
1011 self._versions_other.keys()
1012 ))
1013
1014 def is_mirantis(self, name, tag=None):
1015 """Method checks if this package is mainteined
1016 by mirantis in target tag repo
1017 """
1018 if name in self._versions_mirantis:
1019 # check tag
1020 if tag:
1021 _pkg = self.get_package_versions(
1022 name,
1023 tagged=True
1024 )
1025 _tags = []
1026 for s in _pkg.keys():
1027 for a in _pkg[s].keys():
1028 for t in _pkg[s][a].keys():
1029 _tags.append(t)
1030 if any([t.startswith(tag) for t in _tags]):
1031 return True
1032 else:
1033 return None
1034 else:
1035 return True
1036 elif name in self._versions_other:
1037 # check tag
1038 if tag:
1039 _pkg = self.get_package_versions(
1040 name,
1041 tagged=True
1042 )
1043 _tags = []
1044 for s in _pkg.keys():
1045 for a in _pkg[s].keys():
1046 for t in _pkg[s][a].keys():
1047 _tags.append(t)
1048 if any([t.startswith(tag) for t in _tags]):
1049 return False
1050 else:
1051 return None
1052 else:
1053 return False
1054 else:
1055 logger.error(
1056 "# ERROR: package '{}' not found "
1057 "while determining maintainer".format(
1058 name
1059 )
1060 )
1061 return None
1062
1063 def get_filtered_versions(
1064 self,
1065 name,
1066 tag=None,
1067 include=None,
1068 exclude=None
1069 ):
1070 """Method gets all the versions for the package
1071 and filters them using keys above
1072 """
1073 if tag:
Alex3bc95f62020-03-05 17:00:04 -06001074 tag = str(tag) if not isinstance(tag, str) else tag
Alexd0391d42019-05-21 18:48:55 -05001075 _out = {}
1076 _vs = self.get_package_versions(name, tagged=True)
1077 # iterate to filter out keywords
Alex3bc95f62020-03-05 17:00:04 -06001078 for s, apps in _vs.items():
1079 for a, _tt in apps.items():
1080 for t, vs in _tt.items():
Alexd0391d42019-05-21 18:48:55 -05001081 # filter tags
1082 if tag and t != tag and t.rsplit('.', 1)[0] != tag:
1083 continue
1084 # Skip hotfix tag
1085 if t == tag + ".hotfix":
1086 continue
Alex3bc95f62020-03-05 17:00:04 -06001087 for v, rp in vs.items():
1088 for h, p in rp.items():
Alexd0391d42019-05-21 18:48:55 -05001089 # filter headers with all keywords matching
1090 _h = re.split(r"[\-\_]+", h)
1091 _included = all([kw in _h for kw in include])
1092 _excluded = any([kw in _h for kw in exclude])
1093 if not _included or _excluded:
1094 continue
1095 else:
1096 nested_set(_out, [s, a, v], [])
1097 _dat = {
1098 "header": h
1099 }
1100 _dat.update(p)
1101 _out[s][a][v].append(_dat)
1102 return _out
1103
    def get_package_versions(self, name, tagged=False):
        """Method builds package version structure
        with repository properties included

        Merges the package's entries from the Mirantis and non-Mirantis
        indexes (other overwrites mirantis on version collisions, as it
        is applied second), then rebuilds the nesting with repo props
        resolved from the repo index.

        :param name: package name to look up in both version indexes
        :param tagged: when True the result is keyed as
                       {section: {app: {tag: {version: {header: props}}}}};
                       when False as
                       {section: {app: {version: {md5: {header: info}}}}}
        :return: nested dict as described above; empty dict when the
                 package is unknown
        """
        # get data
        _vs = {}

        if name in self._versions_mirantis:
            _vs.update(self._versions_mirantis[name])
        if name in self._versions_other:
            _vs.update(self._versions_other[name])

        # insert repo data, insert props into headers place
        _package = {}
        if tagged:
            for _v, _d1 in _vs.items():
                # use tag as a next step
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        # fresh props dict per repo pair
                        _rp = {}
                        # extract props for a repo
                        _r, _m = self._get_indexed_values(_pair)
                        # get tag
                        _tag = _r["props"]["tag"]
                        # cut tag from the header
                        # (header format assumed "<tag>_<rest>"; split once)
                        _cut_head = _r["header"].split("_", 1)[1]
                        # populate dict
                        _rp["maintainer"] = _m
                        _rp["md5"] = _md5
                        _rp.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _tag, _v, _cut_head],
                            _rp
                        )
        else:
            for _v, _d1 in _vs.items():
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        _r, _m = self._get_indexed_values(_pair)
                        # NOTE(review): _info is mutated in place here, so
                        # all headers of this (version, md5) share one dict
                        # carrying the props of the LAST repo pair merged —
                        # presumably intentional accumulation; verify if
                        # per-repo props must be kept distinct
                        _info["maintainer"] = _m
                        _info.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _v, _md5, _r["header"]],
                            _info
                        )

        return _package
1157
    def parse_repos(self):
        """Fetch and parse package versions for every known repo tag.

        Processes plain major tags first, then the same tag lists with
        '.update' and '.hotfix' suffixes appended, delegating each tag
        to fetch_versions().
        """
        # all tags to check, split into the three tag families
        major, updates, hotfix = self._info_class.list_tags(splitted=True)

        # major tags
        logger_cli.info("# Processing major tags")
        for _tag in major:
            self.fetch_versions(_tag)

        # updates tags
        logger_cli.info("# Processing update tags")
        for _tag in updates:
            self.fetch_versions(_tag + ".update")

        # hotfix tags
        logger_cli.info("# Processing hotfix tags")
        for _tag in hotfix:
            self.fetch_versions(_tag + ".hotfix")