blob: 6853a8f54f26f4348807dd04f552846f5384c915 [file] [log] [blame]
Alexd9fd85e2019-05-16 16:58:24 -05001import json
2import os
Alexd0391d42019-05-21 18:48:55 -05003import re
Alexd9fd85e2019-05-16 16:58:24 -05004from copy import deepcopy
5
Alex74dc1352019-05-17 13:18:24 -05006from cfg_checker.common import logger, logger_cli, nested_set
Alex0ed4f762019-05-17 17:55:33 -05007from cfg_checker.common.const import _mainteiners_index_filename
8from cfg_checker.common.const import _mirantis_versions_filename
9from cfg_checker.common.const import _other_versions_filename
Alexd9fd85e2019-05-16 16:58:24 -050010from cfg_checker.common.const import _pkg_desc_archive
11from cfg_checker.common.const import _repos_index_filename
12from cfg_checker.common.const import _repos_info_archive
13from cfg_checker.common.const import _repos_versions_archive
Alexd9fd85e2019-05-16 16:58:24 -050014from cfg_checker.common.const import ubuntu_releases
Alexeffa0682021-06-04 12:18:33 -050015from cfg_checker.common.const import kaas_ubuntu_active
16from cfg_checker.common.const import mcp_active_tags as active_tags
Alex7f69a6a2019-05-31 16:53:35 -050017from cfg_checker.common.file_utils import ensure_folder_exists
Alexd9fd85e2019-05-16 16:58:24 -050018from cfg_checker.common.file_utils import get_gzipped_file
19from cfg_checker.common.settings import pkg_dir
20from cfg_checker.helpers.console_utils import Progress
21from cfg_checker.helpers.tgz import TGZFile
22
23import requests
24from requests.exceptions import ConnectionError
25
26ext = ".json"
27
28
Alex0ed4f762019-05-17 17:55:33 -050029def get_tag_label(_tag, parsed=False):
Alex74dc1352019-05-17 13:18:24 -050030 # prettify the tag for printing
Alex0ed4f762019-05-17 17:55:33 -050031 if parsed:
32 _label = "+ "
33 else:
34 _label = " "
35
Alex74dc1352019-05-17 13:18:24 -050036 if _tag.endswith(".update"):
37 _label += "[updates] " + _tag.rsplit('.', 1)[0]
38 elif _tag.endswith(".hotfix"):
39 _label += " [hotfix] " + _tag.rsplit('.', 1)[0]
40 else:
41 _label += " "*10 + _tag
Alex0ed4f762019-05-17 17:55:33 -050042
Alex74dc1352019-05-17 13:18:24 -050043 return _label
44
45
Alex0ed4f762019-05-17 17:55:33 -050046def _get_value_index(_di, value, header=None):
Alex29ee76f2019-05-17 18:52:29 -050047 # Mainteiner names often uses specific chars
Alex3bc95f62020-03-05 17:00:04 -060048 # so make sure that value saved is str not str
49 # Python2
50 # _val = str(value, 'utf-8') if isinstance(value, str) else value
51 # Python3 has always utf-8 decoded value
52 _val = value
Alex0ed4f762019-05-17 17:55:33 -050053 if header:
Alex3bc95f62020-03-05 17:00:04 -060054 try:
Alexccb72e02021-01-20 16:38:03 -060055 _index = next(i for i in _di if header in _di[i]['header'])
Alex3bc95f62020-03-05 17:00:04 -060056 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060057 _index = str(len(_di) + 1)
Alex0ed4f762019-05-17 17:55:33 -050058 _di[_index] = {
59 "header": header,
Alex29ee76f2019-05-17 18:52:29 -050060 "props": _val
Alex0ed4f762019-05-17 17:55:33 -050061 }
Alex3bc95f62020-03-05 17:00:04 -060062 finally:
63 return _index
Alex0ed4f762019-05-17 17:55:33 -050064 else:
Alex3bc95f62020-03-05 17:00:04 -060065 try:
Alexccb72e02021-01-20 16:38:03 -060066 _index = next(i for i in _di if _val in _di[i])
Alex3bc95f62020-03-05 17:00:04 -060067 # iterator not empty, find index
Alex3bc95f62020-03-05 17:00:04 -060068 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060069 _index = str(len(_di) + 1)
Alex3bc95f62020-03-05 17:00:04 -060070 # on save, cast it as str
71 _di[_index] = _val
72 finally:
73 return _index
Alex0ed4f762019-05-17 17:55:33 -050074
75
def _safe_load(_f, _a):
    """Load and deserialize JSON file *_f* from archive *_a*.

    Returns an empty dict when the archive has no such file.
    """
    if _f not in _a.list_files():
        return {}
    logger_cli.debug(
        "... loading '{}':'{}'".format(
            _a.basefile,
            _f
        )
    )
    return json.loads(_a.get_file(_f, decode=True))
87
88
Alexd9fd85e2019-05-16 16:58:24 -050089def _n_url(url):
90 if url[-1] == '/':
91 return url
92 else:
93 return url + '/'
94
95
class ReposInfo(object):
    """Discovers and caches mirror repository info.

    Walks a Debian-style mirror tree, collecting links to 'Packages.gz'
    files per tag, and stores the result as one JSON file per tag inside
    a tgz archive (_repos_info_archive).
    """
    init_done = False

    def _init_vars(self):
        # list of discovered repos
        self.repos = []

    def _init_folders(self, arch_folder=None):
        # archive folder is overridable (tests / custom layouts)
        if arch_folder:
            self._arch_folder = arch_folder
            self._repofile = os.path.join(arch_folder, _repos_info_archive)
        else:
            self._arch_folder = os.path.join(pkg_dir, "versions")
            self._repofile = os.path.join(
                self._arch_folder,
                _repos_info_archive
            )

    def __init__(self, arch_folder=None):
        # perform inits
        self._init_vars()
        self._init_folders(arch_folder)
        self.init_done = True

    def __call__(self, *args, **kwargs):
        # reuse the already-initialized instance
        if self.init_done:
            return self
        else:
            # FIX: __init__ is already bound to self; the original code
            # passed `self` again, shifting it into 'arch_folder'
            return self.__init__(*args, **kwargs)

    @staticmethod
    def _ls_repo_page(url):
        """Return (dirs, files) referenced by anchors on a mirror page.

        Directories are anchors whose line ends with '-'; anything else
        is treated as a file. Connection failures yield ([], []).
        """
        # Yes, this is ugly. But it works ok for small HTMLs.
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            # FIX: requests exceptions have no '.message' attribute in
            # Python 3; format the exception object itself
            logger_cli.error("# ERROR: {}".format(e))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs. Anchors for ends with '-'
        _dirs = [_ln[_ln.index(_s)+6:_ln.index(_e)-1]
                 for _ln in a if _ln.startswith(_a) and _ln.endswith('-')]
        # Comprehension for files. Anchors ends with size
        _files = [_ln[_ln.index(_s)+6:_ln.index(_e)]
                  for _ln in a if _ln.startswith(_a) and not _ln.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        """Recursively walk the 'dists' tree appending Packages.gz URLs
        to *_list* (which is also returned).
        """
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        """Map each Packages.gz URL path into a repo item dict and
        append it to *_r*.
        """
        for _pkg_path in _path_list:
            _parts = _pkg_path.split('/')
            # path elements after 'dists', reversed:
            # [filename, binary-<arch>, type, release, ...]
            _kw = _parts[_parts.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                # strip the 'binary-' prefix (7 chars) when present
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

    def _find_tag(self, _t, _u, label=""):
        """Look for tag *_t* at url *_u*, optionally inside a *label*
        subfolder ('hotfix'/'update'); return a one-item repos dict or {}.
        """
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        """Walk the mirror at *url* and store repo info (links to
        Packages.gz) per tag into the repo info archive.

        tag: when given, only that tag (and its hotfix/update variants)
        is processed and re-fetched even if already archived.
        """
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            if "hotfix" in _tags:
                _tags.remove('hotfix')
            if "update" in _tags:
                _tags.remove('update')
            # Filter out not active tags
            logger_cli.info("Active tags for mcp: {}".format(
                ", ".join(active_tags)
            ))
            logger_cli.info("Active kaas ubuntu repos: {}".format(
                ", ".join(kaas_ubuntu_active)
            ))
            _active_tags = [t for t in _tags if t in active_tags]

            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _active_tags.extend(
                [t for t in _h_tags if t not in _tags and t in active_tags]
            )
            _active_tags.extend(
                [t for t in _u_tags if t not in _tags and t in active_tags]
            )
            _progress = Progress(len(_active_tags))
            _index = 0
            for _tag in _active_tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ".json"
            # skip tags already archived, unless a single tag was forced
            if _repotgz.has_file(_name) and not tag:
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger_cli.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger_cli.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases or _stag in kaas_ubuntu_active:
                    # if stag is the release, this is all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger_cli.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger_cli.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger_cli.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(_name, buf=json.dumps(_repo, indent=2))
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

    def list_tags(self, splitted=False):
        """List tags stored in the repo info archive.

        splitted=True returns (major, updates, hotfix) collections;
        otherwise a single list sorted numeric-first, then lexical.
        """
        _files = TGZFile(self._repofile).list_files()
        # all files in archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that ends with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that ends with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # 're' is imported at module level; the original did a
            # redundant dynamic import here
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits, sorted by up to three numeric components
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall(r"\d+", x)[:3])
            )

            return _dig + _lex

    def get_repoinfo(self, tag):
        """Load previously stored repo info for *tag* from the archive."""
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext, decode=True)
        return json.loads(_buf)
381
382
383class RepoManager(object):
Alex3bc95f62020-03-05 17:00:04 -0600384 init_done = False
Alexd9fd85e2019-05-16 16:58:24 -0500385
Alex3bc95f62020-03-05 17:00:04 -0600386 def _init_folders(self, arch_folder=None):
Alex9a4ad212020-10-01 18:04:25 -0500387 logger_cli.info("# Loading package versions data")
Alex3bc95f62020-03-05 17:00:04 -0600388 # overide arch folder if needed
389 if arch_folder:
390 self._arch_folder = arch_folder
391 else:
392 self._arch_folder = os.path.join(pkg_dir, "versions")
Alexd9fd85e2019-05-16 16:58:24 -0500393
Alex3bc95f62020-03-05 17:00:04 -0600394 self._versions_arch = os.path.join(
395 self._arch_folder,
396 _repos_versions_archive
397 )
398 self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
Alexd0391d42019-05-21 18:48:55 -0500399
Alex3bc95f62020-03-05 17:00:04 -0600400 def _init_vars(self, info_class):
401 # RepoInfo instance init
402 if info_class:
403 self._info_class = info_class
404 else:
405 self._info_class = ReposInfo()
406 # archives
407 self._apps_filename = "apps.json"
Alexd9fd85e2019-05-16 16:58:24 -0500408
Alex3bc95f62020-03-05 17:00:04 -0600409 # repository index
410 self._repo_index = {}
411 self._mainteiners_index = {}
412
413 self._apps = {}
414
415 # init package versions storage
416 self._versions_mirantis = {}
417 self._versions_other = {}
418
419 def _init_archives(self):
Alexd9fd85e2019-05-16 16:58:24 -0500420 # Init version files
421 self.versionstgz = TGZFile(
422 self._versions_arch,
423 label="MCP Configuration Checker: Package versions archive"
424 )
425 self.desctgz = TGZFile(
426 self._desc_arch,
427 label="MCP Configuration Checker: Package descriptions archive"
428 )
Alexd0391d42019-05-21 18:48:55 -0500429
430 # section / app
431 self._apps = _safe_load(
432 self._apps_filename,
433 self.desctgz
434 )
435
Alex0ed4f762019-05-17 17:55:33 -0500436 # indices
437 self._repo_index = _safe_load(
438 _repos_index_filename,
439 self.versionstgz
440 )
441 self._mainteiners_index = _safe_load(
442 _mainteiners_index_filename,
443 self.versionstgz
444 )
Alexd9fd85e2019-05-16 16:58:24 -0500445
Alex0ed4f762019-05-17 17:55:33 -0500446 # versions
447 self._versions_mirantis = _safe_load(
448 _mirantis_versions_filename,
449 self.versionstgz
450 )
451 self._versions_other = _safe_load(
452 _other_versions_filename,
453 self.versionstgz
454 )
Alexd9fd85e2019-05-16 16:58:24 -0500455
Alex3bc95f62020-03-05 17:00:04 -0600456 def __init__(self, arch_folder=None, info_class=None):
457 # Perform inits
458 self._init_vars(info_class)
459 self._init_folders(arch_folder)
460 # Ensure that versions folder exists
461 logger_cli.debug(ensure_folder_exists(self._arch_folder))
462 # Preload/create archives
463 self._init_archives()
464 self.init_done = True
465
466 def __call__(self, *args, **kwargs):
467 if self.init_done:
468 return self
469 else:
470 return self.__init__(self, *args, **kwargs)
471
Alexd9fd85e2019-05-16 16:58:24 -0500472 def _create_repo_header(self, p):
473 _header = "_".join([
474 p['tag'],
475 p['subset'],
476 p['release'],
477 p['ubuntu-release'],
478 p['type'],
479 p['arch']
480 ])
Alex0ed4f762019-05-17 17:55:33 -0500481 return _get_value_index(self._repo_index, p, header=_header)
Alexd9fd85e2019-05-16 16:58:24 -0500482
Alex0ed4f762019-05-17 17:55:33 -0500483 def _get_indexed_values(self, pair):
484 _h, _m = pair.split('-')
485 return self._repo_index[_h], self._mainteiners_index[_m]
Alexd9fd85e2019-05-16 16:58:24 -0500486
Alexd0391d42019-05-21 18:48:55 -0500487 def _update_pkg_version(self, _d, n, v, md5, s, a, h_index, m_index):
Alexd9fd85e2019-05-16 16:58:24 -0500488 """Method updates package version record in global dict
489 """
490 # 'if'*4 operation is pretty expensive when using it 100k in a row
491 # so try/except is a better way to go, even faster than 'reduce'
Alex0ed4f762019-05-17 17:55:33 -0500492 _pair = "-".join([h_index, m_index])
Alexd0391d42019-05-21 18:48:55 -0500493 _info = {
494 'repo': [_pair],
495 'section': s,
496 'app': a
497 }
Alexd9fd85e2019-05-16 16:58:24 -0500498 try:
499 # try to load list
Alexd0391d42019-05-21 18:48:55 -0500500 _list = _d[n][v][md5]['repo']
Alexd9fd85e2019-05-16 16:58:24 -0500501 # cast it as set() and union()
Alex0ed4f762019-05-17 17:55:33 -0500502 _list = set(_list).union([_pair])
Alexd9fd85e2019-05-16 16:58:24 -0500503 # cast back as set() is not serializeable
Alexd0391d42019-05-21 18:48:55 -0500504 _d[n][v][md5]['repo'] = list(_list)
Alexd9fd85e2019-05-16 16:58:24 -0500505 return False
506 except KeyError:
507 # ok, this is fresh pkg. Do it slow way.
Alex0ed4f762019-05-17 17:55:33 -0500508 if n in _d:
Alexd9fd85e2019-05-16 16:58:24 -0500509 # there is such pkg already
Alex0ed4f762019-05-17 17:55:33 -0500510 if v in _d[n]:
Alexd9fd85e2019-05-16 16:58:24 -0500511 # there is such version, check md5
Alex0ed4f762019-05-17 17:55:33 -0500512 if md5 in _d[n][v]:
Alexd9fd85e2019-05-16 16:58:24 -0500513 # just add new repo header
Alexd0391d42019-05-21 18:48:55 -0500514 if _pair not in _d[n][v][md5]['repo']:
515 _d[n][v][md5]['repo'].append(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500516 else:
517 # check if such index is here...
518 _existing = filter(
Alexd0391d42019-05-21 18:48:55 -0500519 lambda i: _pair in _d[n][v][i]['repo'],
Alex0ed4f762019-05-17 17:55:33 -0500520 _d[n][v]
Alexd9fd85e2019-05-16 16:58:24 -0500521 )
522 if _existing:
523 # Yuck! Same version had different MD5
Alex0ed4f762019-05-17 17:55:33 -0500524 _r, _m = self._get_indexed_values(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500525 logger_cli.error(
526 "# ERROR: Package version has multiple MD5s "
527 "in '{}': {}:{}:{}".format(
Alex0ed4f762019-05-17 17:55:33 -0500528 _r,
Alexd9fd85e2019-05-16 16:58:24 -0500529 n,
530 v,
531 md5
532 )
533 )
Alexd0391d42019-05-21 18:48:55 -0500534 _d[n][v][md5] = _info
Alexd9fd85e2019-05-16 16:58:24 -0500535 else:
536 # this is new version for existing package
Alex0ed4f762019-05-17 17:55:33 -0500537 _d[n][v] = {
Alexd0391d42019-05-21 18:48:55 -0500538 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500539 }
540 return False
541 else:
542 # this is new pakcage
Alex0ed4f762019-05-17 17:55:33 -0500543 _d[n] = {
Alexd9fd85e2019-05-16 16:58:24 -0500544 v: {
Alexd0391d42019-05-21 18:48:55 -0500545 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500546 }
547 }
548 return True
549
550 def _save_repo_descriptions(self, repo_props, desc):
551 # form the filename for the repo and save it
552 self.desctgz.add_file(
553 self._create_repo_header(repo_props),
554 json.dumps(desc)
555 )
556
557 # def get_description(self, repo_props, name, md5=None):
558 # """Gets target description
559 # """
560 # _filename = self._create_repo_header(repo_props)
561 # # check if it is present in cache
562 # if _filename in self._desc_cache:
563 # _descs = self._desc_cache[_filename]
564 # else:
565 # # load data
566 # _descs = self.desctgz.get_file(_filename)
567 # # Serialize it
568 # _descs = json.loads(_descs)
569 # self._desc_cache[_filename] = _descs
570 # # return target desc
571 # if name in _descs and md5 in _descs[name]:
572 # return _descs[name][md5]
573 # else:
574 # return None
575
Alexd0391d42019-05-21 18:48:55 -0500576 def parse_tag(self, tag, descriptions=False, apps=False):
Alexd9fd85e2019-05-16 16:58:24 -0500577 """Download and parse Package.gz files for specific tag
578 By default, descriptions not saved
579 due to huge resulting file size and slow processing
580 """
581 # init gzip and downloader
Alex3bc95f62020-03-05 17:00:04 -0600582 _info = self._info_class.get_repoinfo(tag)
Alexd9fd85e2019-05-16 16:58:24 -0500583 # calculate Packages.gz files to process
584 _baseurl = _info.pop("baseurl")
585 _total_components = len(_info.keys()) - 1
586 _ubuntu_package_repos = 0
587 _other_repos = 0
Alex3bc95f62020-03-05 17:00:04 -0600588 for _c, _d in _info.items():
589 for _ur, _l in _d.items():
Alexeffa0682021-06-04 12:18:33 -0500590 if _ur in ubuntu_releases or _ur in kaas_ubuntu_active:
Alexd9fd85e2019-05-16 16:58:24 -0500591 _ubuntu_package_repos += len(_l)
592 elif _ur != 'url':
593 _other_repos += len(_l)
594 logger_cli.info(
595 "-> loaded repository info for '{}'.\n"
596 " '{}', {} components, {} ubuntu repos, {} other/uknown".format(
597 _baseurl,
598 tag,
599 _total_components,
600 _ubuntu_package_repos,
601 _other_repos
602 )
603 )
604 # init progress bar
605 _progress = Progress(_ubuntu_package_repos)
606 _index = 0
607 _processed = 0
608 _new = 0
Alex3bc95f62020-03-05 17:00:04 -0600609 for _c, _d in _info.items():
Alexd9fd85e2019-05-16 16:58:24 -0500610 # we do not need url here, just get rid of it
611 if 'url' in _d:
612 _d.pop('url')
613 # _url = if 'url' in _d else _baseurl + _c
Alex3bc95f62020-03-05 17:00:04 -0600614 for _ur, _l in _d.items():
Alexd9fd85e2019-05-16 16:58:24 -0500615 # iterate package collections
616 for _p in _l:
617 # descriptions
618 if descriptions:
619 _descriptions = {}
620 # download and unzip
Alexd0391d42019-05-21 18:48:55 -0500621 _index += 1
622 _progress.write_progress(
623 _index,
624 note="/ {} {} {} {} {}, GET 'Packages.gz'".format(
625 _c,
626 _ur,
627 _p['ubuntu-release'],
628 _p['type'],
629 _p['arch']
630 )
631 )
632 _raw = get_gzipped_file(_p['filepath'])
633 if not _raw:
634 # empty repo...
635 _progress.clearline()
636 logger_cli.warning(
637 "# WARNING: Empty file: '{}'".format(
638 _p['filepath']
639 )
640 )
641 continue
Alex3bc95f62020-03-05 17:00:04 -0600642 else:
643 _raw = _raw.decode("utf-8")
Alexd9fd85e2019-05-16 16:58:24 -0500644 _progress.write_progress(
645 _index,
646 note="/ {} {} {} {} {}, {}/{}".format(
647 _c,
648 _ur,
649 _p['ubuntu-release'],
650 _p['type'],
651 _p['arch'],
652 _processed,
653 _new
654 )
655 )
Alexd9fd85e2019-05-16 16:58:24 -0500656 _lines = _raw.splitlines()
Alexd9fd85e2019-05-16 16:58:24 -0500657 # break lines collection into isolated pkg data
658 _pkg = {
659 "tag": tag,
660 "subset": _c,
661 "release": _ur
662 }
663 _pkg.update(_p)
664 _desc = {}
665 _key = _value = ""
Alexd0391d42019-05-21 18:48:55 -0500666 # if there is no empty line at end, add it
667 if _lines[-1] != '':
668 _lines.append('')
669 # Process lines
Alexd9fd85e2019-05-16 16:58:24 -0500670 for _line in _lines:
671 if not _line:
672 # if the line is empty, process pkg data gathered
673 _name = _desc['package']
674 _md5 = _desc['md5sum']
675 _version = _desc['version']
Alex0ed4f762019-05-17 17:55:33 -0500676 _mainteiner = _desc['maintainer']
677
Alexd0391d42019-05-21 18:48:55 -0500678 if 'source' in _desc:
679 _ap = _desc['source'].lower()
680 else:
681 _ap = "-"
682
683 if apps:
684 # insert app
685 _sc = _desc['section'].lower()
686 if 'source' in _desc:
687 _ap = _desc['source'].lower()
688 else:
689 _ap = "-"
690
691 try:
692 _tmp = set(self._apps[_sc][_ap][_name])
693 _tmp.add(_desc['architecture'])
694 self._apps[_sc][_ap][_name] = list(_tmp)
695 except KeyError:
696 nested_set(
697 self._apps,
698 [_sc, _ap, _name],
699 [_desc['architecture']]
700 )
701
Alex0ed4f762019-05-17 17:55:33 -0500702 # Check is mainteiner is Mirantis
703 if _mainteiner.endswith("@mirantis.com>"):
704 # update mirantis versions
705 if self._update_pkg_version(
706 self._versions_mirantis,
707 _name,
708 _version,
709 _md5,
Alexd0391d42019-05-21 18:48:55 -0500710 _desc['section'].lower(),
711 _ap,
Alex0ed4f762019-05-17 17:55:33 -0500712 self._create_repo_header(_pkg),
713 _get_value_index(
714 self._mainteiners_index,
715 _mainteiner
716 )
717 ):
718 _new += 1
719 else:
720 # update other versions
721 if self._update_pkg_version(
722 self._versions_other,
723 _name,
724 _version,
725 _md5,
Alexd0391d42019-05-21 18:48:55 -0500726 _desc['section'].lower(),
727 _ap,
Alex0ed4f762019-05-17 17:55:33 -0500728 self._create_repo_header(_pkg),
729 _get_value_index(
730 self._mainteiners_index,
731 _mainteiner
732 )
733 ):
734 _new += 1
Alexd9fd85e2019-05-16 16:58:24 -0500735
736 if descriptions:
737 _d_new = {
738 _md5: deepcopy(_desc)
739 }
740 try:
741 _descriptions[_name].update(_d_new)
742 except KeyError:
743 _descriptions[_name] = _d_new
744 # clear the data for next pkg
745 _processed += 1
746 _desc = {}
747 _key = ""
748 _value = ""
749 elif _line.startswith(' '):
750 _desc[_key] += "\n{}".format(_line)
751 else:
Alexccb72e02021-01-20 16:38:03 -0600752 if _line.endswith(":"):
753 _key = _line[:-1]
754 _value = ""
755 else:
756 _key, _value = _line.split(": ", 1)
757 # _key = _line[:_line.index(':')]
Alexd9fd85e2019-05-16 16:58:24 -0500758
Alexccb72e02021-01-20 16:38:03 -0600759 # _value = _line[_line.index(':')+1:]
760 # _value = _value if _value[0] != ' ' else _value[1:]
761 _key = _key.lower()
Alexd9fd85e2019-05-16 16:58:24 -0500762 _desc[_key] = _value
763 # save descriptions if needed
764 if descriptions:
765 _progress.clearline()
766 self._save_repo_descriptions(_pkg, _descriptions)
767
768 _progress.end()
769 # backup headers to disk
770 self.versionstgz.add_file(
Alex0ed4f762019-05-17 17:55:33 -0500771 _repos_index_filename,
Alexd9fd85e2019-05-16 16:58:24 -0500772 json.dumps(self._repo_index),
773 replace=True
774 )
Alex0ed4f762019-05-17 17:55:33 -0500775 self.versionstgz.add_file(
776 _mainteiners_index_filename,
777 json.dumps(self._mainteiners_index),
778 replace=True
779 )
Alexd0391d42019-05-21 18:48:55 -0500780 if apps:
781 self.desctgz.add_file(
782 self._apps_filename,
783 json.dumps(self._apps),
784 replace=True
785 )
786
Alexd9fd85e2019-05-16 16:58:24 -0500787 return
788
Alexd0391d42019-05-21 18:48:55 -0500789 def fetch_versions(self, tag, descriptions=False, apps=False):
Alexd9fd85e2019-05-16 16:58:24 -0500790 """Executes parsing for specific tag
791 """
792 if descriptions:
793 logger_cli.warning(
794 "\n\n# !!! WARNING: Saving repo descriptions "
795 "consumes huge amount of disk space\n\n"
796 )
797 # if there is no such tag, parse it from repoinfo
Alexd9fd85e2019-05-16 16:58:24 -0500798 logger_cli.info("# Fetching versions for {}".format(tag))
Alexd0391d42019-05-21 18:48:55 -0500799 self.parse_tag(tag, descriptions=descriptions, apps=apps)
Alex0ed4f762019-05-17 17:55:33 -0500800 logger_cli.info("-> saving updated versions")
801 self.versionstgz.add_file(
802 _mirantis_versions_filename,
803 json.dumps(self._versions_mirantis),
804 replace=True
805 )
806 self.versionstgz.add_file(
807 _other_versions_filename,
808 json.dumps(self._versions_other),
809 replace=True
810 )
Alexd9fd85e2019-05-16 16:58:24 -0500811
    def build_repos(self, url, tag=None):
        """Builds versions data for selected tag, or for all of them

        url: repos source url
        tag: optional single tag to (re)build
        """
        # recursively walk the mirrors
        # and gather all of the repos for 'tag' or all of the tags
        self._info_class.fetch_repos(url, tag=tag)
Alexd9fd85e2019-05-16 16:58:24 -0500818
Alex74dc1352019-05-17 13:18:24 -0500819 def _build_action(self, url, tags):
820 for t in tags:
Alex6df29ad2019-05-31 17:55:32 -0500821 logger_cli.info("# Building repo info for '{}'".format(t))
Alex74dc1352019-05-17 13:18:24 -0500822 self.build_repos(url, tag=t)
823
Alexd0391d42019-05-21 18:48:55 -0500824 def get_available_tags(self, tag=None):
825 # Populate action tags
Alex3bc95f62020-03-05 17:00:04 -0600826 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd0391d42019-05-21 18:48:55 -0500827
828 _tags = []
829 if tag in major:
830 _tags.append(tag)
831 if tag in updates:
832 _tags.append(tag + ".update")
833 if tag in hotfix:
834 _tags.append(tag + ".hotfix")
835
836 return _tags
837
Alexd9fd85e2019-05-16 16:58:24 -0500838 def action_for_tag(
839 self,
840 url,
841 tag,
842 action=None,
Alexd0391d42019-05-21 18:48:55 -0500843 descriptions=None,
844 apps=None
Alexd9fd85e2019-05-16 16:58:24 -0500845 ):
846 """Executes action for every tag from all collections
847 """
848 if not action:
849 logger_cli.info("# No action set, nothing to do")
Alex74dc1352019-05-17 13:18:24 -0500850 # See if this is a list action
Alexd9fd85e2019-05-16 16:58:24 -0500851 if action == "list":
Alex3bc95f62020-03-05 17:00:04 -0600852 _all = self._info_class.list_tags()
Alex6df29ad2019-05-31 17:55:32 -0500853 if _all:
854 # Print pretty list and exit
855 logger_cli.info("# Tags available at '{}':".format(url))
856 for t in _all:
857 _ri = self._repo_index
858 _isparsed = any(
Alex3bc95f62020-03-05 17:00:04 -0600859 [k for k, v in _ri.items()
Alex6df29ad2019-05-31 17:55:32 -0500860 if v['props']['tag'] == t]
861 )
862 if _isparsed:
863 logger_cli.info(get_tag_label(t, parsed=True))
864 else:
865 logger_cli.info(get_tag_label(t))
866 else:
867 logger_cli.info("# Not tags parsed yet for '{}':".format(url))
868
Alex74dc1352019-05-17 13:18:24 -0500869 # exit
Alexd9fd85e2019-05-16 16:58:24 -0500870 return
Alex74dc1352019-05-17 13:18:24 -0500871
Alex6df29ad2019-05-31 17:55:32 -0500872 if action == "build":
873 self._build_action(url, [tag])
874
Alexd0391d42019-05-21 18:48:55 -0500875 # Populate action tags
876 _action_tags = self.get_available_tags(tag)
877
Alexd9fd85e2019-05-16 16:58:24 -0500878 if not _action_tags:
879 logger_cli.info(
880 "# Tag of '{}' not found. "
881 "Consider rebuilding repos info.".format(tag)
882 )
Alex74dc1352019-05-17 13:18:24 -0500883 else:
Alexd9fd85e2019-05-16 16:58:24 -0500884 logger_cli.info(
Alex74dc1352019-05-17 13:18:24 -0500885 "-> tags to process: {}".format(
Alexd9fd85e2019-05-16 16:58:24 -0500886 ", ".join(_action_tags)
887 )
888 )
Alex74dc1352019-05-17 13:18:24 -0500889 # Execute actions
Alex6df29ad2019-05-31 17:55:32 -0500890 if action == "fetch":
Alexd9fd85e2019-05-16 16:58:24 -0500891 for t in _action_tags:
Alexd0391d42019-05-21 18:48:55 -0500892 self.fetch_versions(t, descriptions=descriptions, apps=apps)
Alexd9fd85e2019-05-16 16:58:24 -0500893
894 logger_cli.info("# Done.")
895
Alex74dc1352019-05-17 13:18:24 -0500896 def show_package(self, name):
897 # get the package data
898 _p = self.get_package_versions(name)
899 if not _p:
900 logger_cli.warning(
901 "# WARNING: Package '{}' not found".format(name)
902 )
903 else:
904 # print package info using sorted tags from headers
905 # Package: name
906 # [u/h] tag \t <version>
907 # \t <version>
908 # <10symbols> \t <md5> \t sorted headers with no tag
909 # ...
Alexd0391d42019-05-21 18:48:55 -0500910 # section
Alex92e07ce2019-05-31 16:00:03 -0500911 for _s in sorted(_p):
Alexd0391d42019-05-21 18:48:55 -0500912 # app
Alex92e07ce2019-05-31 16:00:03 -0500913 for _a in sorted(_p[_s]):
Alexcf91b182019-05-31 11:57:07 -0500914 _o = ""
915 _mm = []
Alexd0391d42019-05-21 18:48:55 -0500916 # get and sort tags
Alex92e07ce2019-05-31 16:00:03 -0500917 for _v in sorted(_p[_s][_a]):
Alexd0391d42019-05-21 18:48:55 -0500918 _o += "\n" + " "*8 + _v + ':\n'
919 # get and sort tags
Alex92e07ce2019-05-31 16:00:03 -0500920 for _md5 in sorted(_p[_s][_a][_v]):
Alexd0391d42019-05-21 18:48:55 -0500921 _o += " "*16 + _md5 + "\n"
922 # get and sort repo headers
Alex92e07ce2019-05-31 16:00:03 -0500923 for _r in sorted(_p[_s][_a][_v][_md5]):
Alexcf91b182019-05-31 11:57:07 -0500924 _o += " "*24 + _r.replace('_', ' ') + '\n'
925 _m = _p[_s][_a][_v][_md5][_r]["maintainer"]
926 if _m not in _mm:
927 _mm.append(_m)
Alex74dc1352019-05-17 13:18:24 -0500928
Alexcf91b182019-05-31 11:57:07 -0500929 logger_cli.info(
930 "\n# Package: {}/{}/{}\nMaintainers: {}".format(
931 _s,
932 _a,
933 name,
934 ", ".join(_mm)
935 )
936 )
937
938 logger_cli.info(_o)
Alex74dc1352019-05-17 13:18:24 -0500939
Alexd0391d42019-05-21 18:48:55 -0500940 @staticmethod
941 def get_apps(versions, name):
942 _all = True if name == '*' else False
Alexcf91b182019-05-31 11:57:07 -0500943 _s_max = _a_max = _p_max = _v_max = 0
Alexd0391d42019-05-21 18:48:55 -0500944 _rows = []
945 for _p in versions.keys():
946 _vs = versions[_p]
Alex3bc95f62020-03-05 17:00:04 -0600947 for _v, _d1 in _vs.items():
948 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -0500949 if _all or name == _info['app']:
950 _s_max = max(len(_info['section']), _s_max)
951 _a_max = max(len(_info['app']), _a_max)
Alexcf91b182019-05-31 11:57:07 -0500952 _p_max = max(len(_p), _p_max)
953 _v_max = max(len(_v), _v_max)
Alexd0391d42019-05-21 18:48:55 -0500954 _rows.append([
955 _info['section'],
956 _info['app'],
Alexcf91b182019-05-31 11:57:07 -0500957 _p,
958 _v,
959 _md5,
960 len(_info['repo'])
Alexd0391d42019-05-21 18:48:55 -0500961 ])
Alexcf91b182019-05-31 11:57:07 -0500962 # format columns
963 # section
964 _fmt = "{:"+str(_s_max)+"} "
965 # app
966 _fmt += "{:"+str(_a_max)+"} "
967 # package name
968 _fmt += "{:"+str(_p_max)+"} "
969 # version
970 _fmt += "{:"+str(_v_max)+"} "
971 # md5 and number of repos is fixed
972 _fmt += "{} in {} repos"
973
974 # fill rows
975 _rows = [_fmt.format(s, a, p, v, m, l) for s, a, p, v, m, l in _rows]
Alexd0391d42019-05-21 18:48:55 -0500976 _rows.sort()
977 return _rows
978
979 def show_app(self, name):
980 c = 0
981 rows = self.get_apps(self._versions_mirantis, name)
982 if rows:
Alexcf91b182019-05-31 11:57:07 -0500983 logger_cli.info("\n# Mirantis packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500984 logger_cli.info("\n".join(rows))
985 c += 1
986 rows = self.get_apps(self._versions_other, name)
987 if rows:
Alexcf91b182019-05-31 11:57:07 -0500988 logger_cli.info("\n# Other packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500989 logger_cli.info("\n".join(rows))
990 c += 1
991 if c == 0:
992 logger_cli.info("\n# No app found for '{}'".format(name))
993
994 def get_mirantis_pkg_names(self):
995 # Mirantis maintainers only
996 return set(
997 self._versions_mirantis.keys()
998 ) - set(
999 self._versions_other.keys()
1000 )
1001
1002 def get_other_pkg_names(self):
1003 # Non-mirantis Maintainers
1004 return set(
1005 self._versions_other.keys()
1006 ) - set(
1007 self._versions_mirantis.keys()
1008 )
1009
1010 def get_mixed_pkg_names(self):
1011 # Mixed maintainers
1012 return set(
1013 self._versions_mirantis.keys()
1014 ).intersection(set(
1015 self._versions_other.keys()
1016 ))
1017
1018 def is_mirantis(self, name, tag=None):
1019 """Method checks if this package is mainteined
1020 by mirantis in target tag repo
1021 """
1022 if name in self._versions_mirantis:
1023 # check tag
1024 if tag:
1025 _pkg = self.get_package_versions(
1026 name,
1027 tagged=True
1028 )
1029 _tags = []
1030 for s in _pkg.keys():
1031 for a in _pkg[s].keys():
1032 for t in _pkg[s][a].keys():
1033 _tags.append(t)
1034 if any([t.startswith(tag) for t in _tags]):
1035 return True
1036 else:
1037 return None
1038 else:
1039 return True
1040 elif name in self._versions_other:
1041 # check tag
1042 if tag:
1043 _pkg = self.get_package_versions(
1044 name,
1045 tagged=True
1046 )
1047 _tags = []
1048 for s in _pkg.keys():
1049 for a in _pkg[s].keys():
1050 for t in _pkg[s][a].keys():
1051 _tags.append(t)
1052 if any([t.startswith(tag) for t in _tags]):
1053 return False
1054 else:
1055 return None
1056 else:
1057 return False
1058 else:
1059 logger.error(
1060 "# ERROR: package '{}' not found "
1061 "while determining maintainer".format(
1062 name
1063 )
1064 )
1065 return None
1066
1067 def get_filtered_versions(
1068 self,
1069 name,
1070 tag=None,
1071 include=None,
1072 exclude=None
1073 ):
1074 """Method gets all the versions for the package
1075 and filters them using keys above
1076 """
1077 if tag:
Alex3bc95f62020-03-05 17:00:04 -06001078 tag = str(tag) if not isinstance(tag, str) else tag
Alexd0391d42019-05-21 18:48:55 -05001079 _out = {}
1080 _vs = self.get_package_versions(name, tagged=True)
1081 # iterate to filter out keywords
Alex3bc95f62020-03-05 17:00:04 -06001082 for s, apps in _vs.items():
1083 for a, _tt in apps.items():
1084 for t, vs in _tt.items():
Alexd0391d42019-05-21 18:48:55 -05001085 # filter tags
1086 if tag and t != tag and t.rsplit('.', 1)[0] != tag:
1087 continue
1088 # Skip hotfix tag
1089 if t == tag + ".hotfix":
1090 continue
Alex3bc95f62020-03-05 17:00:04 -06001091 for v, rp in vs.items():
1092 for h, p in rp.items():
Alexd0391d42019-05-21 18:48:55 -05001093 # filter headers with all keywords matching
1094 _h = re.split(r"[\-\_]+", h)
1095 _included = all([kw in _h for kw in include])
1096 _excluded = any([kw in _h for kw in exclude])
1097 if not _included or _excluded:
1098 continue
1099 else:
1100 nested_set(_out, [s, a, v], [])
1101 _dat = {
1102 "header": h
1103 }
1104 _dat.update(p)
1105 _out[s][a][v].append(_dat)
1106 return _out
1107
    def get_package_versions(self, name, tagged=False):
        """Method builds package version structure
        with repository properties included

        :param name: package name to look up in both maintainer indexes
        :param tagged: when True, group by repo tag:
                       section -> app -> tag -> version -> header-tail;
                       when False, group by version:
                       section -> app -> version -> md5 -> header
        :return: nested dict as described above; empty if 'name' is
                 unknown to both indexes
        """
        # get data; merge Mirantis and non-Mirantis entries for the name
        _vs = {}

        if name in self._versions_mirantis:
            _vs.update(self._versions_mirantis[name])
        if name in self._versions_other:
            _vs.update(self._versions_other[name])

        # insert repo data, insert props into headers place
        _package = {}
        if tagged:
            for _v, _d1 in _vs.items():
                # use tag as a next step
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        # fresh dict per repo pair: entries must not
                        # share state between repos
                        _rp = {}
                        # extract props for a repo
                        _r, _m = self._get_indexed_values(_pair)
                        # get tag
                        _tag = _r["props"]["tag"]
                        # cut tag from the header
                        # (header format is "<tag>_<rest>")
                        _cut_head = _r["header"].split("_", 1)[1]
                        # populate dict
                        _rp["maintainer"] = _m
                        _rp["md5"] = _md5
                        _rp.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _tag, _v, _cut_head],
                            _rp
                        )
        else:
            for _v, _d1 in _vs.items():
                for _md5, _info in _d1.items():
                    _s = _info['section']
                    _a = _info['app']
                    for _pair in _info['repo']:
                        _r, _m = self._get_indexed_values(_pair)
                        # NOTE(review): this updates _info in place, i.e.
                        # mutates the dict referenced from the
                        # self._versions_* indexes; later repo pairs
                        # overwrite props of earlier ones -- confirm this
                        # accumulation is intended
                        _info["maintainer"] = _m
                        _info.update(_r["props"])
                        nested_set(
                            _package,
                            [_s, _a, _v, _md5, _r["header"]],
                            _info
                        )

        return _package
1161
Alexd9fd85e2019-05-16 16:58:24 -05001162 def parse_repos(self):
1163 # all tags to check
Alex3bc95f62020-03-05 17:00:04 -06001164 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd9fd85e2019-05-16 16:58:24 -05001165
1166 # major tags
1167 logger_cli.info("# Processing major tags")
1168 for _tag in major:
1169 self.fetch_versions(_tag)
1170
1171 # updates tags
1172 logger_cli.info("# Processing update tags")
1173 for _tag in updates:
1174 self.fetch_versions(_tag + ".update")
1175
1176 # hotfix tags
1177 logger_cli.info("# Processing hotfix tags")
1178 for _tag in hotfix:
1179 self.fetch_versions(_tag + ".hotfix")