blob: 02d13578b376ee0d2ef40435074f90ce907ae342 [file] [log] [blame]
Alexd9fd85e2019-05-16 16:58:24 -05001import json
2import os
Alexd0391d42019-05-21 18:48:55 -05003import re
Alexd9fd85e2019-05-16 16:58:24 -05004from copy import deepcopy
5
Alex74dc1352019-05-17 13:18:24 -05006from cfg_checker.common import logger, logger_cli, nested_set
Alex0ed4f762019-05-17 17:55:33 -05007from cfg_checker.common.const import _mainteiners_index_filename
8from cfg_checker.common.const import _mirantis_versions_filename
9from cfg_checker.common.const import _other_versions_filename
Alexd9fd85e2019-05-16 16:58:24 -050010from cfg_checker.common.const import _pkg_desc_archive
11from cfg_checker.common.const import _repos_index_filename
12from cfg_checker.common.const import _repos_info_archive
13from cfg_checker.common.const import _repos_versions_archive
Alexd9fd85e2019-05-16 16:58:24 -050014from cfg_checker.common.const import ubuntu_releases
Alexeffa0682021-06-04 12:18:33 -050015from cfg_checker.common.const import kaas_ubuntu_active
16from cfg_checker.common.const import mcp_active_tags as active_tags
Alex7f69a6a2019-05-31 16:53:35 -050017from cfg_checker.common.file_utils import ensure_folder_exists
Alexd9fd85e2019-05-16 16:58:24 -050018from cfg_checker.common.file_utils import get_gzipped_file
19from cfg_checker.common.settings import pkg_dir
20from cfg_checker.helpers.console_utils import Progress
21from cfg_checker.helpers.tgz import TGZFile
22
23import requests
24from requests.exceptions import ConnectionError
25
26ext = ".json"
27
28
def get_tag_label(_tag, parsed=False):
    """Build a printable label for a tag name.

    Tags already parsed into the index get a '+ ' prefix, others two
    spaces. '.update'/'.hotfix' suffixes are rendered as bracketed
    markers (both 10 chars wide) so plain tags align with them.
    """
    _label = "+ " if parsed else "  "
    if _tag.endswith(".update"):
        _label += "[updates] " + _tag.rsplit('.', 1)[0]
    elif _tag.endswith(".hotfix"):
        _label += " [hotfix] " + _tag.rsplit('.', 1)[0]
    else:
        _label += " "*10 + _tag
    return _label
44
45
def _get_value_index(_di, value, header=None):
    """Return the str index key under which *value* lives in *_di*.

    Existing entries are matched first (using the historical ``in``
    containment check); when absent, the value is stored under a new
    ``str(len(_di) + 1)`` key. With *header* given, entries are dicts of
    ``{'header': ..., 'props': value}`` matched by header.
    """
    # Python3 values arrive utf-8 decoded already, no conversion needed
    _val = value
    if header:
        for _i in _di:
            if header in _di[_i]['header']:
                return _i
        _new_index = str(len(_di) + 1)
        _di[_new_index] = {
            "header": header,
            "props": _val
        }
        return _new_index
    else:
        for _i in _di:
            if _val in _di[_i]:
                return _i
        # new value; keys are always saved as str
        _new_index = str(len(_di) + 1)
        _di[_new_index] = _val
        return _new_index
Alex0ed4f762019-05-17 17:55:33 -050074
75
def _safe_load(_f, _a):
    """Load JSON file *_f* from archive *_a*; return {} when missing."""
    if _f not in _a.list_files():
        return {}
    logger_cli.debug(
        "... loading '{}':'{}'".format(
            _a.basefile,
            _f
        )
    )
    return json.loads(_a.get_file(_f, decode=True))
87
88
def _n_url(url):
    """Normalize *url* to end with exactly one trailing slash.

    Robustness fix: the previous ``url[-1]`` check raised IndexError
    for an empty string; ``endswith`` handles it gracefully.
    """
    return url if url.endswith('/') else url + '/'
94
95
class ReposInfo(object):
    """Discovers and archives mirror repository structure.

    Walks an HTTP mirror tree, locates 'Packages.gz' files for every
    active tag / subtag / ubuntu release and stores the mapping as one
    JSON file per tag inside the repo-info tgz archive.
    """
    # set to True once __init__ completed; checked by __call__
    init_done = False

    def _init_vars(self):
        # collected repo descriptors
        self.repos = []

    def _init_folders(self, arch_folder=None):
        """Resolve archive folder (default '<pkg_dir>/versions') and
        the repo-info archive path."""
        if arch_folder:
            self._arch_folder = arch_folder
            self._repofile = os.path.join(arch_folder, _repos_info_archive)
        else:
            self._arch_folder = os.path.join(pkg_dir, "versions")
            self._repofile = os.path.join(
                self._arch_folder,
                _repos_info_archive
            )

    def __init__(self, arch_folder=None):
        # perform inits
        self._init_vars()
        self._init_folders(arch_folder)
        self.init_done = True

    def __call__(self, *args, **kwargs):
        """Return the instance, re-initializing it first if needed.

        Fix: previously this returned ``self.__init__(self, ...)``,
        which passed ``self`` twice to the bound method and always
        returned ``None`` instead of the instance.
        """
        if not self.init_done:
            self.__init__(*args, **kwargs)
        return self

    @staticmethod
    def _ls_repo_page(url):
        """List a mirror index page; return ([dir names], [file names])."""
        # Yes, this is ugly. But it works ok for small HTMLs.
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            # Fix: Python3 exceptions have no '.message' attribute;
            # format the exception itself instead
            logger_cli.error("# ERROR: {}".format(e))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs. Anchors for ends with '-'
        _dirs = [ll[ll.index(_s)+6:ll.index(_e)-1]
                 for ll in a if ll.startswith(_a) and ll.endswith('-')]
        # Comprehension for files. Anchors ends with size
        _files = [ll[ll.index(_s)+6:ll.index(_e)]
                  for ll in a if ll.startswith(_a) and not ll.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        """Recursively walk the 'dists' tree collecting Packages.gz URLs
        into *_list* (also returned)."""
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        """Append a repo descriptor to *_r* for every Packages.gz path.

        Fields are taken positionally from the tail of the path:
        '<...>/dists/<release>/<type>/<binary-arch>/Packages.gz'.
        """
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                # strip the 'binary-' prefix, e.g. 'binary-amd64' -> 'amd64'
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

    def _find_tag(self, _t, _u, label=""):
        """Probe for tag *_t* at *_u* (or its *label* subfolder).

        Returns '{<label>: {"baseurl": ..., "all": {}}}' when found,
        '{}' otherwise.
        """
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        """Gather Packages.gz links for *tag* (or all active tags) from
        the mirror at *url* and store them as JSON in the info archive.
        """
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            if "hotfix" in _tags:
                _tags.remove('hotfix')
            if "update" in _tags:
                _tags.remove('update')
            # Filter out not active tags
            logger_cli.info("Active tags for mcp: {}".format(
                ", ".join(active_tags)
            ))
            logger_cli.info("Active kaas ubuntu repos: {}".format(
                ", ".join(kaas_ubuntu_active)
            ))
            _active_tags = [t for t in _tags if t in active_tags]

            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _active_tags.extend(
                [t for t in _h_tags if t not in _tags and t in active_tags]
            )
            _active_tags.extend(
                [t for t in _u_tags if t not in _tags and t in active_tags]
            )
            _progress = Progress(len(_active_tags))
            _index = 0
            for _tag in _active_tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ".json"
            if _repotgz.has_file(_name) and not tag:
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger_cli.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger_cli.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases or _stag in kaas_ubuntu_active:
                    # if stag is the release, this is all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger_cli.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger_cli.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger_cli.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(
                _name,
                buf=json.dumps(_repo, indent=2),
                replace=True
            )
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

    def list_tags(self, splitted=False):
        """List tags stored in the repo-info archive.

        With splitted=True returns (major, updates, hotfix) sets/lists;
        otherwise a single list, numeric tags sorted by version first,
        lexical tags after.
        """
        _files = TGZFile(self._repofile).list_files()
        # all files in archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that ends with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that ends with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # uses the module-level 'import re' (the redundant local
            # dynamic import was removed)
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits, sorted by up to three numeric components
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall(r"\d+", x)[:3])
            )

            return _dig + _lex

    def get_repoinfo(self, tag):
        """Load and return the parsed JSON repo info for *tag*."""
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext, decode=True)
        return json.loads(_buf)
385
386
387class RepoManager(object):
Alex3bc95f62020-03-05 17:00:04 -0600388 init_done = False
Alexd9fd85e2019-05-16 16:58:24 -0500389
Alex3bc95f62020-03-05 17:00:04 -0600390 def _init_folders(self, arch_folder=None):
Alex9a4ad212020-10-01 18:04:25 -0500391 logger_cli.info("# Loading package versions data")
Alex3bc95f62020-03-05 17:00:04 -0600392 # overide arch folder if needed
393 if arch_folder:
394 self._arch_folder = arch_folder
395 else:
396 self._arch_folder = os.path.join(pkg_dir, "versions")
Alexd9fd85e2019-05-16 16:58:24 -0500397
Alex3bc95f62020-03-05 17:00:04 -0600398 self._versions_arch = os.path.join(
399 self._arch_folder,
400 _repos_versions_archive
401 )
402 self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
Alexd0391d42019-05-21 18:48:55 -0500403
Alex3bc95f62020-03-05 17:00:04 -0600404 def _init_vars(self, info_class):
405 # RepoInfo instance init
406 if info_class:
407 self._info_class = info_class
408 else:
409 self._info_class = ReposInfo()
410 # archives
411 self._apps_filename = "apps.json"
Alexd9fd85e2019-05-16 16:58:24 -0500412
Alex3bc95f62020-03-05 17:00:04 -0600413 # repository index
414 self._repo_index = {}
415 self._mainteiners_index = {}
416
417 self._apps = {}
418
419 # init package versions storage
420 self._versions_mirantis = {}
421 self._versions_other = {}
422
423 def _init_archives(self):
Alexd9fd85e2019-05-16 16:58:24 -0500424 # Init version files
425 self.versionstgz = TGZFile(
426 self._versions_arch,
427 label="MCP Configuration Checker: Package versions archive"
428 )
429 self.desctgz = TGZFile(
430 self._desc_arch,
431 label="MCP Configuration Checker: Package descriptions archive"
432 )
Alexd0391d42019-05-21 18:48:55 -0500433
434 # section / app
435 self._apps = _safe_load(
436 self._apps_filename,
437 self.desctgz
438 )
439
Alex0ed4f762019-05-17 17:55:33 -0500440 # indices
441 self._repo_index = _safe_load(
442 _repos_index_filename,
443 self.versionstgz
444 )
445 self._mainteiners_index = _safe_load(
446 _mainteiners_index_filename,
447 self.versionstgz
448 )
Alexd9fd85e2019-05-16 16:58:24 -0500449
Alex0ed4f762019-05-17 17:55:33 -0500450 # versions
451 self._versions_mirantis = _safe_load(
452 _mirantis_versions_filename,
453 self.versionstgz
454 )
455 self._versions_other = _safe_load(
456 _other_versions_filename,
457 self.versionstgz
458 )
Alexd9fd85e2019-05-16 16:58:24 -0500459
    def __init__(self, arch_folder=None, info_class=None):
        """Create the manager and preload archives.

        :param arch_folder: optional override for the versions folder
        :param info_class: pre-built ReposInfo-like instance; a default
            one is created when omitted
        """
        # Perform inits
        self._init_vars(info_class)
        self._init_folders(arch_folder)
        # Ensure that versions folder exists
        logger_cli.debug(ensure_folder_exists(self._arch_folder))
        # Preload/create archives
        self._init_archives()
        self.init_done = True
469
470 def __call__(self, *args, **kwargs):
471 if self.init_done:
472 return self
473 else:
474 return self.__init__(self, *args, **kwargs)
475
Alexd9fd85e2019-05-16 16:58:24 -0500476 def _create_repo_header(self, p):
477 _header = "_".join([
478 p['tag'],
479 p['subset'],
480 p['release'],
481 p['ubuntu-release'],
482 p['type'],
483 p['arch']
484 ])
Alex0ed4f762019-05-17 17:55:33 -0500485 return _get_value_index(self._repo_index, p, header=_header)
Alexd9fd85e2019-05-16 16:58:24 -0500486
Alex0ed4f762019-05-17 17:55:33 -0500487 def _get_indexed_values(self, pair):
488 _h, _m = pair.split('-')
489 return self._repo_index[_h], self._mainteiners_index[_m]
Alexd9fd85e2019-05-16 16:58:24 -0500490
Alexd0391d42019-05-21 18:48:55 -0500491 def _update_pkg_version(self, _d, n, v, md5, s, a, h_index, m_index):
Alexd9fd85e2019-05-16 16:58:24 -0500492 """Method updates package version record in global dict
493 """
494 # 'if'*4 operation is pretty expensive when using it 100k in a row
495 # so try/except is a better way to go, even faster than 'reduce'
Alex0ed4f762019-05-17 17:55:33 -0500496 _pair = "-".join([h_index, m_index])
Alexd0391d42019-05-21 18:48:55 -0500497 _info = {
498 'repo': [_pair],
499 'section': s,
500 'app': a
501 }
Alexd9fd85e2019-05-16 16:58:24 -0500502 try:
503 # try to load list
Alexd0391d42019-05-21 18:48:55 -0500504 _list = _d[n][v][md5]['repo']
Alexd9fd85e2019-05-16 16:58:24 -0500505 # cast it as set() and union()
Alex0ed4f762019-05-17 17:55:33 -0500506 _list = set(_list).union([_pair])
Alexd9fd85e2019-05-16 16:58:24 -0500507 # cast back as set() is not serializeable
Alexd0391d42019-05-21 18:48:55 -0500508 _d[n][v][md5]['repo'] = list(_list)
Alexd9fd85e2019-05-16 16:58:24 -0500509 return False
510 except KeyError:
511 # ok, this is fresh pkg. Do it slow way.
Alex0ed4f762019-05-17 17:55:33 -0500512 if n in _d:
Alexd9fd85e2019-05-16 16:58:24 -0500513 # there is such pkg already
Alex0ed4f762019-05-17 17:55:33 -0500514 if v in _d[n]:
Alexd9fd85e2019-05-16 16:58:24 -0500515 # there is such version, check md5
Alex0ed4f762019-05-17 17:55:33 -0500516 if md5 in _d[n][v]:
Alexd9fd85e2019-05-16 16:58:24 -0500517 # just add new repo header
Alexd0391d42019-05-21 18:48:55 -0500518 if _pair not in _d[n][v][md5]['repo']:
519 _d[n][v][md5]['repo'].append(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500520 else:
521 # check if such index is here...
522 _existing = filter(
Alexd0391d42019-05-21 18:48:55 -0500523 lambda i: _pair in _d[n][v][i]['repo'],
Alex0ed4f762019-05-17 17:55:33 -0500524 _d[n][v]
Alexd9fd85e2019-05-16 16:58:24 -0500525 )
526 if _existing:
527 # Yuck! Same version had different MD5
Alex0ed4f762019-05-17 17:55:33 -0500528 _r, _m = self._get_indexed_values(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500529 logger_cli.error(
530 "# ERROR: Package version has multiple MD5s "
531 "in '{}': {}:{}:{}".format(
Alex0ed4f762019-05-17 17:55:33 -0500532 _r,
Alexd9fd85e2019-05-16 16:58:24 -0500533 n,
534 v,
535 md5
536 )
537 )
Alexd0391d42019-05-21 18:48:55 -0500538 _d[n][v][md5] = _info
Alexd9fd85e2019-05-16 16:58:24 -0500539 else:
540 # this is new version for existing package
Alex0ed4f762019-05-17 17:55:33 -0500541 _d[n][v] = {
Alexd0391d42019-05-21 18:48:55 -0500542 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500543 }
544 return False
545 else:
546 # this is new pakcage
Alex0ed4f762019-05-17 17:55:33 -0500547 _d[n] = {
Alexd9fd85e2019-05-16 16:58:24 -0500548 v: {
Alexd0391d42019-05-21 18:48:55 -0500549 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500550 }
551 }
552 return True
553
554 def _save_repo_descriptions(self, repo_props, desc):
555 # form the filename for the repo and save it
556 self.desctgz.add_file(
557 self._create_repo_header(repo_props),
558 json.dumps(desc)
559 )
560
561 # def get_description(self, repo_props, name, md5=None):
562 # """Gets target description
563 # """
564 # _filename = self._create_repo_header(repo_props)
565 # # check if it is present in cache
566 # if _filename in self._desc_cache:
567 # _descs = self._desc_cache[_filename]
568 # else:
569 # # load data
570 # _descs = self.desctgz.get_file(_filename)
571 # # Serialize it
572 # _descs = json.loads(_descs)
573 # self._desc_cache[_filename] = _descs
574 # # return target desc
575 # if name in _descs and md5 in _descs[name]:
576 # return _descs[name][md5]
577 # else:
578 # return None
579
    def parse_tag(self, tag, descriptions=False, apps=False):
        """Download and parse Package.gz files for specific tag

        By default, descriptions are not saved due to huge resulting
        file size and slow processing.

        :param tag: tag whose repo info was previously fetched into the
            repo-info archive
        :param descriptions: also archive full package descriptions
        :param apps: also collect section/app/package -> arch mapping
        """
        # init gzip and downloader
        _info = self._info_class.get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        # NOTE(review): 'baseurl' is already popped above, so the extra
        # '-1' looks off by one — confirm; only affects the log line
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        # count repos per kind to size the progress bar / log summary
        for _c, _d in _info.items():
            for _ur, _l in _d.items():
                if _ur in ubuntu_releases or _ur in kaas_ubuntu_active:
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            " '{}', {} components, {} ubuntu repos, {} other/uknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.items():
            # we do not need url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url =  if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.items():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _index += 1
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, GET 'Packages.gz'".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch']
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    if not _raw:
                        # empty repo...
                        _progress.clearline()
                        logger_cli.warning(
                            "# WARNING: Empty file: '{}'".format(
                                _p['filepath']
                            )
                        )
                        continue
                    else:
                        _raw = _raw.decode("utf-8")
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _lines = _raw.splitlines()
                    # break lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
                    # if there is no empty line at end, add it
                    # (an empty line is the per-package record delimiter
                    # in Debian 'Packages' format)
                    if _lines[-1] != '':
                        _lines.append('')
                    # Process lines
                    for _line in _lines:
                        if not _line:
                            # if the line is empty, process pkg data gathered
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            _mainteiner = _desc['maintainer']

                            # app defaults to the 'source' field when present
                            if 'source' in _desc:
                                _ap = _desc['source'].lower()
                            else:
                                _ap = "-"

                            if apps:
                                # insert app
                                _sc = _desc['section'].lower()
                                if 'source' in _desc:
                                    _ap = _desc['source'].lower()
                                else:
                                    _ap = "-"

                                # extend existing arch list or create the
                                # nested section/app/name entry
                                try:
                                    _tmp = set(self._apps[_sc][_ap][_name])
                                    _tmp.add(_desc['architecture'])
                                    self._apps[_sc][_ap][_name] = list(_tmp)
                                except KeyError:
                                    nested_set(
                                        self._apps,
                                        [_sc, _ap, _name],
                                        [_desc['architecture']]
                                    )

                            # Check is mainteiner is Mirantis
                            if _mainteiner.endswith("@mirantis.com>"):
                                # update mirantis versions
                                if self._update_pkg_version(
                                    self._versions_mirantis,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1
                            else:
                                # update other versions
                                if self._update_pkg_version(
                                    self._versions_other,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            # continuation line of a multi-line field
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            # 'key: value' (or bare 'key:' with empty value)
                            if _line.endswith(":"):
                                _key = _line[:-1]
                                _value = ""
                            else:
                                _key, _value = _line.split(": ", 1)
                            _key = _key.lower()
                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            _repos_index_filename,
            json.dumps(self._repo_index),
            replace=True
        )
        self.versionstgz.add_file(
            _mainteiners_index_filename,
            json.dumps(self._mainteiners_index),
            replace=True
        )
        if apps:
            self.desctgz.add_file(
                self._apps_filename,
                json.dumps(self._apps),
                replace=True
            )

        return
788
Alexd0391d42019-05-21 18:48:55 -0500789 def fetch_versions(self, tag, descriptions=False, apps=False):
Alexd9fd85e2019-05-16 16:58:24 -0500790 """Executes parsing for specific tag
791 """
792 if descriptions:
793 logger_cli.warning(
794 "\n\n# !!! WARNING: Saving repo descriptions "
795 "consumes huge amount of disk space\n\n"
796 )
797 # if there is no such tag, parse it from repoinfo
Alexd9fd85e2019-05-16 16:58:24 -0500798 logger_cli.info("# Fetching versions for {}".format(tag))
Alexd0391d42019-05-21 18:48:55 -0500799 self.parse_tag(tag, descriptions=descriptions, apps=apps)
Alex0ed4f762019-05-17 17:55:33 -0500800 logger_cli.info("-> saving updated versions")
801 self.versionstgz.add_file(
802 _mirantis_versions_filename,
803 json.dumps(self._versions_mirantis),
804 replace=True
805 )
806 self.versionstgz.add_file(
807 _other_versions_filename,
808 json.dumps(self._versions_other),
809 replace=True
810 )
Alexd9fd85e2019-05-16 16:58:24 -0500811
812 def build_repos(self, url, tag=None):
813 """Builds versions data for selected tag, or for all of them
814 """
Alexd9fd85e2019-05-16 16:58:24 -0500815 # recoursively walk the mirrors
816 # and gather all of the repos for 'tag' or all of the tags
Alex3bc95f62020-03-05 17:00:04 -0600817 self._info_class.fetch_repos(url, tag=tag)
Alexd9fd85e2019-05-16 16:58:24 -0500818
Alex74dc1352019-05-17 13:18:24 -0500819 def _build_action(self, url, tags):
820 for t in tags:
Alex6df29ad2019-05-31 17:55:32 -0500821 logger_cli.info("# Building repo info for '{}'".format(t))
Alex74dc1352019-05-17 13:18:24 -0500822 self.build_repos(url, tag=t)
823
Alexd0391d42019-05-21 18:48:55 -0500824 def get_available_tags(self, tag=None):
825 # Populate action tags
Alex3bc95f62020-03-05 17:00:04 -0600826 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd0391d42019-05-21 18:48:55 -0500827
828 _tags = []
829 if tag in major:
830 _tags.append(tag)
831 if tag in updates:
832 _tags.append(tag + ".update")
833 if tag in hotfix:
834 _tags.append(tag + ".hotfix")
835
836 return _tags
837
Alexd9fd85e2019-05-16 16:58:24 -0500838 def action_for_tag(
839 self,
840 url,
841 tag,
842 action=None,
Alexd0391d42019-05-21 18:48:55 -0500843 descriptions=None,
844 apps=None
Alexd9fd85e2019-05-16 16:58:24 -0500845 ):
846 """Executes action for every tag from all collections
847 """
848 if not action:
849 logger_cli.info("# No action set, nothing to do")
Alex74dc1352019-05-17 13:18:24 -0500850 # See if this is a list action
Alexd9fd85e2019-05-16 16:58:24 -0500851 if action == "list":
Alex3bc95f62020-03-05 17:00:04 -0600852 _all = self._info_class.list_tags()
Alex6df29ad2019-05-31 17:55:32 -0500853 if _all:
854 # Print pretty list and exit
855 logger_cli.info("# Tags available at '{}':".format(url))
856 for t in _all:
857 _ri = self._repo_index
858 _isparsed = any(
Alex3bc95f62020-03-05 17:00:04 -0600859 [k for k, v in _ri.items()
Alex6df29ad2019-05-31 17:55:32 -0500860 if v['props']['tag'] == t]
861 )
862 if _isparsed:
863 logger_cli.info(get_tag_label(t, parsed=True))
864 else:
865 logger_cli.info(get_tag_label(t))
866 else:
867 logger_cli.info("# Not tags parsed yet for '{}':".format(url))
868
Alex74dc1352019-05-17 13:18:24 -0500869 # exit
Alexd9fd85e2019-05-16 16:58:24 -0500870 return
Alex74dc1352019-05-17 13:18:24 -0500871
Alex6df29ad2019-05-31 17:55:32 -0500872 if action == "build":
873 self._build_action(url, [tag])
874
Alexd0391d42019-05-21 18:48:55 -0500875 # Populate action tags
876 _action_tags = self.get_available_tags(tag)
877
Alexd9fd85e2019-05-16 16:58:24 -0500878 if not _action_tags:
879 logger_cli.info(
880 "# Tag of '{}' not found. "
881 "Consider rebuilding repos info.".format(tag)
882 )
Alex74dc1352019-05-17 13:18:24 -0500883 else:
Alexd9fd85e2019-05-16 16:58:24 -0500884 logger_cli.info(
Alex74dc1352019-05-17 13:18:24 -0500885 "-> tags to process: {}".format(
Alexd9fd85e2019-05-16 16:58:24 -0500886 ", ".join(_action_tags)
887 )
888 )
Alex74dc1352019-05-17 13:18:24 -0500889 # Execute actions
Alex6df29ad2019-05-31 17:55:32 -0500890 if action == "fetch":
Alexd9fd85e2019-05-16 16:58:24 -0500891 for t in _action_tags:
Alexd0391d42019-05-21 18:48:55 -0500892 self.fetch_versions(t, descriptions=descriptions, apps=apps)
Alexd9fd85e2019-05-16 16:58:24 -0500893
894 logger_cli.info("# Done.")
895
    def show_package(self, name):
        """Pretty-print all known versions/md5s/repos of package *name*."""
        # get the package data
        # NOTE(review): structure is expected to be
        # {section: {app: {version: {md5: {repo_header: {...}}}}}} —
        # confirm against get_package_versions (defined elsewhere)
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            #  \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            # section
            for _s in sorted(_p):
                # app
                for _a in sorted(_p[_s]):
                    _o = ""
                    _mm = []  # unique maintainers seen for this app
                    # get and sort versions
                    for _v in sorted(_p[_s][_a]):
                        _o += "\n" + " "*8 + _v + ':\n'
                        # get and sort md5 sums
                        for _md5 in sorted(_p[_s][_a][_v]):
                            _o += " "*16 + _md5 + "\n"
                            # get and sort repo headers
                            for _r in sorted(_p[_s][_a][_v][_md5]):
                                _o += " "*24 + _r.replace('_', ' ') + '\n'
                                _m = _p[_s][_a][_v][_md5][_r]["maintainer"]
                                if _m not in _mm:
                                    _mm.append(_m)

                    logger_cli.info(
                        "\n# Package: {}/{}/{}\nMaintainers: {}".format(
                            _s,
                            _a,
                            name,
                            ", ".join(_mm)
                        )
                    )

                    logger_cli.info(_o)
Alex74dc1352019-05-17 13:18:24 -0500939
Alexd0391d42019-05-21 18:48:55 -0500940 @staticmethod
941 def get_apps(versions, name):
942 _all = True if name == '*' else False
Alexcf91b182019-05-31 11:57:07 -0500943 _s_max = _a_max = _p_max = _v_max = 0
Alexd0391d42019-05-21 18:48:55 -0500944 _rows = []
945 for _p in versions.keys():
946 _vs = versions[_p]
Alex3bc95f62020-03-05 17:00:04 -0600947 for _v, _d1 in _vs.items():
948 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -0500949 if _all or name == _info['app']:
950 _s_max = max(len(_info['section']), _s_max)
951 _a_max = max(len(_info['app']), _a_max)
Alexcf91b182019-05-31 11:57:07 -0500952 _p_max = max(len(_p), _p_max)
953 _v_max = max(len(_v), _v_max)
Alexd0391d42019-05-21 18:48:55 -0500954 _rows.append([
955 _info['section'],
956 _info['app'],
Alexcf91b182019-05-31 11:57:07 -0500957 _p,
958 _v,
959 _md5,
960 len(_info['repo'])
Alexd0391d42019-05-21 18:48:55 -0500961 ])
Alexcf91b182019-05-31 11:57:07 -0500962 # format columns
963 # section
964 _fmt = "{:"+str(_s_max)+"} "
965 # app
966 _fmt += "{:"+str(_a_max)+"} "
967 # package name
968 _fmt += "{:"+str(_p_max)+"} "
969 # version
970 _fmt += "{:"+str(_v_max)+"} "
971 # md5 and number of repos is fixed
972 _fmt += "{} in {} repos"
973
974 # fill rows
975 _rows = [_fmt.format(s, a, p, v, m, l) for s, a, p, v, m, l in _rows]
Alexd0391d42019-05-21 18:48:55 -0500976 _rows.sort()
977 return _rows
978
979 def show_app(self, name):
980 c = 0
981 rows = self.get_apps(self._versions_mirantis, name)
982 if rows:
Alexcf91b182019-05-31 11:57:07 -0500983 logger_cli.info("\n# Mirantis packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500984 logger_cli.info("\n".join(rows))
985 c += 1
986 rows = self.get_apps(self._versions_other, name)
987 if rows:
Alexcf91b182019-05-31 11:57:07 -0500988 logger_cli.info("\n# Other packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500989 logger_cli.info("\n".join(rows))
990 c += 1
991 if c == 0:
992 logger_cli.info("\n# No app found for '{}'".format(name))
993
994 def get_mirantis_pkg_names(self):
995 # Mirantis maintainers only
996 return set(
997 self._versions_mirantis.keys()
998 ) - set(
999 self._versions_other.keys()
1000 )
1001
1002 def get_other_pkg_names(self):
1003 # Non-mirantis Maintainers
1004 return set(
1005 self._versions_other.keys()
1006 ) - set(
1007 self._versions_mirantis.keys()
1008 )
1009
1010 def get_mixed_pkg_names(self):
1011 # Mixed maintainers
1012 return set(
1013 self._versions_mirantis.keys()
1014 ).intersection(set(
1015 self._versions_other.keys()
1016 ))
1017
1018 def is_mirantis(self, name, tag=None):
1019 """Method checks if this package is mainteined
1020 by mirantis in target tag repo
1021 """
1022 if name in self._versions_mirantis:
1023 # check tag
1024 if tag:
1025 _pkg = self.get_package_versions(
1026 name,
1027 tagged=True
1028 )
1029 _tags = []
1030 for s in _pkg.keys():
1031 for a in _pkg[s].keys():
1032 for t in _pkg[s][a].keys():
1033 _tags.append(t)
1034 if any([t.startswith(tag) for t in _tags]):
1035 return True
1036 else:
1037 return None
1038 else:
1039 return True
1040 elif name in self._versions_other:
1041 # check tag
1042 if tag:
1043 _pkg = self.get_package_versions(
1044 name,
1045 tagged=True
1046 )
1047 _tags = []
1048 for s in _pkg.keys():
1049 for a in _pkg[s].keys():
1050 for t in _pkg[s][a].keys():
1051 _tags.append(t)
1052 if any([t.startswith(tag) for t in _tags]):
1053 return False
1054 else:
1055 return None
1056 else:
1057 return False
1058 else:
1059 logger.error(
1060 "# ERROR: package '{}' not found "
1061 "while determining maintainer".format(
1062 name
1063 )
1064 )
1065 return None
1066
1067 def get_filtered_versions(
1068 self,
1069 name,
1070 tag=None,
1071 include=None,
1072 exclude=None
1073 ):
1074 """Method gets all the versions for the package
1075 and filters them using keys above
1076 """
1077 if tag:
Alex3bc95f62020-03-05 17:00:04 -06001078 tag = str(tag) if not isinstance(tag, str) else tag
Alexd0391d42019-05-21 18:48:55 -05001079 _out = {}
1080 _vs = self.get_package_versions(name, tagged=True)
1081 # iterate to filter out keywords
Alex3bc95f62020-03-05 17:00:04 -06001082 for s, apps in _vs.items():
1083 for a, _tt in apps.items():
1084 for t, vs in _tt.items():
Alexd0391d42019-05-21 18:48:55 -05001085 # filter tags
1086 if tag and t != tag and t.rsplit('.', 1)[0] != tag:
1087 continue
1088 # Skip hotfix tag
1089 if t == tag + ".hotfix":
1090 continue
Alex3bc95f62020-03-05 17:00:04 -06001091 for v, rp in vs.items():
1092 for h, p in rp.items():
Alexd0391d42019-05-21 18:48:55 -05001093 # filter headers with all keywords matching
1094 _h = re.split(r"[\-\_]+", h)
1095 _included = all([kw in _h for kw in include])
1096 _excluded = any([kw in _h for kw in exclude])
1097 if not _included or _excluded:
1098 continue
1099 else:
1100 nested_set(_out, [s, a, v], [])
1101 _dat = {
1102 "header": h
1103 }
1104 _dat.update(p)
1105 _out[s][a][v].append(_dat)
1106 return _out
1107
1108 def get_package_versions(self, name, tagged=False):
Alex74dc1352019-05-17 13:18:24 -05001109 """Method builds package version structure
1110 with repository properties included
1111 """
1112 # get data
Alexd0391d42019-05-21 18:48:55 -05001113 _vs = {}
1114
1115 if name in self._versions_mirantis:
1116 _vs.update(self._versions_mirantis[name])
1117 if name in self._versions_other:
1118 _vs.update(self._versions_other[name])
Alex0ed4f762019-05-17 17:55:33 -05001119
Alex74dc1352019-05-17 13:18:24 -05001120 # insert repo data, insert props into headers place
1121 _package = {}
1122 if tagged:
Alex3bc95f62020-03-05 17:00:04 -06001123 for _v, _d1 in _vs.items():
Alex74dc1352019-05-17 13:18:24 -05001124 # use tag as a next step
Alex3bc95f62020-03-05 17:00:04 -06001125 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -05001126 _s = _info['section']
1127 _a = _info['app']
1128 for _pair in _info['repo']:
1129 _rp = {}
Alex74dc1352019-05-17 13:18:24 -05001130 # extract props for a repo
Alex0ed4f762019-05-17 17:55:33 -05001131 _r, _m = self._get_indexed_values(_pair)
Alex74dc1352019-05-17 13:18:24 -05001132 # get tag
Alex0ed4f762019-05-17 17:55:33 -05001133 _tag = _r["props"]["tag"]
Alex74dc1352019-05-17 13:18:24 -05001134 # cut tag from the header
Alex0ed4f762019-05-17 17:55:33 -05001135 _cut_head = _r["header"].split("_", 1)[1]
Alex74dc1352019-05-17 13:18:24 -05001136 # populate dict
Alexd0391d42019-05-21 18:48:55 -05001137 _rp["maintainer"] = _m
1138 _rp["md5"] = _md5
1139 _rp.update(_r["props"])
Alex74dc1352019-05-17 13:18:24 -05001140 nested_set(
1141 _package,
Alexd0391d42019-05-21 18:48:55 -05001142 [_s, _a, _tag, _v, _cut_head],
1143 _rp
Alex74dc1352019-05-17 13:18:24 -05001144 )
1145 else:
Alex3bc95f62020-03-05 17:00:04 -06001146 for _v, _d1 in _vs.items():
1147 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -05001148 _s = _info['section']
1149 _a = _info['app']
1150 for _pair in _info['repo']:
Alex0ed4f762019-05-17 17:55:33 -05001151 _r, _m = self._get_indexed_values(_pair)
Alexd0391d42019-05-21 18:48:55 -05001152 _info["maintainer"] = _m
1153 _info.update(_r["props"])
Alex74dc1352019-05-17 13:18:24 -05001154 nested_set(
1155 _package,
Alexd0391d42019-05-21 18:48:55 -05001156 [_s, _a, _v, _md5, _r["header"]],
1157 _info
Alex74dc1352019-05-17 13:18:24 -05001158 )
1159
1160 return _package
1161
Alexd9fd85e2019-05-16 16:58:24 -05001162 def parse_repos(self):
1163 # all tags to check
Alex3bc95f62020-03-05 17:00:04 -06001164 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd9fd85e2019-05-16 16:58:24 -05001165
1166 # major tags
1167 logger_cli.info("# Processing major tags")
1168 for _tag in major:
1169 self.fetch_versions(_tag)
1170
1171 # updates tags
1172 logger_cli.info("# Processing update tags")
1173 for _tag in updates:
1174 self.fetch_versions(_tag + ".update")
1175
1176 # hotfix tags
1177 logger_cli.info("# Processing hotfix tags")
1178 for _tag in hotfix:
1179 self.fetch_versions(_tag + ".hotfix")