blob: 1a040ffc4ab24ba0f2ad84903160b1b637970df9 [file] [log] [blame]
Alexd9fd85e2019-05-16 16:58:24 -05001import json
2import os
Alexd0391d42019-05-21 18:48:55 -05003import re
Alexd9fd85e2019-05-16 16:58:24 -05004from copy import deepcopy
5
Alex74dc1352019-05-17 13:18:24 -05006from cfg_checker.common import logger, logger_cli, nested_set
Alex0ed4f762019-05-17 17:55:33 -05007from cfg_checker.common.const import _mainteiners_index_filename
8from cfg_checker.common.const import _mirantis_versions_filename
9from cfg_checker.common.const import _other_versions_filename
Alexd9fd85e2019-05-16 16:58:24 -050010from cfg_checker.common.const import _pkg_desc_archive
11from cfg_checker.common.const import _repos_index_filename
12from cfg_checker.common.const import _repos_info_archive
13from cfg_checker.common.const import _repos_versions_archive
Alexd9fd85e2019-05-16 16:58:24 -050014from cfg_checker.common.const import ubuntu_releases
Alex7f69a6a2019-05-31 16:53:35 -050015from cfg_checker.common.file_utils import ensure_folder_exists
Alexd9fd85e2019-05-16 16:58:24 -050016from cfg_checker.common.file_utils import get_gzipped_file
17from cfg_checker.common.settings import pkg_dir
18from cfg_checker.helpers.console_utils import Progress
19from cfg_checker.helpers.tgz import TGZFile
20
21import requests
22from requests.exceptions import ConnectionError
23
# File extension for the per-tag JSON payloads stored inside the tgz archives
ext = ".json"
25
26
def get_tag_label(_tag, parsed=False):
    """Return a printable label for a repo tag.

    Tags already parsed are prefixed with '+ ', others with two spaces.
    A '.update'/'.hotfix' suffix is folded into a bracketed marker in
    front of the base tag; plain tags are padded for column alignment.
    """
    prefix = "+ " if parsed else "  "
    base = _tag.rsplit('.', 1)[0]
    if _tag.endswith(".update"):
        body = "[updates] " + base
    elif _tag.endswith(".hotfix"):
        body = " [hotfix] " + base
    else:
        body = " "*10 + _tag
    return prefix + body
42
43
Alex0ed4f762019-05-17 17:55:33 -050044def _get_value_index(_di, value, header=None):
Alex29ee76f2019-05-17 18:52:29 -050045 # Mainteiner names often uses specific chars
Alex3bc95f62020-03-05 17:00:04 -060046 # so make sure that value saved is str not str
47 # Python2
48 # _val = str(value, 'utf-8') if isinstance(value, str) else value
49 # Python3 has always utf-8 decoded value
50 _val = value
Alex0ed4f762019-05-17 17:55:33 -050051 if header:
Alex3bc95f62020-03-05 17:00:04 -060052 try:
Alexccb72e02021-01-20 16:38:03 -060053 _index = next(i for i in _di if header in _di[i]['header'])
Alex3bc95f62020-03-05 17:00:04 -060054 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060055 _index = str(len(_di) + 1)
Alex0ed4f762019-05-17 17:55:33 -050056 _di[_index] = {
57 "header": header,
Alex29ee76f2019-05-17 18:52:29 -050058 "props": _val
Alex0ed4f762019-05-17 17:55:33 -050059 }
Alex3bc95f62020-03-05 17:00:04 -060060 finally:
61 return _index
Alex0ed4f762019-05-17 17:55:33 -050062 else:
Alex3bc95f62020-03-05 17:00:04 -060063 try:
Alexccb72e02021-01-20 16:38:03 -060064 _index = next(i for i in _di if _val in _di[i])
Alex3bc95f62020-03-05 17:00:04 -060065 # iterator not empty, find index
Alex3bc95f62020-03-05 17:00:04 -060066 except StopIteration:
Alexccb72e02021-01-20 16:38:03 -060067 _index = str(len(_di) + 1)
Alex3bc95f62020-03-05 17:00:04 -060068 # on save, cast it as str
69 _di[_index] = _val
70 finally:
71 return _index
Alex0ed4f762019-05-17 17:55:33 -050072
73
74def _safe_load(_f, _a):
75 if _f in _a.list_files():
Alexd0391d42019-05-21 18:48:55 -050076 logger_cli.debug(
77 "... loading '{}':'{}'".format(
Alex0ed4f762019-05-17 17:55:33 -050078 _a.basefile,
79 _f
80 )
81 )
Alex3bc95f62020-03-05 17:00:04 -060082 return json.loads(_a.get_file(_f, decode=True))
Alex0ed4f762019-05-17 17:55:33 -050083 else:
84 return {}
85
86
Alexd9fd85e2019-05-16 16:58:24 -050087def _n_url(url):
88 if url[-1] == '/':
89 return url
90 else:
91 return url + '/'
92
93
class ReposInfo(object):
    """Discovers and archives mirror repository structure.

    Walks a mirror URL tree (autoindex HTML pages), locates every
    'Packages.gz' per tag/subtag and stores the resulting mapping as
    per-tag JSON files inside a tgz archive (_repos_info_archive).

    Fixes over the original:
    - `_ls_repo_page` logged `e.message`, which does not exist on
      Python 3 exceptions and turned a connection error into an
      AttributeError; it now logs the exception itself.
    - `list_tags` re-imported `re` locally although the module is
      already imported at file level.
    """
    init_done = False

    def _init_vars(self):
        # collected repo descriptors
        self.repos = []

    def _init_folders(self, arch_folder=None):
        # archive folder defaults to '<pkg_dir>/versions'
        if arch_folder:
            self._arch_folder = arch_folder
            self._repofile = os.path.join(arch_folder, _repos_info_archive)
        else:
            self._arch_folder = os.path.join(pkg_dir, "versions")
            self._repofile = os.path.join(
                self._arch_folder,
                _repos_info_archive
            )

    def __init__(self, arch_folder=None):
        # perform inits
        self._init_vars()
        self._init_folders(arch_folder)
        self.init_done = True

    def __call__(self, *args, **kwargs):
        # calling an instance yields it back, re-initializing if needed
        if self.init_done:
            return self
        else:
            return self.__init__(self, *args, **kwargs)

    @staticmethod
    def _ls_repo_page(url):
        """Scrape an autoindex HTML page into (dirs, files) name lists."""
        # Yes, this is ugly. But it works ok for small HTMLs.
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            # Python 3 exceptions carry no '.message'; format the
            # exception object itself
            logger_cli.error("# ERROR: {}".format(e))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs. Anchors for ends with '-'
        _dirs = [l[l.index(_s)+6:l.index(_e)-1]
                 for l in a if l.startswith(_a) and l.endswith('-')]
        # Comprehension for files. Anchors ends with size
        _files = [l[l.index(_s)+6:l.index(_e)]
                  for l in a if l.startswith(_a) and not l.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        """Recursively walk the 'dists' subtree, collecting every
        'Packages.gz' URL into *_list* (returned for convenience)."""
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

    @staticmethod
    def _map_repo(_path_list, _r):
        """Map 'Packages.gz' paths to repo item dicts appended to *_r*.

        Path components after 'dists' are read in reverse, i.e.
        [filename, binary-<arch>, type, release, ...].
        """
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                # strip the 'binary-' prefix from the arch component
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

    def _find_tag(self, _t, _u, label=""):
        """Probe URL *_u* (optionally under subfolder *label*) for tag
        *_t*; return a one-entry mapping or {} when absent."""
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

    def fetch_repos(self, url, tag=None):
        """Gather 'Packages.gz' links for *tag* (or every tag found at
        *url*) and store per-tag JSON payloads into the info archive."""
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            if "hotfix" in _tags:
                _tags.remove('hotfix')
            if "update" in _tags:
                _tags.remove('update')
            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _tags.extend([t for t in _h_tags if t not in _tags])
            _tags.extend([t for t in _u_tags if t not in _tags])
            _progress = Progress(len(_tags))
            _index = 0
            for _tag in _tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ".json"
            if _repotgz.has_file(_name):
                # already archived earlier; skip the expensive walk
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger_cli.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger_cli.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases or _stag.startswith("ubuntu-2020"):
                    # if stag is the release, this is all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger_cli.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger_cli.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger_cli.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(_name, buf=json.dumps(_repo, indent=2))
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

    def list_tags(self, splitted=False):
        """List tags stored in the info archive.

        With splitted=True, returns (major, updates, hotfix) sets/lists;
        otherwise a single list, numeric tags first (sorted by up to the
        first three numeric components), then lexical tags.
        """
        _files = TGZFile(self._repofile).list_files()
        # all files in archive with no '.json' part
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that ends with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that ends with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # 're' is imported at module level
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall(r"\d+", x)[:3])
            )

            return _dig + _lex

    def get_repoinfo(self, tag):
        """Load and return the JSON repo structure stored for *tag*."""
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext, decode=True)
        return json.loads(_buf)
366
367
368class RepoManager(object):
Alex3bc95f62020-03-05 17:00:04 -0600369 init_done = False
Alexd9fd85e2019-05-16 16:58:24 -0500370
Alex3bc95f62020-03-05 17:00:04 -0600371 def _init_folders(self, arch_folder=None):
Alex9a4ad212020-10-01 18:04:25 -0500372 logger_cli.info("# Loading package versions data")
Alex3bc95f62020-03-05 17:00:04 -0600373 # overide arch folder if needed
374 if arch_folder:
375 self._arch_folder = arch_folder
376 else:
377 self._arch_folder = os.path.join(pkg_dir, "versions")
Alexd9fd85e2019-05-16 16:58:24 -0500378
Alex3bc95f62020-03-05 17:00:04 -0600379 self._versions_arch = os.path.join(
380 self._arch_folder,
381 _repos_versions_archive
382 )
383 self._desc_arch = os.path.join(self._arch_folder, _pkg_desc_archive)
Alexd0391d42019-05-21 18:48:55 -0500384
Alex3bc95f62020-03-05 17:00:04 -0600385 def _init_vars(self, info_class):
386 # RepoInfo instance init
387 if info_class:
388 self._info_class = info_class
389 else:
390 self._info_class = ReposInfo()
391 # archives
392 self._apps_filename = "apps.json"
Alexd9fd85e2019-05-16 16:58:24 -0500393
Alex3bc95f62020-03-05 17:00:04 -0600394 # repository index
395 self._repo_index = {}
396 self._mainteiners_index = {}
397
398 self._apps = {}
399
400 # init package versions storage
401 self._versions_mirantis = {}
402 self._versions_other = {}
403
404 def _init_archives(self):
Alexd9fd85e2019-05-16 16:58:24 -0500405 # Init version files
406 self.versionstgz = TGZFile(
407 self._versions_arch,
408 label="MCP Configuration Checker: Package versions archive"
409 )
410 self.desctgz = TGZFile(
411 self._desc_arch,
412 label="MCP Configuration Checker: Package descriptions archive"
413 )
Alexd0391d42019-05-21 18:48:55 -0500414
415 # section / app
416 self._apps = _safe_load(
417 self._apps_filename,
418 self.desctgz
419 )
420
Alex0ed4f762019-05-17 17:55:33 -0500421 # indices
422 self._repo_index = _safe_load(
423 _repos_index_filename,
424 self.versionstgz
425 )
426 self._mainteiners_index = _safe_load(
427 _mainteiners_index_filename,
428 self.versionstgz
429 )
Alexd9fd85e2019-05-16 16:58:24 -0500430
Alex0ed4f762019-05-17 17:55:33 -0500431 # versions
432 self._versions_mirantis = _safe_load(
433 _mirantis_versions_filename,
434 self.versionstgz
435 )
436 self._versions_other = _safe_load(
437 _other_versions_filename,
438 self.versionstgz
439 )
Alexd9fd85e2019-05-16 16:58:24 -0500440
Alex3bc95f62020-03-05 17:00:04 -0600441 def __init__(self, arch_folder=None, info_class=None):
442 # Perform inits
443 self._init_vars(info_class)
444 self._init_folders(arch_folder)
445 # Ensure that versions folder exists
446 logger_cli.debug(ensure_folder_exists(self._arch_folder))
447 # Preload/create archives
448 self._init_archives()
449 self.init_done = True
450
451 def __call__(self, *args, **kwargs):
452 if self.init_done:
453 return self
454 else:
455 return self.__init__(self, *args, **kwargs)
456
Alexd9fd85e2019-05-16 16:58:24 -0500457 def _create_repo_header(self, p):
458 _header = "_".join([
459 p['tag'],
460 p['subset'],
461 p['release'],
462 p['ubuntu-release'],
463 p['type'],
464 p['arch']
465 ])
Alex0ed4f762019-05-17 17:55:33 -0500466 return _get_value_index(self._repo_index, p, header=_header)
Alexd9fd85e2019-05-16 16:58:24 -0500467
Alex0ed4f762019-05-17 17:55:33 -0500468 def _get_indexed_values(self, pair):
469 _h, _m = pair.split('-')
470 return self._repo_index[_h], self._mainteiners_index[_m]
Alexd9fd85e2019-05-16 16:58:24 -0500471
Alexd0391d42019-05-21 18:48:55 -0500472 def _update_pkg_version(self, _d, n, v, md5, s, a, h_index, m_index):
Alexd9fd85e2019-05-16 16:58:24 -0500473 """Method updates package version record in global dict
474 """
475 # 'if'*4 operation is pretty expensive when using it 100k in a row
476 # so try/except is a better way to go, even faster than 'reduce'
Alex0ed4f762019-05-17 17:55:33 -0500477 _pair = "-".join([h_index, m_index])
Alexd0391d42019-05-21 18:48:55 -0500478 _info = {
479 'repo': [_pair],
480 'section': s,
481 'app': a
482 }
Alexd9fd85e2019-05-16 16:58:24 -0500483 try:
484 # try to load list
Alexd0391d42019-05-21 18:48:55 -0500485 _list = _d[n][v][md5]['repo']
Alexd9fd85e2019-05-16 16:58:24 -0500486 # cast it as set() and union()
Alex0ed4f762019-05-17 17:55:33 -0500487 _list = set(_list).union([_pair])
Alexd9fd85e2019-05-16 16:58:24 -0500488 # cast back as set() is not serializeable
Alexd0391d42019-05-21 18:48:55 -0500489 _d[n][v][md5]['repo'] = list(_list)
Alexd9fd85e2019-05-16 16:58:24 -0500490 return False
491 except KeyError:
492 # ok, this is fresh pkg. Do it slow way.
Alex0ed4f762019-05-17 17:55:33 -0500493 if n in _d:
Alexd9fd85e2019-05-16 16:58:24 -0500494 # there is such pkg already
Alex0ed4f762019-05-17 17:55:33 -0500495 if v in _d[n]:
Alexd9fd85e2019-05-16 16:58:24 -0500496 # there is such version, check md5
Alex0ed4f762019-05-17 17:55:33 -0500497 if md5 in _d[n][v]:
Alexd9fd85e2019-05-16 16:58:24 -0500498 # just add new repo header
Alexd0391d42019-05-21 18:48:55 -0500499 if _pair not in _d[n][v][md5]['repo']:
500 _d[n][v][md5]['repo'].append(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500501 else:
502 # check if such index is here...
503 _existing = filter(
Alexd0391d42019-05-21 18:48:55 -0500504 lambda i: _pair in _d[n][v][i]['repo'],
Alex0ed4f762019-05-17 17:55:33 -0500505 _d[n][v]
Alexd9fd85e2019-05-16 16:58:24 -0500506 )
507 if _existing:
508 # Yuck! Same version had different MD5
Alex0ed4f762019-05-17 17:55:33 -0500509 _r, _m = self._get_indexed_values(_pair)
Alexd9fd85e2019-05-16 16:58:24 -0500510 logger_cli.error(
511 "# ERROR: Package version has multiple MD5s "
512 "in '{}': {}:{}:{}".format(
Alex0ed4f762019-05-17 17:55:33 -0500513 _r,
Alexd9fd85e2019-05-16 16:58:24 -0500514 n,
515 v,
516 md5
517 )
518 )
Alexd0391d42019-05-21 18:48:55 -0500519 _d[n][v][md5] = _info
Alexd9fd85e2019-05-16 16:58:24 -0500520 else:
521 # this is new version for existing package
Alex0ed4f762019-05-17 17:55:33 -0500522 _d[n][v] = {
Alexd0391d42019-05-21 18:48:55 -0500523 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500524 }
525 return False
526 else:
527 # this is new pakcage
Alex0ed4f762019-05-17 17:55:33 -0500528 _d[n] = {
Alexd9fd85e2019-05-16 16:58:24 -0500529 v: {
Alexd0391d42019-05-21 18:48:55 -0500530 md5: _info
Alexd9fd85e2019-05-16 16:58:24 -0500531 }
532 }
533 return True
534
535 def _save_repo_descriptions(self, repo_props, desc):
536 # form the filename for the repo and save it
537 self.desctgz.add_file(
538 self._create_repo_header(repo_props),
539 json.dumps(desc)
540 )
541
542 # def get_description(self, repo_props, name, md5=None):
543 # """Gets target description
544 # """
545 # _filename = self._create_repo_header(repo_props)
546 # # check if it is present in cache
547 # if _filename in self._desc_cache:
548 # _descs = self._desc_cache[_filename]
549 # else:
550 # # load data
551 # _descs = self.desctgz.get_file(_filename)
552 # # Serialize it
553 # _descs = json.loads(_descs)
554 # self._desc_cache[_filename] = _descs
555 # # return target desc
556 # if name in _descs and md5 in _descs[name]:
557 # return _descs[name][md5]
558 # else:
559 # return None
560
    def parse_tag(self, tag, descriptions=False, apps=False):
        """Download and parse Package.gz files for specific tag.

        For every ubuntu repo recorded for *tag*, downloads its
        'Packages.gz', parses the Debian control stanzas and updates the
        in-memory version/maintainer/app storages, then writes the
        indices (and app map, when *apps*) back to the archives.
        By default, descriptions not saved
        due to huge resulting file size and slow processing
        """
        # init gzip and downloader
        _info = self._info_class.get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        # NOTE(review): '-1' presumably discounts a non-component key in
        # the payload — confirm against get_repoinfo output
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        # count repos split by recognized ubuntu releases vs the rest
        for _c, _d in _info.items():
            for _ur, _l in _d.items():
                if _ur in ubuntu_releases or _ur.startswith("ubuntu-2020"):
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            " '{}', {} components, {} ubuntu repos, {} other/uknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.items():
            # we do not need url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url = if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.items():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _index += 1
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, GET 'Packages.gz'".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch']
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    if not _raw:
                        # empty repo...
                        _progress.clearline()
                        logger_cli.warning(
                            "# WARNING: Empty file: '{}'".format(
                                _p['filepath']
                            )
                        )
                        continue
                    else:
                        # bytes from the gzip download -> text
                        _raw = _raw.decode("utf-8")
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _lines = _raw.splitlines()
                    # break lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
                    # if there is no empty line at end, add it
                    # (an empty line is the stanza terminator below)
                    if _lines[-1] != '':
                        _lines.append('')
                    # Process lines
                    for _line in _lines:
                        if not _line:
                            # if the line is empty, process pkg data gathered
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            _mainteiner = _desc['maintainer']

                            # app name comes from the 'Source' field,
                            # '-' when absent
                            if 'source' in _desc:
                                _ap = _desc['source'].lower()
                            else:
                                _ap = "-"

                            if apps:
                                # insert app into section/app/name map
                                _sc = _desc['section'].lower()
                                if 'source' in _desc:
                                    _ap = _desc['source'].lower()
                                else:
                                    _ap = "-"

                                # add architecture to the existing set or
                                # create the nested path on first sight
                                try:
                                    _tmp = set(self._apps[_sc][_ap][_name])
                                    _tmp.add(_desc['architecture'])
                                    self._apps[_sc][_ap][_name] = list(_tmp)
                                except KeyError:
                                    nested_set(
                                        self._apps,
                                        [_sc, _ap, _name],
                                        [_desc['architecture']]
                                    )

                            # Check if maintainer is Mirantis
                            if _mainteiner.endswith("@mirantis.com>"):
                                # update mirantis versions
                                if self._update_pkg_version(
                                    self._versions_mirantis,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1
                            else:
                                # update other versions
                                if self._update_pkg_version(
                                    self._versions_other,
                                    _name,
                                    _version,
                                    _md5,
                                    _desc['section'].lower(),
                                    _ap,
                                    self._create_repo_header(_pkg),
                                    _get_value_index(
                                        self._mainteiners_index,
                                        _mainteiner
                                    )
                                ):
                                    _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            # continuation line: append to previous field
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            # 'Key:' with empty value vs 'Key: value'
                            if _line.endswith(":"):
                                _key = _line[:-1]
                                _value = ""
                            else:
                                _key, _value = _line.split(": ", 1)
                                # _key = _line[:_line.index(':')]

                            # _value = _line[_line.index(':')+1:]
                            # _value = _value if _value[0] != ' ' else _value[1:]
                            _key = _key.lower()
                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            _repos_index_filename,
            json.dumps(self._repo_index),
            replace=True
        )
        self.versionstgz.add_file(
            _mainteiners_index_filename,
            json.dumps(self._mainteiners_index),
            replace=True
        )
        if apps:
            self.desctgz.add_file(
                self._apps_filename,
                json.dumps(self._apps),
                replace=True
            )

        return
773
Alexd0391d42019-05-21 18:48:55 -0500774 def fetch_versions(self, tag, descriptions=False, apps=False):
Alexd9fd85e2019-05-16 16:58:24 -0500775 """Executes parsing for specific tag
776 """
777 if descriptions:
778 logger_cli.warning(
779 "\n\n# !!! WARNING: Saving repo descriptions "
780 "consumes huge amount of disk space\n\n"
781 )
782 # if there is no such tag, parse it from repoinfo
Alexd9fd85e2019-05-16 16:58:24 -0500783 logger_cli.info("# Fetching versions for {}".format(tag))
Alexd0391d42019-05-21 18:48:55 -0500784 self.parse_tag(tag, descriptions=descriptions, apps=apps)
Alex0ed4f762019-05-17 17:55:33 -0500785 logger_cli.info("-> saving updated versions")
786 self.versionstgz.add_file(
787 _mirantis_versions_filename,
788 json.dumps(self._versions_mirantis),
789 replace=True
790 )
791 self.versionstgz.add_file(
792 _other_versions_filename,
793 json.dumps(self._versions_other),
794 replace=True
795 )
Alexd9fd85e2019-05-16 16:58:24 -0500796
797 def build_repos(self, url, tag=None):
798 """Builds versions data for selected tag, or for all of them
799 """
Alexd9fd85e2019-05-16 16:58:24 -0500800 # recoursively walk the mirrors
801 # and gather all of the repos for 'tag' or all of the tags
Alex3bc95f62020-03-05 17:00:04 -0600802 self._info_class.fetch_repos(url, tag=tag)
Alexd9fd85e2019-05-16 16:58:24 -0500803
Alex74dc1352019-05-17 13:18:24 -0500804 def _build_action(self, url, tags):
805 for t in tags:
Alex6df29ad2019-05-31 17:55:32 -0500806 logger_cli.info("# Building repo info for '{}'".format(t))
Alex74dc1352019-05-17 13:18:24 -0500807 self.build_repos(url, tag=t)
808
Alexd0391d42019-05-21 18:48:55 -0500809 def get_available_tags(self, tag=None):
810 # Populate action tags
Alex3bc95f62020-03-05 17:00:04 -0600811 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd0391d42019-05-21 18:48:55 -0500812
813 _tags = []
814 if tag in major:
815 _tags.append(tag)
816 if tag in updates:
817 _tags.append(tag + ".update")
818 if tag in hotfix:
819 _tags.append(tag + ".hotfix")
820
821 return _tags
822
Alexd9fd85e2019-05-16 16:58:24 -0500823 def action_for_tag(
824 self,
825 url,
826 tag,
827 action=None,
Alexd0391d42019-05-21 18:48:55 -0500828 descriptions=None,
829 apps=None
Alexd9fd85e2019-05-16 16:58:24 -0500830 ):
831 """Executes action for every tag from all collections
832 """
833 if not action:
834 logger_cli.info("# No action set, nothing to do")
Alex74dc1352019-05-17 13:18:24 -0500835 # See if this is a list action
Alexd9fd85e2019-05-16 16:58:24 -0500836 if action == "list":
Alex3bc95f62020-03-05 17:00:04 -0600837 _all = self._info_class.list_tags()
Alex6df29ad2019-05-31 17:55:32 -0500838 if _all:
839 # Print pretty list and exit
840 logger_cli.info("# Tags available at '{}':".format(url))
841 for t in _all:
842 _ri = self._repo_index
843 _isparsed = any(
Alex3bc95f62020-03-05 17:00:04 -0600844 [k for k, v in _ri.items()
Alex6df29ad2019-05-31 17:55:32 -0500845 if v['props']['tag'] == t]
846 )
847 if _isparsed:
848 logger_cli.info(get_tag_label(t, parsed=True))
849 else:
850 logger_cli.info(get_tag_label(t))
851 else:
852 logger_cli.info("# Not tags parsed yet for '{}':".format(url))
853
Alex74dc1352019-05-17 13:18:24 -0500854 # exit
Alexd9fd85e2019-05-16 16:58:24 -0500855 return
Alex74dc1352019-05-17 13:18:24 -0500856
Alex6df29ad2019-05-31 17:55:32 -0500857 if action == "build":
858 self._build_action(url, [tag])
859
Alexd0391d42019-05-21 18:48:55 -0500860 # Populate action tags
861 _action_tags = self.get_available_tags(tag)
862
Alexd9fd85e2019-05-16 16:58:24 -0500863 if not _action_tags:
864 logger_cli.info(
865 "# Tag of '{}' not found. "
866 "Consider rebuilding repos info.".format(tag)
867 )
Alex74dc1352019-05-17 13:18:24 -0500868 else:
Alexd9fd85e2019-05-16 16:58:24 -0500869 logger_cli.info(
Alex74dc1352019-05-17 13:18:24 -0500870 "-> tags to process: {}".format(
Alexd9fd85e2019-05-16 16:58:24 -0500871 ", ".join(_action_tags)
872 )
873 )
Alex74dc1352019-05-17 13:18:24 -0500874 # Execute actions
Alex6df29ad2019-05-31 17:55:32 -0500875 if action == "fetch":
Alexd9fd85e2019-05-16 16:58:24 -0500876 for t in _action_tags:
Alexd0391d42019-05-21 18:48:55 -0500877 self.fetch_versions(t, descriptions=descriptions, apps=apps)
Alexd9fd85e2019-05-16 16:58:24 -0500878
879 logger_cli.info("# Done.")
880
    def show_package(self, name):
        """Print version/md5/repo breakdown for package *name*.

        Relies on self.get_package_versions returning a nested mapping
        section -> app -> version -> md5 -> repo-header -> details;
        NOTE(review): that method is defined outside this chunk —
        confirm the structure against its implementation.
        """
        # get the package data
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            # \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            # section
            for _s in sorted(_p):
                # app
                for _a in sorted(_p[_s]):
                    _o = ""
                    _mm = []
                    # get and sort versions
                    for _v in sorted(_p[_s][_a]):
                        _o += "\n" + " "*8 + _v + ':\n'
                        # get and sort md5s
                        for _md5 in sorted(_p[_s][_a][_v]):
                            _o += " "*16 + _md5 + "\n"
                            # get and sort repo headers
                            for _r in sorted(_p[_s][_a][_v][_md5]):
                                _o += " "*24 + _r.replace('_', ' ') + '\n'
                                # collect unique maintainers for summary
                                _m = _p[_s][_a][_v][_md5][_r]["maintainer"]
                                if _m not in _mm:
                                    _mm.append(_m)

                    logger_cli.info(
                        "\n# Package: {}/{}/{}\nMaintainers: {}".format(
                            _s,
                            _a,
                            name,
                            ", ".join(_mm)
                        )
                    )

                    logger_cli.info(_o)
Alex74dc1352019-05-17 13:18:24 -0500924
Alexd0391d42019-05-21 18:48:55 -0500925 @staticmethod
926 def get_apps(versions, name):
927 _all = True if name == '*' else False
Alexcf91b182019-05-31 11:57:07 -0500928 _s_max = _a_max = _p_max = _v_max = 0
Alexd0391d42019-05-21 18:48:55 -0500929 _rows = []
930 for _p in versions.keys():
931 _vs = versions[_p]
Alex3bc95f62020-03-05 17:00:04 -0600932 for _v, _d1 in _vs.items():
933 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -0500934 if _all or name == _info['app']:
935 _s_max = max(len(_info['section']), _s_max)
936 _a_max = max(len(_info['app']), _a_max)
Alexcf91b182019-05-31 11:57:07 -0500937 _p_max = max(len(_p), _p_max)
938 _v_max = max(len(_v), _v_max)
Alexd0391d42019-05-21 18:48:55 -0500939 _rows.append([
940 _info['section'],
941 _info['app'],
Alexcf91b182019-05-31 11:57:07 -0500942 _p,
943 _v,
944 _md5,
945 len(_info['repo'])
Alexd0391d42019-05-21 18:48:55 -0500946 ])
Alexcf91b182019-05-31 11:57:07 -0500947 # format columns
948 # section
949 _fmt = "{:"+str(_s_max)+"} "
950 # app
951 _fmt += "{:"+str(_a_max)+"} "
952 # package name
953 _fmt += "{:"+str(_p_max)+"} "
954 # version
955 _fmt += "{:"+str(_v_max)+"} "
956 # md5 and number of repos is fixed
957 _fmt += "{} in {} repos"
958
959 # fill rows
960 _rows = [_fmt.format(s, a, p, v, m, l) for s, a, p, v, m, l in _rows]
Alexd0391d42019-05-21 18:48:55 -0500961 _rows.sort()
962 return _rows
963
964 def show_app(self, name):
965 c = 0
966 rows = self.get_apps(self._versions_mirantis, name)
967 if rows:
Alexcf91b182019-05-31 11:57:07 -0500968 logger_cli.info("\n# Mirantis packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500969 logger_cli.info("\n".join(rows))
970 c += 1
971 rows = self.get_apps(self._versions_other, name)
972 if rows:
Alexcf91b182019-05-31 11:57:07 -0500973 logger_cli.info("\n# Other packages for '{}'".format(name))
Alexd0391d42019-05-21 18:48:55 -0500974 logger_cli.info("\n".join(rows))
975 c += 1
976 if c == 0:
977 logger_cli.info("\n# No app found for '{}'".format(name))
978
979 def get_mirantis_pkg_names(self):
980 # Mirantis maintainers only
981 return set(
982 self._versions_mirantis.keys()
983 ) - set(
984 self._versions_other.keys()
985 )
986
987 def get_other_pkg_names(self):
988 # Non-mirantis Maintainers
989 return set(
990 self._versions_other.keys()
991 ) - set(
992 self._versions_mirantis.keys()
993 )
994
995 def get_mixed_pkg_names(self):
996 # Mixed maintainers
997 return set(
998 self._versions_mirantis.keys()
999 ).intersection(set(
1000 self._versions_other.keys()
1001 ))
1002
1003 def is_mirantis(self, name, tag=None):
1004 """Method checks if this package is mainteined
1005 by mirantis in target tag repo
1006 """
1007 if name in self._versions_mirantis:
1008 # check tag
1009 if tag:
1010 _pkg = self.get_package_versions(
1011 name,
1012 tagged=True
1013 )
1014 _tags = []
1015 for s in _pkg.keys():
1016 for a in _pkg[s].keys():
1017 for t in _pkg[s][a].keys():
1018 _tags.append(t)
1019 if any([t.startswith(tag) for t in _tags]):
1020 return True
1021 else:
1022 return None
1023 else:
1024 return True
1025 elif name in self._versions_other:
1026 # check tag
1027 if tag:
1028 _pkg = self.get_package_versions(
1029 name,
1030 tagged=True
1031 )
1032 _tags = []
1033 for s in _pkg.keys():
1034 for a in _pkg[s].keys():
1035 for t in _pkg[s][a].keys():
1036 _tags.append(t)
1037 if any([t.startswith(tag) for t in _tags]):
1038 return False
1039 else:
1040 return None
1041 else:
1042 return False
1043 else:
1044 logger.error(
1045 "# ERROR: package '{}' not found "
1046 "while determining maintainer".format(
1047 name
1048 )
1049 )
1050 return None
1051
1052 def get_filtered_versions(
1053 self,
1054 name,
1055 tag=None,
1056 include=None,
1057 exclude=None
1058 ):
1059 """Method gets all the versions for the package
1060 and filters them using keys above
1061 """
1062 if tag:
Alex3bc95f62020-03-05 17:00:04 -06001063 tag = str(tag) if not isinstance(tag, str) else tag
Alexd0391d42019-05-21 18:48:55 -05001064 _out = {}
1065 _vs = self.get_package_versions(name, tagged=True)
1066 # iterate to filter out keywords
Alex3bc95f62020-03-05 17:00:04 -06001067 for s, apps in _vs.items():
1068 for a, _tt in apps.items():
1069 for t, vs in _tt.items():
Alexd0391d42019-05-21 18:48:55 -05001070 # filter tags
1071 if tag and t != tag and t.rsplit('.', 1)[0] != tag:
1072 continue
1073 # Skip hotfix tag
1074 if t == tag + ".hotfix":
1075 continue
Alex3bc95f62020-03-05 17:00:04 -06001076 for v, rp in vs.items():
1077 for h, p in rp.items():
Alexd0391d42019-05-21 18:48:55 -05001078 # filter headers with all keywords matching
1079 _h = re.split(r"[\-\_]+", h)
1080 _included = all([kw in _h for kw in include])
1081 _excluded = any([kw in _h for kw in exclude])
1082 if not _included or _excluded:
1083 continue
1084 else:
1085 nested_set(_out, [s, a, v], [])
1086 _dat = {
1087 "header": h
1088 }
1089 _dat.update(p)
1090 _out[s][a][v].append(_dat)
1091 return _out
1092
1093 def get_package_versions(self, name, tagged=False):
Alex74dc1352019-05-17 13:18:24 -05001094 """Method builds package version structure
1095 with repository properties included
1096 """
1097 # get data
Alexd0391d42019-05-21 18:48:55 -05001098 _vs = {}
1099
1100 if name in self._versions_mirantis:
1101 _vs.update(self._versions_mirantis[name])
1102 if name in self._versions_other:
1103 _vs.update(self._versions_other[name])
Alex0ed4f762019-05-17 17:55:33 -05001104
Alex74dc1352019-05-17 13:18:24 -05001105 # insert repo data, insert props into headers place
1106 _package = {}
1107 if tagged:
Alex3bc95f62020-03-05 17:00:04 -06001108 for _v, _d1 in _vs.items():
Alex74dc1352019-05-17 13:18:24 -05001109 # use tag as a next step
Alex3bc95f62020-03-05 17:00:04 -06001110 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -05001111 _s = _info['section']
1112 _a = _info['app']
1113 for _pair in _info['repo']:
1114 _rp = {}
Alex74dc1352019-05-17 13:18:24 -05001115 # extract props for a repo
Alex0ed4f762019-05-17 17:55:33 -05001116 _r, _m = self._get_indexed_values(_pair)
Alex74dc1352019-05-17 13:18:24 -05001117 # get tag
Alex0ed4f762019-05-17 17:55:33 -05001118 _tag = _r["props"]["tag"]
Alex74dc1352019-05-17 13:18:24 -05001119 # cut tag from the header
Alex0ed4f762019-05-17 17:55:33 -05001120 _cut_head = _r["header"].split("_", 1)[1]
Alex74dc1352019-05-17 13:18:24 -05001121 # populate dict
Alexd0391d42019-05-21 18:48:55 -05001122 _rp["maintainer"] = _m
1123 _rp["md5"] = _md5
1124 _rp.update(_r["props"])
Alex74dc1352019-05-17 13:18:24 -05001125 nested_set(
1126 _package,
Alexd0391d42019-05-21 18:48:55 -05001127 [_s, _a, _tag, _v, _cut_head],
1128 _rp
Alex74dc1352019-05-17 13:18:24 -05001129 )
1130 else:
Alex3bc95f62020-03-05 17:00:04 -06001131 for _v, _d1 in _vs.items():
1132 for _md5, _info in _d1.items():
Alexd0391d42019-05-21 18:48:55 -05001133 _s = _info['section']
1134 _a = _info['app']
1135 for _pair in _info['repo']:
Alex0ed4f762019-05-17 17:55:33 -05001136 _r, _m = self._get_indexed_values(_pair)
Alexd0391d42019-05-21 18:48:55 -05001137 _info["maintainer"] = _m
1138 _info.update(_r["props"])
Alex74dc1352019-05-17 13:18:24 -05001139 nested_set(
1140 _package,
Alexd0391d42019-05-21 18:48:55 -05001141 [_s, _a, _v, _md5, _r["header"]],
1142 _info
Alex74dc1352019-05-17 13:18:24 -05001143 )
1144
1145 return _package
1146
Alexd9fd85e2019-05-16 16:58:24 -05001147 def parse_repos(self):
1148 # all tags to check
Alex3bc95f62020-03-05 17:00:04 -06001149 major, updates, hotfix = self._info_class.list_tags(splitted=True)
Alexd9fd85e2019-05-16 16:58:24 -05001150
1151 # major tags
1152 logger_cli.info("# Processing major tags")
1153 for _tag in major:
1154 self.fetch_versions(_tag)
1155
1156 # updates tags
1157 logger_cli.info("# Processing update tags")
1158 for _tag in updates:
1159 self.fetch_versions(_tag + ".update")
1160
1161 # hotfix tags
1162 logger_cli.info("# Processing hotfix tags")
1163 for _tag in hotfix:
1164 self.fetch_versions(_tag + ".hotfix")