import json
import os
from copy import deepcopy

from cfg_checker.common import logger, logger_cli, nested_set
from cfg_checker.common.const import _pkg_desc_archive
from cfg_checker.common.const import _repos_index_filename
from cfg_checker.common.const import _repos_info_archive
from cfg_checker.common.const import _repos_versions_archive
from cfg_checker.common.const import _repos_versions_filename
from cfg_checker.common.const import ubuntu_releases
from cfg_checker.common.file_utils import get_gzipped_file
from cfg_checker.common.settings import pkg_dir
from cfg_checker.helpers.console_utils import Progress
from cfg_checker.helpers.tgz import TGZFile

import requests
from requests.exceptions import ConnectionError

ext = ".json"

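# Illustrative output (tag value assumed, derived from the branches below):
#   get_tag_label("2019.2.0.update") -> "[updates] 2019.2.0"
#   get_tag_label("2019.2.0.hotfix") -> " [hotfix] 2019.2.0"
#   get_tag_label("2019.2.0")        -> "          2019.2.0"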
def get_tag_label(_tag):
    # prettify the tag for printing
    _label = ""
    if _tag.endswith(".update"):
        _label += "[updates] " + _tag.rsplit('.', 1)[0]
    elif _tag.endswith(".hotfix"):
        _label += " [hotfix] " + _tag.rsplit('.', 1)[0]
    else:
        _label += " "*10 + _tag
    return _label


def _n_url(url):
    if url[-1] == '/':
        return url
    else:
        return url + '/'


class ReposInfo(object):
    repos = []
    _repofile = os.path.join(pkg_dir, "versions", _repos_info_archive)

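    # Illustrative autoindex line this parser expects (Apache/nginx-style
    # listing assumed):
    #   <a href="2019.2.0/">2019.2.0/</a>  17-May-2019 13:18  -
    # Directory lines end with '-' (no size) and the trailing '/' is cut;
    # file lines end with a size and the href value is kept as-is.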
    @staticmethod
    def _ls_repo_page(url):
        # Yes, this is ugly. But it works ok for small HTMLs.
        _a = "<a"
        _s = "href="
        _e = "\">"
        try:
            page = requests.get(url, timeout=60)
        except ConnectionError as e:
            logger_cli.error("# ERROR: {}".format(e))
            return [], []
        a = page.text.splitlines()
        # Comprehension for dirs: anchor lines that end with '-'
        _dirs = [l[l.index(_s)+6:l.index(_e)-1]
                 for l in a if l.startswith(_a) and l.endswith('-')]
        # Comprehension for files: anchor lines that end with a size
        _files = [l[l.index(_s)+6:l.index(_e)]
                  for l in a if l.startswith(_a) and not l.endswith('-')]

        return _dirs, _files

    def search_pkg(self, url, _list):
        # recursive method to walk the dists tree
        _dirs, _files = self._ls_repo_page(url)

        for _d in _dirs:
            # Search only in dists, ignore the rest
            if "dists" not in url and _d != "dists":
                continue
            _u = _n_url(url + _d)
            self.search_pkg(_u, _list)

        for _f in _files:
            if _f == "Packages.gz":
                _list.append(url + _f)
                logger.debug("... [F] '{}'".format(url + _f))

        return _list

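    # Illustrative mapping (path value assumed): a Packages.gz path like
    #   .../dists/xenial/main/binary-amd64/Packages.gz
    # is split on '/'; the part after 'dists' is reversed and yields
    #   {"arch": "amd64", "type": "main", "ubuntu-release": "xenial",
    #    "filepath": <the path>}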
    @staticmethod
    def _map_repo(_path_list, _r):
        for _pkg_path in _path_list:
            _l = _pkg_path.split('/')
            _kw = _l[_l.index('dists')+1:]
            _kw.reverse()
            _repo_item = {
                "arch": _kw[1][7:] if "binary" in _kw[1] else _kw[1],
                "type": _kw[2],
                "ubuntu-release": _kw[3],
                "filepath": _pkg_path
            }
            _r.append(_repo_item)

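    # Illustrative result (tag and base URL assumed):
    #   _find_tag("2019.2.0", base, label="update") returns
    #   {"2019.2.0.update": {"baseurl": base + "update/2019.2.0/", "all": {}}}
    # or {} when the tag is absent from the listing.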
    def _find_tag(self, _t, _u, label=""):
        if label:
            _url = _n_url(_u + label)
            _label = _t + '.' + label
        else:
            _url = _u
            _label = _t
        _ts, _ = self._ls_repo_page(_url)
        if _t in _ts:
            logger.debug(
                "... found tag '{}' at '{}'".format(
                    _t,
                    _url
                )
            )
            return {
                _label: {
                    "baseurl": _n_url(_url + _t),
                    "all": {}
                }
            }
        else:
            return {}

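    # Per-tag JSON layout produced below and stored in the repoinfo archive:
    #   {"baseurl": <tag url>,
    #    "all": {"url": <url>, "<ubuntu release>": [<repo items>], ...},
    #    "<subtag>": {"url": <url>, "<ubuntu release>": [<repo items>], ...}}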
    def fetch_repos(self, url, tag=None):
        base_url = _n_url(url)
        logger_cli.info("# Using '{}' as a repos source".format(base_url))

        logger_cli.info("# Gathering repos info (i.e. links to 'Packages.gz')")
        # init repoinfo archive
        _repotgz = TGZFile(self._repofile)
        # prepare repo links
        _repos = {}
        if tag:
            # only one tag to process
            _repos.update(self._find_tag(tag, base_url))
            _repos.update(self._find_tag(tag, base_url, label="hotfix"))
            _repos.update(self._find_tag(tag, base_url, label="update"))
        else:
            # gather all of them
            _tags, _ = self._ls_repo_page(base_url)
            _tags.remove('hotfix')
            _tags.remove('update')
            # search tags in subfolders
            _h_tags, _ = self._ls_repo_page(base_url + 'hotfix')
            _u_tags, _ = self._ls_repo_page(base_url + 'update')
            _tags.extend([t for t in _h_tags if t not in _tags])
            _tags.extend([t for t in _u_tags if t not in _tags])
            _progress = Progress(len(_tags))
            _index = 0
            for _tag in _tags:
                _repos.update(self._find_tag(_tag, base_url))
                _repos.update(self._find_tag(_tag, base_url, label="hotfix"))
                _repos.update(self._find_tag(_tag, base_url, label="update"))
                _index += 1
                _progress.write_progress(_index)
            _progress.end()

        # parse subtags
        for _label in _repos.keys():
            logger_cli.info("-> processing tag '{}'".format(_label))
            _name = _label + ".json"
            if _repotgz.has_file(_name):
                logger_cli.info(
                    "-> skipping, '{}' already has '{}'".format(
                        _repos_info_archive,
                        _name
                    )
                )
                continue
            # process the tag
            _repo = _repos[_label]
            _baseurl = _repos[_label]["baseurl"]
            # get the subtags
            _sub_tags, _ = self._ls_repo_page(_baseurl)
            _total_index = len(_sub_tags)
            _index = 0
            _progress = Progress(_total_index)
            logger.debug(
                "... found {} subtags for '{}'".format(
                    len(_sub_tags),
                    _label
                )
            )
            # save the url and start the search
            for _stag in _sub_tags:
                _u = _baseurl + _stag
                _index += 1
                logger.debug(
                    "... searching repos in '{}/{}'".format(
                        _label,
                        _stag
                    )
                )

                # Searching Package collections
                if _stag in ubuntu_releases:
                    # if the subtag is a release name, it holds all packages
                    _repo["all"][_stag] = []
                    _repo["all"]["url"] = _n_url(_u)
                    _path_list = self.search_pkg(_n_url(_u), [])
                    self._map_repo(_path_list, _repo["all"][_stag])
                    logger.info(
                        "-> found {} dists".format(
                            len(_repo["all"][_stag])
                        )
                    )

                else:
                    # each subtag might have any ubuntu release
                    # so iterate them
                    _repo[_stag] = {
                        "url": _n_url(_u)
                    }
                    _releases, _ = self._ls_repo_page(_n_url(_u))
                    for _rel in _releases:
                        if _rel not in ubuntu_releases:
                            logger.debug(
                                "... skipped unknown ubuntu release: "
                                "'{}' in '{}'".format(
                                    _rel,
                                    _u
                                )
                            )
                        else:
                            _rel_u = _n_url(_u) + _rel
                            _repo[_stag][_rel] = []
                            _path_list = self.search_pkg(_n_url(_rel_u), [])
                            self._map_repo(
                                _path_list,
                                _repo[_stag][_rel]
                            )
                            logger.info(
                                "-> found {} dists for '{}'".format(
                                    len(_repo[_stag][_rel]),
                                    _rel
                                )
                            )
                _progress.write_progress(_index)

            _progress.end()
            _name = _label + ext
            _repotgz.add_file(_name, buf=json.dumps(_repo, indent=2))
            logger_cli.info(
                "-> archive '{}' updated with '{}'".format(
                    self._repofile,
                    _name
                )
            )

        return

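    # Returned ordering for splitted=False (per the code below): digit-led
    # tags sorted numerically by their first three integer groups, followed
    # by lexical tags sorted alphabetically, e.g. (tag names illustrative):
    #   ["2019.2.0", "2019.2.2", "nightly", "testing"]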
    def list_tags(self, splitted=False):
        _files = TGZFile(self._repofile).list_files()
        # all files in the archive with the '.json' part cut off
        _all = set([f.rsplit('.', 1)[0] for f in _files])
        if splitted:
            # files that end with '.update'
            _updates = set([f for f in _all if f.find('update') >= 0])
            # files that end with '.hotfix'
            _hotfix = set([f for f in _all if f.find('hotfix') >= 0])
            # remove updates and hotfix tags from all. The true magic of SETs
            _all = _all - _updates - _hotfix
            # cut the updates and hotfix endings
            _updates = [f.rsplit('.', 1)[0] for f in _updates]
            _hotfix = [f.rsplit('.', 1)[0] for f in _hotfix]

            return _all, _updates, _hotfix
        else:
            # dynamic import
            import re
            _all = list(_all)
            # lexical tags
            _lex = [s for s in _all if not s[0].isdigit()]
            _lex.sort()
            # tags with digits
            _dig = [s for s in _all if s[0].isdigit()]
            _dig = sorted(
                _dig,
                key=lambda x: tuple(int(i) for i in re.findall('\\d+', x)[:3])
            )

            return _dig + _lex

    def get_repoinfo(self, tag):
        _tgz = TGZFile(self._repofile)
        _buf = _tgz.get_file(tag + ext)
        return json.loads(_buf)


class RepoManager(object):
    # files in the archive
    _repoindexfile = _repos_index_filename
    _versionsfile = _repos_versions_filename
    # archives
    _versions_arch = os.path.join(pkg_dir, "versions", _repos_versions_archive)
    _desc_arch = os.path.join(pkg_dir, "versions", _pkg_desc_archive)

    # repository index
    _repo_index = {}

    # init package versions storage
    _versions = {}

    def __init__(self):
        # Init version files
        self.versionstgz = TGZFile(
            self._versions_arch,
            label="MCP Configuration Checker: Package versions archive"
        )
        self.desctgz = TGZFile(
            self._desc_arch,
            label="MCP Configuration Checker: Package descriptions archive"
        )

        if self._versionsfile in self.versionstgz.list_files():
            logger_cli.info(
                "# Loading versions '{}':'{}'".format(
                    self._versions_arch,
                    self._versionsfile
                )
            )
            self._versions = json.loads(
                self.versionstgz.get_file(self._versionsfile)
            )

        if self._repoindexfile in self.versionstgz.list_files():
            self._repo_index = json.loads(
                self.versionstgz.get_file(
                    self._repoindexfile
                )
            )

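    # Repo header format built below, one string per unique repo
    # (field order as joined below):
    #   "<tag>_<subset>_<release>_<ubuntu-release>_<type>_<arch>"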
    def _create_repo_header(self, p):
        _header = "_".join([
            p['tag'],
            p['subset'],
            p['release'],
            p['ubuntu-release'],
            p['type'],
            p['arch']
        ])
        if not filter(
            lambda i: self._repo_index[i]["header"] == _header,
            self._repo_index
        ):
            _index = str(len(self._repo_index.keys()) + 1)
            self._repo_index[_index] = {
                "header": _header,
                "props": p
            }
        else:
            for _k, _v in self._repo_index.iteritems():
                if _v["header"] == _header:
                    _index = _k

        return _index

    def _get_repo_header(self, index):
        return self._repo_index[index]

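    # Versions storage shape maintained below:
    #   _versions[<pkg name>][<version>][<md5>] = [<repo header indices>]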
    def _update_pkg_version(self, n, v, md5, header_index):
        """Method updates a package version record in the global dict
        """
        # four nested 'if's are pretty expensive when run 100k times in a row,
        # so try/except is a better way to go, even faster than 'reduce'
        vs = self._versions
        try:
            # try to load the list
            _list = vs[n][v][md5]
            # cast it as set() and union()
            _list = set(_list).union([header_index])
            # cast back, as set() is not serializable
            vs[n][v][md5] = list(_list)
            return False
        except KeyError:
            # ok, this is a fresh pkg. Do it the slow way.
            if n in vs:
                # there is such a pkg already
                if v in vs[n]:
                    # there is such a version, check md5
                    if md5 in vs[n][v]:
                        # just add the new repo header
                        if header_index not in vs[n][v][md5]:
                            vs[n][v][md5].append(header_index)
                    else:
                        # check if such an index is here...
                        _existing = filter(
                            lambda i: header_index in vs[n][v][i],
                            vs[n][v]
                        )
                        if _existing:
                            # Yuck! Same version had different MD5
                            logger_cli.error(
                                "# ERROR: Package version has multiple MD5s "
                                "in '{}': {}:{}:{}".format(
                                    self._get_repo_header(
                                        header_index
                                    )["header"],
                                    n,
                                    v,
                                    md5
                                )
                            )
                        vs[n][v][md5] = [header_index]
                else:
                    # this is a new version for an existing package
                    vs[n][v] = {
                        md5: [header_index]
                    }
                return False
            else:
                # this is a new package
                vs[n] = {
                    v: {
                        md5: [header_index]
                    }
                }
                return True

    def _save_repo_descriptions(self, repo_props, desc):
        # form the filename for the repo and save it
        self.desctgz.add_file(
            self._create_repo_header(repo_props),
            json.dumps(desc)
        )

    # def get_description(self, repo_props, name, md5=None):
    #     """Gets target description
    #     """
    #     _filename = self._create_repo_header(repo_props)
    #     # check if it is present in cache
    #     if _filename in self._desc_cache:
    #         _descs = self._desc_cache[_filename]
    #     else:
    #         # load data
    #         _descs = self.desctgz.get_file(_filename)
    #         # Serialize it
    #         _descs = json.loads(_descs)
    #         self._desc_cache[_filename] = _descs
    #     # return target desc
    #     if name in _descs and md5 in _descs[name]:
    #         return _descs[name][md5]
    #     else:
    #         return None

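    # The parser below relies on the Debian Packages index format:
    # stanzas of "Key: value" lines separated by blank lines; continuation
    # lines start with a space and are appended to the previous key's value.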
    def parse_tag(self, tag, descriptions=False):
        """Downloads and parses Packages.gz files for a specific tag.
        By default, descriptions are not saved
        due to the huge resulting file size and slow processing
        """
        # init gzip and downloader
        _info = ReposInfo().get_repoinfo(tag)
        # calculate Packages.gz files to process
        _baseurl = _info.pop("baseurl")
        _total_components = len(_info.keys()) - 1
        _ubuntu_package_repos = 0
        _other_repos = 0
        for _c, _d in _info.iteritems():
            for _ur, _l in _d.iteritems():
                if _ur in ubuntu_releases:
                    _ubuntu_package_repos += len(_l)
                elif _ur != 'url':
                    _other_repos += len(_l)
        logger_cli.info(
            "-> loaded repository info for '{}'.\n"
            "    '{}', {} components, {} ubuntu repos, {} other/unknown".format(
                _baseurl,
                tag,
                _total_components,
                _ubuntu_package_repos,
                _other_repos
            )
        )
        # init progress bar
        _progress = Progress(_ubuntu_package_repos)
        _index = 0
        _processed = 0
        _new = 0
        for _c, _d in _info.iteritems():
            # we do not need the url here, just get rid of it
            if 'url' in _d:
                _d.pop('url')
            # _url = if 'url' in _d else _baseurl + _c
            for _ur, _l in _d.iteritems():
                # iterate package collections
                for _p in _l:
                    # descriptions
                    if descriptions:
                        _descriptions = {}
                    # download and unzip
                    _progress.write_progress(
                        _index,
                        note="/ {} {} {} {} {}, {}/{}".format(
                            _c,
                            _ur,
                            _p['ubuntu-release'],
                            _p['type'],
                            _p['arch'],
                            _processed,
                            _new
                        )
                    )
                    _raw = get_gzipped_file(_p['filepath'])
                    _lines = _raw.splitlines()
                    _index += 1
                    # break the lines collection into isolated pkg data
                    _pkg = {
                        "tag": tag,
                        "subset": _c,
                        "release": _ur
                    }
                    _pkg.update(_p)
                    _desc = {}
                    _key = _value = ""
                    for _line in _lines:
                        if not _line:
                            # the line is empty, process the gathered pkg data
                            _name = _desc['package']
                            _md5 = _desc['md5sum']
                            _version = _desc['version']
                            # update the version for the package
                            if self._update_pkg_version(
                                _name,
                                _version,
                                _md5,
                                self._create_repo_header(_pkg)
                            ):
                                _new += 1

                            if descriptions:
                                _d_new = {
                                    _md5: deepcopy(_desc)
                                }
                                try:
                                    _descriptions[_name].update(_d_new)
                                except KeyError:
                                    _descriptions[_name] = _d_new
                            # clear the data for the next pkg
                            _processed += 1
                            _desc = {}
                            _key = ""
                            _value = ""
                        elif _line.startswith(' '):
                            _desc[_key] += "\n{}".format(_line)
                        else:
                            _key, _value = _line.split(': ', 1)
                            _key = _key.lower()

                            _desc[_key] = _value
                    # save descriptions if needed
                    if descriptions:
                        _progress.clearline()
                        self._save_repo_descriptions(_pkg, _descriptions)

        _progress.end()
        # backup headers to disk
        self.versionstgz.add_file(
            self._repoindexfile,
            json.dumps(self._repo_index),
            replace=True
        )
        return

    def fetch_versions(self, tag, descriptions=False):
        """Executes parsing for a specific tag
        """
        if descriptions:
            logger_cli.warning(
                "\n\n# !!! WARNING: Saving repo descriptions "
                "consumes a huge amount of disk space\n\n"
            )
        # if there is no such tag, parse it from repoinfo
        _f = self._versionsfile
        logger_cli.info("# Fetching versions for {}".format(tag))
        self.parse_tag(tag, descriptions=descriptions)
        logger_cli.info("-> saving updated versions to {}".format(_f))
        self.versionstgz.add_file(_f, json.dumps(self._versions), replace=True)

    def build_repos(self, url, tag=None):
        """Builds versions data for the selected tag, or for all of them
        """
        # Init the ReposInfo class and check if all files are present
        _repos = ReposInfo()
        # recursively walk the mirrors
        # and gather all of the repos for 'tag' or all of the tags
        _repos.fetch_repos(url, tag=tag)

    def _build_action(self, url, tags):
        for t in tags:
            logger_cli.info(
                "# Building repo info for '{}/{}'".format(
                    url,
                    t
                )
            )
            self.build_repos(url, tag=t)

    def action_for_tag(
        self,
        url,
        tag,
        action=None,
        descriptions=None
    ):
        """Executes an action for every tag from all collections
        """
        if not action:
            logger_cli.info("# No action set, nothing to do")
            return
        # See if this is a list action
        if action == "list":
            _all = ReposInfo().list_tags()
            # Print the pretty list and exit
            logger_cli.info("# Tags available at '{}':".format(url))
            for t in _all:
                logger_cli.info(get_tag_label(t))
            # exit
            return

        # Populate action tags
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)
        _action_tags = []
        if tag in major:
            _action_tags.append(tag)
        if tag in updates:
            _action_tags.append(tag + ".update")
        if tag in hotfix:
            _action_tags.append(tag + ".hotfix")
        # Check if any tags were collected
        if not _action_tags:
            logger_cli.info(
                "# Tag '{}' not found. "
                "Consider rebuilding repos info.".format(tag)
            )
        else:
            logger_cli.info(
                "-> tags to process: {}".format(
                    ", ".join(_action_tags)
                )
            )
        # Execute actions
        if action == "build":
            self._build_action(url, _action_tags)
        elif action == "fetch":
            for t in _action_tags:
                self.fetch_versions(t, descriptions=descriptions)

        logger_cli.info("# Done.")

    def show_package(self, name):
        # get the package data
        _p = self.get_package_versions(name)
        if not _p:
            logger_cli.warning(
                "# WARNING: Package '{}' not found".format(name)
            )
        else:
            # print package info using sorted tags from headers
            # Package: name
            # [u/h] tag \t <version>
            #           \t <version>
            # <10symbols> \t <md5> \t sorted headers with no tag
            # ...
            logger_cli.info("\n# Package: {}".format(name))
            _o = ""
            # get and sort versions
            _vs = _p.keys()
            _vs.sort()
            for _v in _vs:
                _o += "\n" + " "*8 + _v + ':\n'
                # get and sort md5s
                _mds = _p[_v].keys()
                _mds.sort()
                for _md5 in _mds:
                    _o += " "*16 + _md5 + "\n"
                    # get and sort repo headers
                    _rr = _p[_v][_md5].keys()
                    _rr.sort()
                    for _r in _rr:
                        _o += " "*24 + _r.replace('_', ' ') + "\n"

            logger_cli.info(_o)

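    # Returned layout (built via nested_set below):
    #   tagged=False: {<version>: {<md5>: {<header>: <props>}}}
    #   tagged=True:  {<tag>: {<version>: {<header minus tag>: {<md5>: <props>}}}}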
    def get_package_versions(self, name, tagged=False):
        """Method builds a package version structure
        with repository properties included
        """
        # get data
        if name in self._versions:
            _vs = self._versions[name]
        else:
            return {}
        # insert repo data, insert props into headers place
        _package = {}
        if tagged:
            for _v, _d1 in _vs.iteritems():
                # use tag as a next step
                for _md5, _repos in _d1.iteritems():
                    for _index in _repos:
                        # extract props for a repo
                        _repo_props = self._repo_index[_index]
                        # get tag
                        _tag = _repo_props["props"]["tag"]
                        # cut tag from the header
                        _cut_head = _repo_props["header"].split("_", 1)[1]
                        # populate dict
                        nested_set(
                            _package,
                            [_tag, _v, _cut_head, _md5],
                            _repo_props["props"]
                        )
        else:
            for _v, _d1 in _vs.iteritems():
                for _md5, _repos in _d1.iteritems():
                    for _index in _repos:
                        _repo_props = self._repo_index[_index]
                        nested_set(
                            _package,
                            [_v, _md5, _repo_props["header"]],
                            _repo_props["props"]
                        )

        return _package

    def parse_repos(self):
        # all tags to check
        major, updates, hotfix = ReposInfo().list_tags(splitted=True)

        # major tags
        logger_cli.info("# Processing major tags")
        for _tag in major:
            self.fetch_versions(_tag)

        # updates tags
        logger_cli.info("# Processing update tags")
        for _tag in updates:
            self.fetch_versions(_tag + ".update")

        # hotfix tags
        logger_cli.info("# Processing hotfix tags")
        for _tag in hotfix:
            self.fetch_versions(_tag + ".hotfix")
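

# Minimal usage sketch (mirror URL, tag, and package name are illustrative):
#   rm = RepoManager()
#   rm.action_for_tag("http://mirror.example.com/", "2019.2.0", action="build")
#   rm.action_for_tag("http://mirror.example.com/", "2019.2.0", action="fetch")
#   rm.show_package("nova")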