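"""Compare the snapshot dates of the 'proposed' section on mirror.mirantis.com
against the latest published 2019.2.x release and flag subrepos whose proposed
snapshot is not newer than the released one."""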
import datetime
import re
import sys
import urllib.request


def get_url_content(url):
    # Download a URL and return the response body decoded as UTF-8.
    with urllib.request.urlopen(url) as f:
        return f.read().decode('utf-8')


def get_snapshot_date(target_content):
    # Extract the snapshot timestamp (YYYY-MM-DD-HHMMSS) from a target file line.
    date_regex = r".*(\d\d\d\d-\d\d-\d\d-\d\d\d\d\d\d)"
    snapshot_date_str = re.findall(date_regex, target_content)[0]
    date_format = "%Y-%m-%d-%H%M%S"
    snapshot_date = datetime.datetime.strptime(snapshot_date_str, date_format)
    return snapshot_date


update_url = "http://mirror.mirantis.com/update/"
proposed_section = "proposed/"
releases_regex = r"2019\.2\.\d+"
mirror_target_file = "xenial.target.txt"
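
# Discover every published 2019.2.x release on the mirror index and pick the
# newest one by its minor version number.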
update_url_content = get_url_content(update_url)
releases = re.findall(releases_regex, update_url_content)
latest_release = sorted(set(releases),
                        key=lambda release: int(release.split('.')[2]))[-1]
latest_release += "/"
update_released_url = update_url + latest_release
update_proposed_url = update_url + proposed_section
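# Directory listing of the proposed section; the checks below iterate over the
# explicit subrepo list.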
proposed_subrepos = get_url_content(update_proposed_url)
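
# Subrepos whose proposed snapshot is compared against the latest released one.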
subrepos = ["cassandra/",
            "ceph-luminous/",
            "ceph-nautilus/",
            "docker/",
            "elasticsearch-5.x/",
            "elasticsearch-6.x/",
            "elasticsearch-curator-5/",
            "extra/",
            "glusterfs-5/",
            "kubernetes-extra/",
            "maas/",
            "opencontrail-3.2/",
            "opencontrail-4.0/",
            "opencontrail-4.1/",
            "opencontrail-5.0/",
            "openstack-mitaka/",
            "openstack-newton/",
            "openstack-ocata/",
            "openstack-pike/",
            "openstack-queens/",
            "percona/",
            "salt-2017.7.8/",
            "salt-formulas/",
            "saltstack/",
            "saltstack-2017.7/",
            "td-agent/",
            "ubuntu",
            "maas-ephemeral-v3"]
good_subrepos = []
suspicious_subrepos = []
error_subrepos = []


def check_snapshot(subrepo):
    # Build the proposed and released target file URLs for this subrepo.
    # 'ubuntu' and 'maas-ephemeral-v3' keep their target file next to the
    # section root instead of inside a subdirectory.
    if subrepo in ['ubuntu', 'maas-ephemeral-v3']:
        proposed_target_url = "{}{}".format(
            update_proposed_url, "{}.target.txt".format(subrepo))
        released_target_url = "{}{}".format(
            update_released_url, "{}.target.txt".format(subrepo))
    else:
        proposed_target_url = "{}{}{}".format(update_proposed_url, subrepo,
                                              mirror_target_file)
        released_target_url = "{}{}{}".format(update_released_url, subrepo,
                                              mirror_target_file)
    # Compare the snapshot dates referenced on the first line of each target file.
    released_snapshot = get_url_content(released_target_url).splitlines()[0]
    proposed_snapshot = get_url_content(proposed_target_url).splitlines()[0]
    released_snapshot_date = get_snapshot_date(released_snapshot)
    proposed_snapshot_date = get_snapshot_date(proposed_snapshot)
    if proposed_snapshot_date > released_snapshot_date:
        good_subrepos.append(subrepo)
    elif proposed_snapshot_date == released_snapshot_date:
        suspicious_subrepos.append([subrepo, proposed_snapshot])
    else:
        error_subrepos.append([subrepo, proposed_snapshot, released_snapshot])
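

# Check every subrepo, then report the results grouped by outcome.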
for subrepo in subrepos:
    check_snapshot(subrepo)

for repo in good_subrepos:
    print("Repo {} - all fine, proposed is fresher than "
          "latest release".format(repo))
print("Good news ended\n" + "=" * 79)

for i in suspicious_subrepos:
    print(
        "Repo {} - same as previous release, ensure that this is fine:\n"
        "{}".format(i[0], i[1]))
print("Mediocre news ended\n" + "X" * 79)

for i in error_subrepos:
    # Each entry is [subrepo, proposed_snapshot, released_snapshot].
    print(
        "ERROR: Repo {} IS OLDER THAN RELEASED ONE!!!\n"
        "Released: {}\n"
        "Proposed: {}".format(i[0], i[2], i[1]))

# Exit with a non-zero status if any subrepo regressed.
if error_subrepos:
    sys.exit(1)