Merge "Only push tags to gerrit"
diff --git a/jeepyb/cmd/close_pull_requests.py b/jeepyb/cmd/close_pull_requests.py
index 8abeda9..f3c4f4f 100644
--- a/jeepyb/cmd/close_pull_requests.py
+++ b/jeepyb/cmd/close_pull_requests.py
@@ -95,8 +95,9 @@
for req in pull_requests:
vars = dict(project=project)
issue_data = {"url": repo.url + "/issues/" + str(req.number)}
- issue = github.Issue.Issue(req._requester,
- issue_data,
+ issue = github.Issue.Issue(requester=req._requester,
+ headers={},
+ attributes=issue_data,
completed=True)
issue.create_comment(MESSAGE % vars)
req.edit(state="closed")
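
The keyword-argument form matters because PyGithub's completable-object constructor takes (requester, headers, attributes, completed), so passing issue_data positionally, as the removed lines did, puts it in the headers slot. A self-contained sketch of the corrected call, assuming an authenticated PyGithub repository and pull request; the helper name is illustrative only, and MESSAGE stands for the template defined earlier in the script:

import github

def close_with_comment(req, repo, message):
    # Build an Issue object for the pull request so we can comment on it.
    issue_data = {"url": repo.url + "/issues/" + str(req.number)}
    issue = github.Issue.Issue(requester=req._requester,
                               headers={},
                               attributes=issue_data,
                               completed=True)
    issue.create_comment(message)   # post the closing notice on the PR's issue
    req.edit(state="closed")        # then close the pull request itself
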
diff --git a/jeepyb/cmd/notify_impact.py b/jeepyb/cmd/notify_impact.py
index af1ad8a..2214ffb 100644
--- a/jeepyb/cmd/notify_impact.py
+++ b/jeepyb/cmd/notify_impact.py
@@ -17,6 +17,19 @@
# patchsets for strings like "bug FOO" and updates corresponding Launchpad
# bugs status.
+# To test this by hand, I use a command line a bit like this:
+# python notify_impact.py --change 55607 \
+# --change-url https://review.openstack.org/55607 --project nova/ \
+# --branch master --commit c262de4417d48be599c3a7496ef94de5c84b188c \
+# --impact DocImpact --dest-address none@localhost --dryrun \
+# --ignore-duplicates \
+# change-merged
+#
+# But you'll need a git repository at /home/gerrit2/review_site/git/nova.git
+# for that to work.
+
+from __future__ import print_function
+
import argparse
import os
import re
@@ -26,6 +39,7 @@
from email.mime import text
from launchpadlib import launchpad
from launchpadlib import uris
+import yaml
BASE_DIR = '/home/gerrit2/review_site'
EMAIL_TEMPLATE = """
@@ -45,7 +59,39 @@
'~/.launchpadlib/creds'))
-def create_bug(git_log, args, lp_project):
+class BugActionsReal(object):
+ """Things we do to bugs."""
+
+ def __init__(self, lpconn):
+ self.lpconn = lpconn
+
+ def create(self, project, bug_title, bug_descr, args):
+ buginfo = self.lpconn.bugs.createBug(
+ target=project, title=bug_title,
+ description=bug_descr, tags=args.project.split('/')[1])
+ buglink = buginfo.web_link
+ return buginfo, buglink
+
+ def subscribe(self, buginfo, subscriber):
+ user = self.lpconn.people[subscriber]
+ if user:
+ buginfo.subscribe(person=user)
+
+
+class BugActionsDryRun(object):
+ def __init__(self, lpconn):
+ self.lpconn = lpconn
+
+ def create(self, project, bug_title, bug_descr, args):
+ print('I would have created a bug, but I am in dry run mode')
+ return None, None
+
+ def subscribe(self, buginfo, subscriber):
+ print('I would have added %s as a subscriber to the bug, '
+ 'but I am in dry run mode' % subscriber)
+
+
+def create_bug(git_log, args, lp_project, subscribers):
"""Create a bug for a change.
Create a launchpad bug in lp_project, titled with the first line of
@@ -60,23 +106,44 @@
credentials_file=GERRIT_CREDENTIALS,
version='devel')
+ if args.dryrun:
+ actions = BugActionsDryRun(lpconn)
+ else:
+ actions = BugActionsReal(lpconn)
+
lines_in_log = git_log.split("\n")
bug_title = lines_in_log[4]
bug_descr = args.change_url + '\n' + git_log
project = lpconn.projects[lp_project]
+
# check for existing bugs by searching for the title, to avoid
# creating multiple bugs per review
+ buglink = None
+ author_class = None
potential_dupes = project.searchTasks(search_text=bug_title)
- if len(potential_dupes) == 0:
- buginfo = lpconn.bugs.createBug(
- target=project, title=bug_title,
- description=bug_descr, tags=args.project.split('/')[1])
- buglink = buginfo.web_link
+
+ if len(potential_dupes) == 0 or args.ignore_duplicates:
+ buginfo, buglink = actions.create(project, bug_title, bug_descr, args)
+
+ # If the author of the merging patch matches our configured
+ # subscriber lists, then subscribe the configured victims.
+ for email_address in subscribers.get('author_map', {}):
+ email_re = re.compile('^Author:.*%s.*' % email_address)
+ for line in bug_descr.split('\n'):
+ m = email_re.match(line)
+ if m:
+ author_class = subscribers['author_map'][email_address]
+
+ if author_class:
+ subscribers = \
+ subscribers.get('subscriber_map', {}).get(author_class, [])
+ for subscriber in subscribers:
+ actions.subscribe(buginfo, subscriber)
return buglink
-def process_impact(git_log, args):
+def process_impact(git_log, args, subscribers):
"""Process DocImpact flag.
If the 'DocImpact' flag is present for a change that is merged,
@@ -87,7 +154,7 @@
"""
if args.impact.lower() == 'docimpact':
if args.hook == "change-merged":
- create_bug(git_log, args, 'openstack-manuals')
+ create_bug(git_log, args, 'openstack-manuals', subscribers)
return
email_content = EMAIL_TEMPLATE % (args.impact,
@@ -121,29 +188,69 @@
def main():
parser = argparse.ArgumentParser()
parser.add_argument('hook')
- #common
+
+ # common
parser.add_argument('--change', default=None)
parser.add_argument('--change-url', default=None)
parser.add_argument('--project', default=None)
parser.add_argument('--branch', default=None)
parser.add_argument('--commit', default=None)
- #change-merged
+
+ # change-merged
parser.add_argument('--submitter', default=None)
- #patchset-created
+
+ # patchset-created
parser.add_argument('--uploader', default=None)
parser.add_argument('--patchset', default=None)
+
# Not passed by gerrit:
parser.add_argument('--impact', default=None)
parser.add_argument('--dest-address', default=None)
+ # Automatic subscribers
+ parser.add_argument('--auto-subscribers', type=argparse.FileType('r'),
+ default=None)
+
+ # Don't actually create the bug
+ parser.add_argument('--dryrun', dest='dryrun', action='store_true')
+ parser.add_argument('--no-dryrun', dest='dryrun', action='store_false')
+ parser.set_defaults(dryrun=False)
+
+ # Ignore duplicates, useful for testing
+ parser.add_argument('--ignore-duplicates', dest='ignore_duplicates',
+ action='store_true')
+ parser.add_argument('--no-ignore-duplicates', dest='ignore_duplicates',
+ action='store_false')
+ parser.set_defaults(ignore_duplicates=False)
+
args = parser.parse_args()
+ # NOTE(mikal): the basic idea here is to let people watch
+ # docimpact bugs filed by people of interest. For example
+ # my team's tech writer wants to be subscribed to all the
+ # docimpact bugs we create. The config for that would be
+ # something like:
+ #
+ # author_map:
+ # mikal@stillhq.com: rcbau
+ # grumpy@dwarves.com: rcbau
+ #
+ # subscriber_map:
+ # rcbau: ['mikalstill', 'grumpypants']
+ #
+ # Where the entries in the author map are email addresses
+ # to match in author lines, and the subscriber map is a
+ # list of launchpad user ids.
+ subscribers = {}
+ if args.auto_subscribers:
+ subscribers = yaml.load(args.auto_subscribers.read())
+
# Get git log
git_log = extract_git_log(args)
# Process impacts found in git log
if impacted(git_log, args.impact):
- process_impact(git_log, args)
+ process_impact(git_log, args, subscribers)
if __name__ == "__main__":
main()
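
The subscription logic added to create_bug() above is driven by the two maps described in the NOTE(mikal) comment. A condensed, standalone sketch of that matching, using the same names as the patch; the helper function itself is illustrative only:

import re

def matching_subscribers(bug_descr, subscribers):
    # Find which author class, if any, the change's Author: line maps to.
    author_class = None
    for email_address in subscribers.get('author_map', {}):
        email_re = re.compile('^Author:.*%s.*' % email_address)
        for line in bug_descr.split('\n'):
            if email_re.match(line):
                author_class = subscribers['author_map'][email_address]
    if not author_class:
        return []
    # Map the author class to the Launchpad user ids to subscribe.
    return subscribers.get('subscriber_map', {}).get(author_class, [])

config = {'author_map': {'mikal@stillhq.com': 'rcbau'},
          'subscriber_map': {'rcbau': ['mikalstill', 'grumpypants']}}
matching_subscribers('Author: Michael Still <mikal@stillhq.com>', config)
# -> ['mikalstill', 'grumpypants']
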
diff --git a/jeepyb/cmd/run_mirror.py b/jeepyb/cmd/run_mirror.py
deleted file mode 100644
index 5e54583..0000000
--- a/jeepyb/cmd/run_mirror.py
+++ /dev/null
@@ -1,373 +0,0 @@
-#! /usr/bin/env python
-# Copyright (C) 2011 OpenStack, LLC.
-# Copyright (C) 2013 Hewlett-Packard Development Company, L.P.
-# Copyright (C) 2013 OpenStack Foundation
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# run_mirror reads a YAML config file like:
-# cache-root: /tmp/cache
-#
-# mirrors:
-# - name: openstack
-# projects:
-# - https://github.com/openstack/requirements
-# output: /tmp/mirror/openstack
-#
-# - name: openstack-infra
-# projects:
-# - https://github.com/openstack-infra/config
-# output: /tmp/mirror/openstack-infra
-#
-# The algorithm it attempts to follow is:
-#
-# for each project:
-# clone if necessary and fetch origin
-# for each project-branch:
-# create new virtualenv
-# pip install reqs into virtualenv
-# if installation succeeds:
-# pip freeze > full-reqs
-# create new virtualenv
-# pip install (download only) full-reqs into virtualenv
-#
-# By default only summary information is printed on stdout (see the
-# -d command line option to get more debug info).
-#
-# If "pip install" for a branch's requirements fails to complete
-# (based on parsing of its output), that output will be copied to
-# stderr and the script will skip ahead to the next branch. This
-# makes it suitable for running in a cron job with only stdout
-# redirected to a log, and also avoids one broken project preventing
-# caching of requirements for others.
-from __future__ import print_function
-
-import argparse
-import datetime
-import md5
-import os
-import pkginfo
-import re
-import shlex
-import shutil
-import subprocess
-import sys
-import tempfile
-import urllib
-import yaml
-
-
-class Mirror(object):
- def __init__(self):
- parser = argparse.ArgumentParser(
- description='Build a pypi mirror from requirements')
- parser.add_argument('-b', dest='branch',
- help='restrict run to a specified branch')
- parser.add_argument('-c', dest='config',
- help='specify the config file')
- parser.add_argument('-n', dest='noop', action='store_true',
- help='do not run any commands')
- parser.add_argument('--no-pip', dest='no_pip', action='store_true',
- help='do not run any pip commands')
- parser.add_argument('--verbose', dest='debug', action='store_true',
- help='output verbose debug information')
- parser.add_argument('--no-download', dest='no_download',
- action='store_true',
- help='only process the pip cache into a mirror '
- '(do not download)')
- parser.add_argument('--no-process', dest='no_process',
- action='store_true',
- help='only download into the pip cache '
- '(do not process the cache into a mirror)')
- parser.add_argument('--no-update', dest='no_update',
- action='store_true',
- help='do not update any git repos')
- self.args = parser.parse_args()
- self.config = yaml.load(open(self.args.config))
-
- def run_command(self, cmd):
- cmd_list = shlex.split(str(cmd))
- self.debug("Run: %s" % cmd)
- if self.args.noop:
- return ''
- if self.args.no_pip and cmd_list[0].endswith('pip'):
- return ''
- p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- (out, nothing) = p.communicate()
- out = out.strip()
- self.debug(out)
- return out
-
- def run(self):
- for mirror in self.config['mirrors']:
- if not self.args.no_download:
- self.build_mirror(mirror)
- if not self.args.no_process:
- self.process_cache(mirror)
-
- def chdir(self, dest):
- self.debug("cd %s" % dest)
- if not self.args.noop:
- os.chdir(dest)
-
- def debug(self, msg):
- if self.args.debug:
- print(msg)
-
- def process_http_requirements(self, reqlist, pip_cache_dir, pip):
- new_reqs = []
- for reqfile in reqlist:
- for req in open(reqfile):
- req = req.strip()
- # Handle http://, https://, and git+https?://
- if not re.search('https?://', req):
- new_reqs.append(req)
- continue
- target_url = req.split('#', 1)[0]
- target_file = os.path.join(pip_cache_dir,
- urllib.quote(target_url, ''))
- if os.path.exists(target_file):
- self.debug("Unlink: %s" % target_file)
- os.unlink(target_file)
- if os.path.exists(target_file + '.content-type'):
- self.debug("Unlink: %s.content-type" % target_file)
- os.unlink(target_file + '.content-type')
- return new_reqs
-
- def find_pkg_info(self, path):
- versions = set()
- for root, dirs, files in os.walk(path):
- if not root.endswith('.egg'):
- continue
- if not os.path.exists(os.path.join(root, 'EGG-INFO', 'PKG-INFO')):
- continue
- package = pkginfo.Develop(root)
- versions.add('%s==%s' % (package.name, package.version))
- return versions
-
- def build_mirror(self, mirror):
- print("Building mirror: %s" % mirror['name'])
- pip_format = ("%s install -U %s --exists-action=w "
- "--download-cache=%s --build %s -r %s")
- venv_format = ("virtualenv --clear --extra-search-dir=%s %s")
- upgrade_format = ("%s install -U --exists-action=w "
- "--download-cache=%s --build %s %s")
-
- workdir = tempfile.mkdtemp()
- reqs = os.path.join(workdir, "reqs")
- venv = os.path.join(workdir, "venv")
- build = os.path.join(workdir, "build")
- pip = os.path.join(venv, "bin", "pip")
-
- project_cache_dir = os.path.join(self.config['cache-root'],
- 'projects')
- pip_cache_dir = os.path.join(self.config['cache-root'],
- 'pip', mirror['name'])
- if not self.args.noop:
- if not os.path.exists(project_cache_dir):
- os.makedirs(project_cache_dir)
- if not os.path.exists(pip_cache_dir):
- os.makedirs(pip_cache_dir)
-
- for project in mirror['projects']:
- print("Updating repository: %s" % project)
- self.chdir(project_cache_dir)
- short_project = project.split('/')[-1]
- if short_project.endswith('.git'):
- short_project = short_project[:-4]
- if not os.path.isdir(short_project):
- out = self.run_command("git clone %s %s" %
- (project, short_project))
- self.chdir(os.path.join(project_cache_dir,
- short_project))
- out = self.run_command("git fetch -p origin")
-
- if self.args.branch:
- branches = [self.args.branch]
- else:
- branches = self.run_command("git branch -a").split("\n")
- for branch in branches:
- branch = branch.strip()
- if (not branch.startswith("remotes/origin")
- or "origin/HEAD" in branch):
- continue
- print("Fetching pip requires for %s:%s" %
- (project, branch))
- if not self.args.no_update:
- out = self.run_command("git reset --hard %s" % branch)
- out = self.run_command("git clean -x -f -d -q")
- reqlist = []
- if os.path.exists('global-requirements.txt'):
- reqlist.append('global-requirements.txt')
- else:
- for requires_file in ("requirements.txt",
- "test-requirements.txt",
- "tools/pip-requires",
- "tools/test-requires"):
- if os.path.exists(requires_file):
- reqlist.append(requires_file)
- if reqlist:
- out = self.run_command(venv_format %
- (pip_cache_dir, venv))
- out = self.run_command(upgrade_format %
- (pip, pip_cache_dir,
- build, "setuptools"))
- out = self.run_command(upgrade_format %
- (pip, pip_cache_dir,
- build, "pip"))
- out = self.run_command(upgrade_format %
- (pip, pip_cache_dir,
- build, "virtualenv"))
- if os.path.exists(build):
- shutil.rmtree(build)
- new_reqs = self.process_http_requirements(reqlist,
- pip_cache_dir,
- pip)
- (reqfp, reqfn) = tempfile.mkstemp()
- os.write(reqfp, '\n'.join(new_reqs))
- os.close(reqfp)
- out = self.run_command(pip_format %
- (pip, "", pip_cache_dir,
- build, reqfn))
- if "\nSuccessfully installed " not in out:
- sys.stderr.write("Installing pip requires for %s:%s "
- "failed.\n%s\n" %
- (project, branch, out))
- print("pip install did not indicate success")
- else:
- freeze = self.run_command("%s freeze -l" % pip)
- requires = self.find_pkg_info(build)
- reqfd = open(reqs, "w")
- for line in freeze.split("\n"):
- if line.startswith("-e ") or (
- "==" in line and " " not in line):
- requires.add(line)
- for r in requires:
- reqfd.write(r + "\n")
- reqfd.close()
- out = self.run_command(venv_format %
- (pip_cache_dir, venv))
- if os.path.exists(build):
- shutil.rmtree(build)
- out = self.run_command(pip_format %
- (pip, "--no-install",
- pip_cache_dir, build, reqs))
- if "\nSuccessfully downloaded " not in out:
- sys.stderr.write("Downloading pip requires for "
- "%s:%s failed.\n%s\n" %
- (project, branch, out))
- print("pip install did not indicate success")
- print("cached:\n%s" % freeze)
- else:
- print("no requirements")
- shutil.rmtree(workdir)
-
- def process_cache(self, mirror):
- if self.args.noop:
- return
-
- pip_cache_dir = os.path.join(self.config['cache-root'],
- 'pip', mirror['name'])
- destination_mirror = mirror['output']
-
- PACKAGE_VERSION_RE = re.compile(r'(.*)-[0-9]')
- full_html_line = "<a href='{dir}/{name}'>{name}</a><br />\n"
-
- packages = {}
- package_count = 0
-
- if not os.path.exists(destination_mirror):
- os.makedirs(destination_mirror)
-
- for filename in os.listdir(pip_cache_dir):
- if filename.endswith('content-type'):
- continue
-
- realname = urllib.unquote(filename)
- # The ? accounts for sourceforge downloads
- tarball = os.path.basename(realname).split("?")[0]
- name_match = PACKAGE_VERSION_RE.search(tarball)
-
- if name_match is None:
- continue
- package_name = name_match.group(1)
-
- version_list = packages.get(package_name, {})
- version_list[tarball] = filename
- packages[package_name] = version_list
- package_count = package_count + 1
-
- full_html = open(os.path.join(destination_mirror, ".full.html"), 'w')
- simple_html = open(os.path.join(destination_mirror, ".index.html"),
- 'w')
-
- header = ("<html><head><title>PyPI Mirror</title></head>"
- "<body><h1>PyPI Mirror</h1><h2>Last update: %s</h2>\n\n"
- % datetime.datetime.utcnow().strftime("%c UTC"))
- full_html.write(header)
- simple_html.write(header)
-
- for package_name, versions in packages.items():
- destination_dir = os.path.join(destination_mirror, package_name)
- if not os.path.isdir(destination_dir):
- os.makedirs(destination_dir)
- safe_dir = urllib.quote(package_name)
- simple_html.write("<a href='%s'>%s</a><br />\n" %
- (safe_dir, safe_dir))
- with open(os.path.join(destination_dir, ".index.html"),
- 'w') as index:
- index.write("""<html><head>
- <title>%s – PyPI Mirror</title>
- </head><body>\n""" % package_name)
- for tarball, filename in versions.items():
- source_path = os.path.join(pip_cache_dir, filename)
- destination_path = os.path.join(destination_dir,
- tarball)
- dot_destination_path = os.path.join(destination_dir,
- '.' + tarball)
- with open(dot_destination_path, 'w') as dest:
- src = open(source_path, 'r').read()
- md5sum = md5.md5(src).hexdigest()
- dest.write(src)
-
- safe_name = urllib.quote(tarball)
-
- full_html.write(full_html_line.format(dir=safe_dir,
- name=safe_name))
- index.write("<a href='%s#md5=%s'>%s</a>\n" %
- (safe_name, md5sum, safe_name))
- os.rename(dot_destination_path, destination_path)
- index.write("</body></html>\n")
- os.rename(os.path.join(destination_dir, ".index.html"),
- os.path.join(destination_dir, "index.html"))
- footer = """<p class='footer'>Generated by process_cache.py; %d
- packages mirrored. </p>
- </body></html>\n""" % package_count
- full_html.write(footer)
- full_html.close()
- os.rename(os.path.join(destination_mirror, ".full.html"),
- os.path.join(destination_mirror, "full.html"))
- simple_html.write(footer)
- simple_html.close()
- os.rename(os.path.join(destination_mirror, ".index.html"),
- os.path.join(destination_mirror, "index.html"))
-
-
-def main():
- mb = Mirror()
- mb.run()
-
-
-if __name__ == "__main__":
- main()
diff --git a/jeepyb/config/subscribers-sample b/jeepyb/config/subscribers-sample
new file mode 100644
index 0000000..c8cc608
--- /dev/null
+++ b/jeepyb/config/subscribers-sample
@@ -0,0 +1,6 @@
+author_map:
+ mikal@stillhq.com: rcbau
+ grumpy@dwarves.com: rcbau
+
+subscriber_map:
+ rcbau: ['mikalstill']
\ No newline at end of file
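
With this sample in place, the test invocation from the notify_impact.py header comment can exercise the new options; something like the following, with the config path given relative to a jeepyb checkout and the other details taken from that comment:

python notify_impact.py --change 55607 \
    --change-url https://review.openstack.org/55607 --project nova/ \
    --branch master --commit c262de4417d48be599c3a7496ef94de5c84b188c \
    --impact DocImpact --dest-address none@localhost --dryrun \
    --ignore-duplicates \
    --auto-subscribers jeepyb/config/subscribers-sample \
    change-merged
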
diff --git a/jeepyb/projects.py b/jeepyb/projects.py
index c1de885..19f178b 100644
--- a/jeepyb/projects.py
+++ b/jeepyb/projects.py
@@ -145,6 +145,11 @@
'openstack/oslo-incubator': 'oslo',
'openstack/tripleo-incubator': 'tripleo',
'openstack/django_openstack_auth': 'django-openstack-auth',
+ 'openstack/savanna': 'savanna',
+ 'openstack/python-savannaclient': 'savanna',
+ 'openstack/savanna-dashboard': 'savanna',
+ 'openstack/savanna-image-elements': 'savanna',
+ 'openstack/savanna-extra': 'savanna',
'openstack-infra/askbot-theme': 'openstack-ci',
'openstack-infra/config': 'openstack-ci',
'openstack-infra/devstack-gate': 'openstack-ci',
@@ -182,12 +187,11 @@
'stackforge/puppet-quantum': 'puppet-neutron',
'stackforge/tripleo-heat-templates': 'tripleo',
'stackforge/tripleo-image-elements': 'tripleo',
- 'stackforge/savanna': 'savanna',
- 'stackforge/savanna-dashboard': 'savanna',
- 'stackforge/savanna-extra': 'savanna',
- 'stackforge/savanna-image-elements': 'savanna',
- 'stackforge/python-savannaclient': 'savanna',
- 'stackforge/puppet-savanna': 'savanna'
+ 'stackforge/puppet-savanna': 'savanna',
+ 'stackforge/fuel-web': 'fuel',
+ 'stackforge/fuel-astute': 'fuel',
+ 'stackforge/fuel-ostf': 'fuel',
+ 'stackforge/fuel-main': 'fuel'
}
return project_map.get(project_full_name,
u.short_project_name(project_full_name))
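
The remapping above tracks savanna's move from stackforge/ to openstack/ and adds the fuel repositories; everything still funnels through the dict lookup in the return statement at the end of the hunk. A minimal illustration of how that lookup resolves, where short_name is a stand-in for u.short_project_name(), which is not shown in this hunk:

project_map = {
    'openstack/savanna': 'savanna',
    'stackforge/fuel-web': 'fuel',
}

def short_name(project_full_name):
    # stand-in for u.short_project_name()
    return project_full_name.rsplit('/', 1)[-1]

project_map.get('openstack/savanna', short_name('openstack/savanna'))  # -> 'savanna'
project_map.get('openstack/nova', short_name('openstack/nova'))        # -> 'nova' (fallback)
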
diff --git a/setup.cfg b/setup.cfg
index a257d7e..d71104c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -25,7 +25,6 @@
notify-impact = jeepyb.cmd.notify_impact:main
openstackwatch = jeepyb.cmd.openstackwatch:main
process-cache = jeepyb.cmd.process_cache:main
- run-mirror = jeepyb.cmd.run_mirror:main
trivial-rebase = jeepyb.cmd.trivial_rebase:main
update-blueprint = jeepyb.cmd.update_blueprint:main
update-bug = jeepyb.cmd.update_bug:main