jeepyb-0+20170923/CONTRIBUTING.md0000644000175000017500000000146413223707256014631 0ustar filipfilipIf you would like to contribute to the development of OpenStack, you must follow the steps in this page: [http://docs.openstack.org/infra/manual/developers.html](http://docs.openstack.org/infra/manual/developers.html) If you already have a good understanding of how the system works and your OpenStack accounts are set up, you can skip to the development workflow section of this documentation to learn how changes to OpenStack should be submitted for review via the Gerrit tool: [http://docs.openstack.org/infra/manual/developers.html#development-workflow](http://docs.openstack.org/infra/manual/developers.html#development-workflow) Pull requests submitted through GitHub will be ignored. Bugs should be filed [on StoryBoard](https://storyboard.openstack.org/#!/project/722), not in GitHub's issue tracker. jeepyb-0+20170923/MANIFEST.in0000644000175000017500000000017213223707256014131 0ustar filipfilipinclude jeepyb/versioninfo include AUTHORS include ChangeLog exclude .gitignore exclude .gitreview global-exclude *.pyc jeepyb-0+20170923/README.rst0000644000175000017500000000046013223707256014062 0ustar filipfilip=============================== Tools to Manage Gerrit Projects =============================== jeepyb is a collection of tools which make managing a gerrit easier. Specifically, management of gerrit projects and their associated upstream integration with things like github, launchpad, and storyboard. jeepyb-0+20170923/jeepyb/0000755000175000017500000000000013223707256013651 5ustar filipfilipjeepyb-0+20170923/jeepyb/gerritdb.py0000644000175000017500000000444713223707256016036 0ustar filipfilip#! /usr/bin/env python # Copyright (C) 2011 OpenStack, LLC. # Copyright (c) 2012 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
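"""Helpers for connecting to Gerrit's SQL database.

Editor's note: the short sketch below is an added illustration, not part of
the original module. Connection settings come from the [database] section of
gerrit.config / secure.config, so callers normally just do:

    import jeepyb.gerritdb
    cursor = jeepyb.gerritdb.connect().cursor()
    cursor.execute(query, params)   # query/params supplied by the caller

connect() caches a single module-level connection and pings it on reuse.
"""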
import ConfigParser import os import StringIO GERRIT_CONFIG = os.environ.get( 'GERRIT_CONFIG', '/home/gerrit2/review_site/etc/gerrit.config') GERRIT_SECURE_CONFIG = os.environ.get( 'GERRIT_SECURE_CONFIG', '/home/gerrit2/review_site/etc/secure.config') db_connection = None def get_broken_config(filename): """gerrit config ini files are broken and have leading tabs.""" text = "" for line in open(filename, "r"): text += line.lstrip() fp = StringIO.StringIO(text) c = ConfigParser.ConfigParser() c.readfp(fp) return c def connect(): global db_connection if not db_connection: gerrit_config = get_broken_config(GERRIT_CONFIG) secure_config = get_broken_config(GERRIT_SECURE_CONFIG) DB_TYPE = gerrit_config.get("database", "type") DB_HOST = gerrit_config.get("database", "hostname") DB_USER = gerrit_config.get("database", "username") DB_PASS = secure_config.get("database", "password") DB_DB = gerrit_config.get("database", "database") if DB_TYPE.upper() == "MYSQL": import pymysql db_connection = pymysql.connect( host=DB_HOST, user=DB_USER, password=DB_PASS, db=DB_DB) else: import psycopg2 db_connection = psycopg2.connect( host=DB_HOST, user=DB_USER, password=DB_PASS, database=DB_DB) else: try: # Make sure the database is responding and reconnect if not db_connection.ping(True) except AttributeError: # This database driver lacks a ping implementation pass return db_connection jeepyb-0+20170923/jeepyb/config/0000755000175000017500000000000013223707256015116 5ustar filipfilipjeepyb-0+20170923/jeepyb/config/subscribers-sample0000644000175000017500000000016113223707256020644 0ustar filipfilipauthor_map: mikal@stillhq.com: rcbau grumpy@dwarves.com: rcbau subscriber_map: rcbau: ['mikalstill']jeepyb-0+20170923/jeepyb/config/openstackwatch.ini-sample0000644000175000017500000000212313223707256022112 0ustar filipfilip# -*- Mode: conf -*- [general] # only show certain projects (don't forget the openstack/ as start) projects = openstack/swift, openstack/cinder # The Json URL where is the gerrit system. json_url = https://review.openstack.org/query?q=status:open # Allow different mode to output to swift, by default 'combined' will # combined all rss in one and 'multiple' will upload all the projects # in each rss file. output_mode = multiple # username to your swift cluster # [swift] # username/tenant for swift with 2.0 or just username with 1.0 (i.e: # RAX). # username = # passowrd or api key # password = # container to upload (probably want to be public) # container = # auth_url of the cluster, for Rackspace this is : # https://auth.api.rackspacecloud.com/v1.0 # or Rackspace UK : # https://lon.auth.api.rackspacecloud.com/v1.0 # auth_url = https://lon.auth.api.rackspacecloud.com/v1.0 # auth version (1.0 for Rackspace clouds, 2.0 for keystone backend clusters) # auth_version = 1.0 # the object name where to store the combined rss # combined_output_object = openstackwatch.xml # vim: ft=dosini jeepyb-0+20170923/jeepyb/projects.py0000644000175000017500000000543013223707256016056 0ustar filipfilip# Copyright (c) 2013 Mirantis. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """ Expected review.projects.yaml format: - project: some/project launchpad: awesomeproject description: Best project ever. groups: - awesome-group options: - delay-release - no-launchpad-bugs - no-launchpad-blueprints """ import ConfigParser import jeepyb.utils as u registry = u.ProjectsRegistry() def project_to_groups(project_full_name): return registry[project_full_name] \ .get('groups', [registry[project_full_name].get('group', u.short_project_name( project_full_name))]) def _is_no_launchpad(project_full_name, obj_type): try: return ('no-launchpad-' + obj_type in registry[project_full_name]['options']) except KeyError: return False def is_no_launchpad_bugs(project_full_name): return _is_no_launchpad(project_full_name, 'bugs') def is_no_launchpad_blueprints(project_full_name): return _is_no_launchpad(project_full_name, 'blueprints') def has_github(project_full_name): try: if not registry.defaults.get('projects', 'has-github'): # If the default is not to use GitHub... try: # ...then rely on the existence of a per-project option... return 'has-github' in registry[project_full_name]['options'] except KeyError: # ...and if it's not set, then still don't use it. return False # It's okay if the global option or even the section for this don't exist. except (ConfigParser.NoSectionError, ConfigParser.NoOptionError): pass # If we got this far, we either explicitly or implicitly default to use it. return True def has_translations(project_full_name): try: return 'translate' in registry[project_full_name]['options'] except KeyError: return False def is_delay_release(project_full_name): try: return 'delay-release' in registry[project_full_name]['options'] except KeyError: return False def docimpact_target(project_full_name): return registry.get_project_item(project_full_name, 'docimpact-group', 'unknown') jeepyb-0+20170923/jeepyb/__init__.py0000644000175000017500000000000013223707256015750 0ustar filipfilipjeepyb-0+20170923/jeepyb/utils.py0000644000175000017500000001747513223707256015401 0ustar filipfilip# Copyright (c) 2013 Mirantis. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
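"""Shared helpers used by the jeepyb command-line tools.

Editor's note: the usage sketch below is an added illustration, not part of
the original module. The entry points mostly drive things through
ProjectsRegistry, roughly:

    registry = ProjectsRegistry()           # reads PROJECTS_YAML (and PROJECTS_INI defaults)
    for entry in registry.configs_list:     # retired projects are filtered out
        name = entry['project']

The default paths under /home/gerrit2/ can be overridden with the
PROJECTS_YAML and PROJECTS_INI environment variables.
"""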
import ConfigParser import logging import os import shlex import subprocess import tempfile import yaml PROJECTS_INI = os.environ.get('PROJECTS_INI', '/home/gerrit2/projects.ini') PROJECTS_YAML = os.environ.get('PROJECTS_YAML', '/home/gerrit2/projects.yaml') log = logging.getLogger("jeepyb.utils") def is_retired(entry): """Is a project retired""" if entry.get('acl-config', '').endswith('/retired.config'): return True project = entry['project'] if '/' in project: (org, name) = project.split('/') if org.endswith('-attic'): return True return False def short_project_name(full_project_name): """Return the project part of the git repository name.""" return full_project_name.split('/')[-1] def run_command(cmd, status=False, env=None): env = env or {} cmd_list = shlex.split(str(cmd)) newenv = os.environ newenv.update(env) log.info("Executing command: %s" % " ".join(cmd_list)) p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=newenv) (out, nothing) = p.communicate() log.debug("Return code: %s" % p.returncode) log.debug("Command said: %s" % out.strip()) if status: return (p.returncode, out.strip()) return out.strip() def run_command_status(cmd, env=None): env = env or {} return run_command(cmd, True, env) def git_command(repo_dir, sub_cmd, env=None): env = env or {} git_dir = os.path.join(repo_dir, '.git') cmd = "git --git-dir=%s --work-tree=%s %s" % (git_dir, repo_dir, sub_cmd) status, _ = run_command(cmd, True, env) return status def git_command_output(repo_dir, sub_cmd, env=None): env = env or {} git_dir = os.path.join(repo_dir, '.git') cmd = "git --git-dir=%s --work-tree=%s %s" % (git_dir, repo_dir, sub_cmd) status, out = run_command(cmd, True, env) return (status, out) def make_ssh_wrapper(gerrit_user, gerrit_key): (fd, name) = tempfile.mkstemp(text=True) os.write(fd, '#!/bin/bash\n') os.write(fd, 'ssh -i %s -l %s -o "StrictHostKeyChecking no" $@\n' % (gerrit_key, gerrit_user)) os.close(fd) os.chmod(name, 0o755) return dict(GIT_SSH=name) def make_local_copy(repo_path, project, project_list, git_opts, ssh_env, upstream, GERRIT_HOST, GERRIT_PORT, project_git, GERRIT_GITID): # Ensure that the base location exists if not os.path.exists(os.path.dirname(repo_path)): os.makedirs(os.path.dirname(repo_path)) # Three choices # - If gerrit has it, get from gerrit # - If gerrit doesn't have it: # - If it has an upstream, clone that # - If it doesn't, create it # Gerrit knows about the project, clone it # TODO(mordred): there is a possible failure condition here # we should consider 'gerrit has it' to be # 'gerrit repo has a master branch' if project in project_list: try: run_command( "git clone %(remote_url)s %(repo_path)s" % git_opts, env=ssh_env) if upstream: git_command( repo_path, "remote add -f upstream %(upstream)s" % git_opts) return None except Exception: # If the clone fails, then we need to clone from the upstream # source pass # Gerrit doesn't have it, but it has an upstream configured # We're probably importing it for the first time, clone # upstream, but then ongoing we want gerrit to ge origin # and upstream to be only there for ongoing tracking # purposes, so rename origin to upstream and add a new # origin remote that points at gerrit if upstream: run_command( "git clone %(upstream)s %(repo_path)s" % git_opts, env=ssh_env) git_command( repo_path, "fetch origin +refs/heads/*:refs/copy/heads/*", env=ssh_env) git_command(repo_path, "remote rename origin upstream") git_command( repo_path, "remote add origin %(remote_url)s" % git_opts) return "push %s 
+refs/copy/heads/*:refs/heads/*" # Neither gerrit has it, nor does it have an upstream, # just create a whole new one else: run_command("git init %s" % repo_path) git_command( repo_path, "remote add origin %(remote_url)s" % git_opts) with open(os.path.join(repo_path, ".gitreview"), 'w') as gitreview: gitreview.write("""[gerrit] host=%s port=%s project=%s """ % (GERRIT_HOST, GERRIT_PORT, project_git)) git_command(repo_path, "add .gitreview") cmd = ("commit -a -m'Added .gitreview' --author='%s'" % GERRIT_GITID) git_command(repo_path, cmd) return "push %s HEAD:refs/heads/master" def fsck_repo(repo_path): rc, out = git_command_output(repo_path, 'fsck --full') # Check for non zero return code or warnings which should # be treated as errors. In this case zeroPaddedFilemodes # will not be accepted by Gerrit/jgit but are accepted by C git. if rc != 0 or 'zeroPaddedFilemode' in out: log.error('git fsck of %s failed:\n%s' % (repo_path, out)) raise Exception('git fsck failed not importing') class ProjectsRegistry(object): """read config from ini or yaml file. It could be used as dict 'project name' -> 'project properties'. """ def __init__(self, yaml_file=PROJECTS_YAML, single_doc=True): self.yaml_doc = [c for c in yaml.safe_load_all(open(yaml_file))] self.single_doc = single_doc self._configs_list = [] self.defaults = {} self._parse_file() def _parse_file(self): if self.single_doc: self._configs_list = self.yaml_doc[0] else: self._configs_list = self.yaml_doc[1] if os.path.exists(PROJECTS_INI): self.defaults = ConfigParser.ConfigParser() self.defaults.read(PROJECTS_INI) else: try: self.defaults = self.yaml_doc[0][0] except IndexError: pass configs = {} for section in self._configs_list: configs[section['project']] = section self.configs = configs def __getitem__(self, item): return self.configs[item] def get_project_item(self, project, item, default=None): if project in self.configs: return self.configs[project].get(item, default) else: return default def get(self, item, default=None): return self.configs.get(item, default) def get_defaults(self, item, default=None): if os.path.exists(PROJECTS_INI): section = 'projects' if self.defaults.has_option(section, item): if type(default) == bool: return self.defaults.getboolean(section, item) else: return self.defaults.get(section, item) return default else: return self.defaults.get(item, default) @property def configs_list(self): return [entry for entry in self._configs_list if not is_retired(entry)] jeepyb-0+20170923/jeepyb/log.py0000644000175000017500000000257313223707256015013 0ustar filipfilip# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
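"""Logging helpers shared by the jeepyb console scripts.

Editor's note: illustrative sketch added for clarity, not original text.
The two helpers below are meant to be used together with argparse, e.g.:

    parser = argparse.ArgumentParser(description='...')
    setup_logging_arguments(parser)     # adds the -v, -d and -l options
    args = parser.parse_args()
    configure_logging(args)
"""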
import logging def setup_logging_arguments(parser): """Sets up logging arguments, adds -d, -l and -v to the given parser.""" parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='verbose output') parser.add_argument('-d', dest='debug', action='store_true', help='debug output') parser.add_argument('-l', dest='logfile', help='log file to use') def configure_logging(args): if args.debug: level = logging.DEBUG elif args.verbose: level = logging.INFO else: level = logging.ERROR logging.basicConfig(level=level, filename=args.logfile, format='%(asctime)-6s: %(name)s - %(levelname)s' ' - %(message)s') jeepyb-0+20170923/jeepyb/translations.py0000755000175000017500000000652613223707256016760 0ustar filipfilip# Copyright (c) 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import json try: from urllib.parse import urljoin except ImportError: from urlparse import urljoin import requests class ZanataRestService: def __init__(self, url, username, api_key, verify=False): self.url = url self.verify = verify content_type = 'application/json;charset=utf8' self.headers = {'Accept': content_type, 'Content-Type': content_type, 'X-Auth-User': username, 'X-Auth-Token': api_key} def _construct_url(self, url_fragment): return urljoin(self.url, url_fragment) def query(self, url_fragment): request_url = self._construct_url(url_fragment) try: return requests.get(request_url, verify=self.verify, headers=self.headers) except requests.exceptions.ConnectionError: raise ValueError('Connection error') def push(self, url_fragment, data): request_url = self._construct_url(url_fragment) try: return requests.put(request_url, verify=self.verify, headers=self.headers, data=json.dumps(data)) except requests.exceptions.ConnectionError: raise ValueError('Connection error') class TranslationProject: def __init__(self, rest_service, project): self.rest_service = rest_service self.project = project def is_registered(self): r = self.rest_service.query('/rest/projects/p/%s' % self.project) return r.status_code == 200 def has_master(self): r = self.rest_service.query( '/rest/projects/p/%s/iterations/i/master' % self.project) return r.status_code == 200 def register_project(self): project_data = {u'defaultType': u'Gettext', u'status': u'ACTIVE', u'id': self.project, u'name': self.project, u'description': self.project.title()} r = self.rest_service.push('/rest/projects/p/%s' % self.project, project_data) return r.status_code in (200, 201) def register_master_iteration(self): iteration = {u'status': u'ACTIVE', u'projectType': u'Gettext', u'id': u'master'} r = self.rest_service.push( '/rest/projects/p/%s/iterations/i/master' % self.project, iteration) return r.status_code in (200, 201) def register(self): if not self.is_registered(): if not self.register_project(): raise ValueError('Failed to register project.') if not self.has_master(): if not self.register_master_iteration(): raise ValueError('Failed to register master iteration.') 
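# Editor's note: hedged usage sketch appended for illustration; it is not part
# of the original module. The server URL, user and API key below are
# placeholders -- register_zanata_projects.py wires these classes up the same
# way using ZANATA_URL / ZANATA_USER / ZANATA_KEY from the environment.
if __name__ == '__main__':
    _service = ZanataRestService('https://translate.example.org/',
                                 'example-user', 'example-api-key')
    _project = TranslationProject(_service, 'example-project')
    _project.register()  # creates the project and a 'master' iteration if missing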
jeepyb-0+20170923/jeepyb/cmd/0000755000175000017500000000000013223707256014414 5ustar filipfilipjeepyb-0+20170923/jeepyb/cmd/register_zanata_projects.py0000644000175000017500000000351513223707256022065 0ustar filipfilip#!/usr/bin/env python # Copyright (c) 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import logging import os import jeepyb.log as l import jeepyb.projects as p import jeepyb.translations as t import jeepyb.utils as u PROJECTS_YAML = os.environ.get('PROJECTS_YAML', '/home/gerrit2/projects.yaml') ZANATA_URL = os.environ.get('ZANATA_URL') ZANATA_USER = os.environ.get('ZANATA_USER') ZANATA_KEY = os.environ.get('ZANATA_KEY') log = logging.getLogger('register_zanata_projects') def main(): parser = argparse.ArgumentParser(description='Register projects in Zanata') l.setup_logging_arguments(parser) args = parser.parse_args() l.configure_logging(args) registry = u.ProjectsRegistry(PROJECTS_YAML) rest_service = t.ZanataRestService(ZANATA_URL, ZANATA_USER, ZANATA_KEY) log.info("Registering projects in Zanata") for entry in registry.configs_list: project = entry['project'] if not p.has_translations(project): continue log.info("Processing project %s" % project) (org, name) = project.split('/') try: translation_proect = t.TranslationProject(rest_service, name) translation_proect.register() except ValueError as e: log.error(e) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/notify_impact.py0000644000175000017500000002620413223707256017637 0ustar filipfilip#!/usr/bin/env python # Copyright (c) 2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # This is designed to be called by a gerrit hook. It searched new # patchsets for strings like "bug FOO" and updates corresponding Launchpad # bugs status. # You want to test this? 
I use a command line a bit like this: # python notify_impact.py --change 55607 \ # --change-url https://review.openstack.org/55607 --project nova/ \ # --branch master --commit c262de4417d48be599c3a7496ef94de5c84b188c \ # --impact DocImpact --dest-address none@localhost --dryrun \ # --config foo.yaml \ # change-merged # # But you'll need a git repository at /home/gerrit2/review_site/git/nova.git # for that to work from __future__ import print_function import argparse import logging import os import re import smtplib import subprocess from email.mime import text from launchpadlib import launchpad from launchpadlib import uris import yaml from jeepyb import projects logger = logging.getLogger('notify_impact') DOC_TAG = "doc" BASE_DIR = '/home/gerrit2/review_site' EMAIL_TEMPLATE = """ Hi, I'd like you to take a look at this patch for potential %s. %s Log: %s """ GERRIT_CACHE_DIR = os.path.expanduser( os.environ.get('GERRIT_CACHE_DIR', '~/.launchpadlib/cache')) GERRIT_CREDENTIALS = os.path.expanduser( os.environ.get('GERRIT_CREDENTIALS', '~/.launchpadlib/creds')) class BugActionsReal(object): """Things we do to bugs.""" def __init__(self, lpconn): self.lpconn = lpconn def create(self, project, bug_title, bug_descr, args): # If the bug report is not targeting the 'openstack-manuals' # project, add an extra doc tag to make the bug easier to # look up. lp_target_project = str(project).split('/')[-1] tags = args.project.split('/')[1] if lp_target_project != 'openstack-manuals': tags = [tags, DOC_TAG] buginfo = self.lpconn.bugs.createBug( target=project, title=bug_title, description=bug_descr, tags=tags) buglink = buginfo.web_link return buginfo, buglink def subscribe(self, buginfo, subscriber): user = self.lpconn.people[subscriber] if user: buginfo.subscribe(person=user) class BugActionsDryRun(object): def __init__(self, lpconn): self.lpconn = lpconn def create(self, project, bug_title, bug_descr, args): print('I would have created a bug in %s, but I am in dry run mode.\n\n' 'Title: %s\n' 'Description:\n' '%s' % (project, bug_title, bug_descr)) return None, None def subscribe(self, buginfo, subscriber): print('I would have added %s as a subscriber to the bug, ' 'but I am in dry run mode' % subscriber) def create_bug(git_log, args, config): """Create a bug for a change. Create a launchpad bug in a LP project, titled with the first line of the git commit message, with the content of the git_log prepended with the Gerrit review URL. Tag the bug with the name of the repository it came from. Returns link to the bug. """ # Determine what LP project to use prelude = ('\nDear bug triager. This bug was created since a ' 'commit was marked with DOCIMPACT.\n') project_name = args.project.rstrip('/') lp_project = projects.docimpact_target(project_name) if lp_project == 'unknown': prelude = ('\nDear bug triager. This bug was created since a ' 'commit was marked with DOCIMPACT.\n' 'Your project "%s" is set up so that we directly report ' 'the documentation bugs against it. If this needs ' 'changing, the docimpact-group option needs to be added ' 'for the project. 
You can ask the ' 'OpenStack infra team (#openstack-infra on freenode) for ' 'help if you need to.\n' % args.project) lp_project = project_name lpconn = launchpad.Launchpad.login_with( 'Gerrit User Sync', uris.LPNET_SERVICE_ROOT, GERRIT_CACHE_DIR, credentials_file=GERRIT_CREDENTIALS, version='devel') if args.dryrun: actions = BugActionsDryRun(lpconn) else: actions = BugActionsReal(lpconn) lines_in_log = git_log.split('\n') bug_title = lines_in_log[4] bug_descr = args.change_url + prelude + '\n' + git_log project = lpconn.projects[lp_project] buglink = None author_class = None buginfo, buglink = actions.create(project, bug_title, bug_descr, args) logger.info('Created a bug in project %(project)s with title "%(title)s": ' '%(buglink)s' % {'project': project, 'title': bug_title, 'buglink': buglink}) # If the author of the merging patch matches our configured # subscriber lists, then subscribe the configured victims. for email_address in config.get('author_map', {}): email_re = re.compile('^Author:.*%s.*' % email_address) for line in bug_descr.split('\n'): m = email_re.match(line) if m: author_class = config['author_map'][email_address] if author_class: config = config.get('subscriber_map', {}).get(author_class, []) for subscriber in config: actions.subscribe(buginfo, subscriber) logger.info('Subscribed %(subscriber)s to bug %(buglink)s' % {'subscriber': subscriber, 'buglink': buglink}) return buglink def smtp_connection(args): """Create SMTP connection based on command line arguments, falling back to sensible defaults if no arguments are provided. """ conn = None if args.smtp_ssl: port = 465 if not args.smtp_port else args.smtp_port conn = smtplib.SMTP_SSL(args.smtp_host, port) else: port = 25 if not args.smtp_port else args.smtp_port conn = smtplib.SMTP(args.smtp_host, port) if args.smtp_starttls: conn.starttls() conn.ehlo() if args.smtp_user and args.smtp_pass: conn.login(args.smtp_user, args.smtp_pass) return conn def process_impact(git_log, args, config): """Process DocImpact flag. If the 'DocImpact' flag is present for a change that is merged, create a new documentation bug in the openstack-manuals launchpad project based on the git_log. For non-documentation impacts at all states of merge notify the mailing list of impact. """ if args.impact.lower() == 'docimpact': if args.hook == "change-merged": create_bug(git_log, args, config) return email_content = EMAIL_TEMPLATE % (args.impact, args.change_url, git_log) msg = text.MIMEText(email_content) msg['Subject'] = '[%s] %s review request change %s' % \ (args.project, args.impact, args.change) msg['From'] = args.smtp_from msg['To'] = args.dest_address s = smtp_connection(args) s.sendmail(args.smtp_from, args.dest_address, msg.as_string()) s.quit() def impacted(git_log, impact_string): """Determine if a changes log indicates there is an impact.""" return re.search(impact_string, git_log, re.IGNORECASE) def extract_git_log(args): """Extract git log of all merged commits.""" cmd = ['git', '--git-dir=' + BASE_DIR + '/git/' + args.project + '.git', 'log', '--no-merges', args.commit + '^1..' 
+ args.commit] return subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0] def main(): parser = argparse.ArgumentParser() parser.add_argument('hook') # common parser.add_argument('--change', default=None) parser.add_argument('--change-url', default=None) parser.add_argument('--project', default=None) parser.add_argument('--branch', default=None) parser.add_argument('--commit', default=None) parser.add_argument('--topic', default=None) parser.add_argument('--change-owner', default=None) # patchset-abandoned parser.add_argument('--abandoner', default=None) parser.add_argument('--reason', default=None) # change-merged parser.add_argument('--submitter', default=None) parser.add_argument('--newrev', default=None) # patchset-created parser.add_argument('--uploader', default=None) parser.add_argument('--patchset', default=None) parser.add_argument('--is-draft', default=None) parser.add_argument('--kind', default=None) # Not passed by gerrit: parser.add_argument('--impact', default=None) parser.add_argument('--dest-address', default=None) # Automatic config: config contains a mapping of email addresses to # subscribers. parser.add_argument('--config', type=argparse.FileType('r'), default=None) # Don't actually create the bug parser.add_argument('--dryrun', dest='dryrun', action='store_true') parser.add_argument('--no-dryrun', dest='dryrun', action='store_false') parser.set_defaults(dryrun=False) # SMTP configuration parser.add_argument('--smtp-from', dest='smtp_from', default='gerrit2@review.openstack.org') parser.add_argument('--smtp-host', dest='smtp_host', default="localhost") parser.add_argument('--smtp-port', dest='smtp_port') parser.add_argument('--smtp-ssl', dest='smtp_ssl', action='store_true') parser.add_argument('--smtp-starttls', dest='smtp_starttls', action='store_true') parser.add_argument('--smtp-user', dest='smtp_user', default=os.getenv('SMTP_USER')) parser.add_argument('--smtp-pass', dest='smtp_pass', default=os.getenv('SMTP_PASS')) args = parser.parse_args() # NOTE(mikal): the basic idea here is to let people watch # docimpact bugs filed by people of interest. For example # my team's tech writer wants to be subscribed to all the # docimpact bugs we create. The config for that would be # something like: # # author_map: # mikal@stillhq.com: rcbau # grumpy@dwarves.com: rcbau # # subscriber_map: # rcbau: ['mikalstill', 'grumpypants'] # # Where the entries in the author map are email addresses # to match in author lines, and the subscriber map is a # list of launchpad user ids. config = {} if args.config: config = yaml.load(args.config.read()) # Get git log git_log = extract_git_log(args) # Process impacts found in git log if impacted(git_log, args.impact): process_impact(git_log, args, config) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/manage_projects.py0000644000175000017500000005244313223707256020137 0ustar filipfilip#! /usr/bin/env python # Copyright (C) 2011 OpenStack, LLC. # Copyright (c) 2012 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. # manage_projects.py reads a config file called projects.ini # It should look like: # [projects] # homepage=http://openstack.org # gerrit-host=review.openstack.org # local-git-dir=/var/lib/git # gerrit-key=/home/gerrit2/review_site/etc/ssh_host_rsa_key # gerrit-committer=Project Creator # gerrit-replicate=True # has-github=True # has-wiki=False # has-issues=False # has-downloads=False # acl-dir=/home/gerrit2/acls # acl-base=/home/gerrit2/acls/project.config # # manage_projects.py reads a project listing file called projects.yaml # It should look like: # - project: PROJECT_NAME # options: # - has-wiki # - has-issues # - has-downloads # - has-pull-requests # - track-upstream # homepage: Some homepage that isn't http://openstack.org # description: This is a great project # upstream: https://gerrit.googlesource.com/gerrit # upstream-prefix: upstream # acl-config: /path/to/gerrit/project.config # acl-append: # - /path/to/gerrit/project.config # acl-parameters: # project: OTHER_PROJECT_NAME import argparse import ConfigParser import glob import hashlib import json import logging import os import re import shutil import time import gerritlib.gerrit import github import jeepyb.gerritdb import jeepyb.log as l import jeepyb.utils as u registry = u.ProjectsRegistry() log = logging.getLogger("manage_projects") orgs = None # Gerrit system groups as defined: # https://review.openstack.org/Documentation/access-control.html#system_groups # Need to set Gerrit system group's uuid to the format it expects. GERRIT_SYSTEM_GROUPS = { 'Anonymous Users': 'global:Anonymous-Users', 'Project Owners': 'global:Project-Owners', 'Registered Users': 'global:Registered-Users', 'Change Owner': 'global:Change-Owner', } class FetchConfigException(Exception): pass class CopyACLException(Exception): pass class CreateGroupException(Exception): pass def fetch_config(project, remote_url, repo_path, env=None): env = env or {} # Poll for refs/meta/config as gerrit may not have written it out for # us yet. for x in range(10): status = u.git_command( repo_path, "fetch %s +refs/meta/config:refs/remotes/gerrit-meta/config" % remote_url, env) if status == 0: break else: log.debug("Failed to fetch refs/meta/config for project: %s" % project) time.sleep(2) if status != 0: log.error("Failed to fetch refs/meta/config for project: %s" % project) raise FetchConfigException() # Poll for project.config as gerrit may not have committed an empty # one yet. output = "" for x in range(10): status = u.git_command(repo_path, "remote update --prune", env) if status != 0: log.error("Failed to update remote: %s" % remote_url) time.sleep(2) continue else: status, output = u.git_command_output( repo_path, "ls-files --with-tree=remotes/gerrit-meta/config " "project.config", env) if output.strip() != "project.config" or status != 0: log.debug("Failed to find project.config for project: %s" % project) time.sleep(2) else: break if output.strip() != "project.config" or status != 0: log.error("Failed to find project.config for project: %s" % project) raise FetchConfigException() # Because the following fails if executed more than once you should only # run fetch_config once in each repo. 
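    # Editor's note: "checkout -B" (re)creates the local 'config' branch from
    # the just-fetched refs/remotes/gerrit-meta/config, so the ACL update can
    # be committed on it and later pushed back to refs/meta/config.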
status = u.git_command( repo_path, "checkout -B config remotes/gerrit-meta/config") if status != 0: log.error("Failed to checkout config for project: %s" % project) raise FetchConfigException() def copy_acl_config(project, repo_path, acl_config): if not os.path.exists(acl_config): raise CopyACLException() acl_dest = os.path.join(repo_path, "project.config") status, _ = u.run_command( "cp %s %s" % (acl_config, acl_dest), status=True) if status != 0: raise CopyACLException() status = u.git_command(repo_path, "diff --quiet") return status != 0 def push_acl_config(project, remote_url, repo_path, gitid, env=None): env = env or {} cmd = "commit -a -m'Update project config.' --author='%s'" % gitid status = u.git_command(repo_path, cmd) if status != 0: log.error("Failed to commit config for project: %s" % project) return False status, out = u.git_command_output( repo_path, "push %s HEAD:refs/meta/config" % remote_url, env) if status != 0: log.error("Failed to push config for project: %s" % project) return False return True def _get_group_uuid(group, retries=10): """ Gerrit keeps internal user groups in the DB while it keeps systems groups in All-Projects groups file (in refs/meta/config). This will only get the UUIDs for internal user groups. Note: 'Administrators', 'Non-Interactive Users' and all other custom groups in Gerrit are defined as internal user groups. Wait for up to 10 seconds for the group to be created in the DB. """ query = "SELECT group_uuid FROM account_groups WHERE name = %s" con = jeepyb.gerritdb.connect() for x in range(retries): cursor = con.cursor() cursor.execute(query, (group,)) data = cursor.fetchone() cursor.close() con.commit() if data: return data[0] if retries > 1: time.sleep(1) return None def get_group_uuid(gerrit, group): uuid = _get_group_uuid(group, retries=1) if uuid: return uuid if group in GERRIT_SYSTEM_GROUPS: return GERRIT_SYSTEM_GROUPS[group] gerrit.createGroup(group) for user in gerrit.listMembers(group): if gerrit.username == user['username']: # Gerrit now adds creating user to groups. We don't want that. gerrit.removeMember(group, gerrit.username) break uuid = _get_group_uuid(group) if uuid: return uuid return None def create_groups_file(project, gerrit, repo_path): acl_config = os.path.join(repo_path, "project.config") group_file = os.path.join(repo_path, "groups") uuids = {} for line in open(acl_config, 'r'): r = re.match(r'^.*\sgroup\s+(.*)$', line) if r: group = r.group(1) if group in uuids.keys(): continue uuid = get_group_uuid(gerrit, group) if uuid: uuids[group] = uuid else: log.error("Unable to get UUID for group %s." 
% group) raise CreateGroupException() if uuids: with open(group_file, 'w') as fp: for group, uuid in uuids.items(): fp.write("%s\t%s\n" % (uuid, group)) status = u.git_command(repo_path, "add groups") if status != 0: log.error("Failed to add groups file for project: %s" % project) raise CreateGroupException() def create_update_github_project( default_has_issues, default_has_downloads, default_has_wiki, github_secure_config, options, project, description, homepage, cache): created = False has_issues = 'has-issues' in options or default_has_issues has_downloads = 'has-downloads' in options or default_has_downloads has_wiki = 'has-wiki' in options or default_has_wiki needs_update = False if not cache.get('created-in-github', False): needs_update = True if not cache.get('gerrit-in-team', False): needs_update = True if cache.get('has_issues', default_has_issues) != has_issues: needs_update = True if cache.get('has_downloads', default_has_downloads) != has_downloads: needs_update = True if cache.get('has_wiki', default_has_wiki) != has_wiki: needs_update = True if not needs_update: return False secure_config = ConfigParser.ConfigParser() secure_config.read(github_secure_config) global orgs if orgs is None: if secure_config.has_option("github", "oauth_token"): ghub = github.Github(secure_config.get("github", "oauth_token")) else: ghub = github.Github(secure_config.get("github", "username"), secure_config.get("github", "password")) log.info('Fetching github org list') orgs = ghub.get_user().get_orgs() orgs_dict = dict(zip([o.login.lower() for o in orgs], orgs)) # Find the project's repo project_split = project.split('/', 1) org_name = project_split[0] if len(project_split) > 1: repo_name = project_split[1] else: repo_name = project try: org = orgs_dict[org_name.lower()] except KeyError: # We do not have control of this github org ignore the project. 
return False try: log.info("Fetching github info about %s", repo_name) repo = org.get_repo(repo_name) except github.GithubException: log.info("Creating %s in github", repo_name) repo = org.create_repo(repo_name, homepage=homepage, has_issues=has_issues, has_downloads=has_downloads, has_wiki=has_wiki) created = True cache['has_wiki'] = has_wiki cache['has_downloads'] = has_downloads cache['has_issues'] = has_issues kwargs = {} # If necessary, update project on Github if description and description != repo.description: kwargs['description'] = description if homepage and homepage != repo.homepage: kwargs['homepage'] = homepage if has_issues != repo.has_issues: kwargs['has_issues'] = has_issues if has_downloads != repo.has_downloads: kwargs['has_downloads'] = has_downloads if has_wiki != repo.has_wiki: kwargs['has_wiki'] = has_wiki if kwargs: log.info("Updating github repo info about %s", repo_name) repo.edit(repo_name, **kwargs) cache.update(kwargs) if not cache.get('gerrit-in-team', False): if 'gerrit' not in [team.name for team in repo.get_teams()]: log.info("Adding gerrit to github team for %s", repo_name) teams = org.get_teams() teams_dict = dict(zip([t.name.lower() for t in teams], teams)) teams_dict['gerrit'].add_to_repos(repo) cache['gerrit-in-team'] = True created = True return created # TODO(mordred): Inspect repo_dir:master for a description # override def find_description_override(repo_path): return None def push_to_gerrit(repo_path, project, push_string, remote_url, ssh_env): try: u.git_command(repo_path, push_string % remote_url, env=ssh_env) u.git_command(repo_path, "push --tags %s" % remote_url, env=ssh_env) except Exception: log.exception( "Error pushing %s to Gerrit." % project) def process_acls(acl_config, project, ACL_DIR, section, remote_url, repo_path, ssh_env, gerrit, GERRIT_GITID): if not os.path.isfile(acl_config): return try: fetch_config(project, remote_url, repo_path, ssh_env) if not copy_acl_config(project, repo_path, acl_config): # nothing was copied, so we're done return create_groups_file(project, gerrit, repo_path) push_acl_config(project, remote_url, repo_path, GERRIT_GITID, ssh_env) except Exception: log.exception( "Exception processing ACLS for %s." % project) finally: u.git_command(repo_path, 'reset --hard') u.git_command(repo_path, 'checkout master') u.git_command(repo_path, 'branch -D config') def create_gerrit_project(project, project_list, gerrit): if project not in project_list: try: gerrit.createProject(project) return True except Exception: log.exception( "Exception creating %s in Gerrit." 
% project) raise return False def create_local_mirror(local_git_dir, project_git, gerrit_system_user, gerrit_system_group): git_mirror_path = os.path.join(local_git_dir, project_git) if not os.path.exists(git_mirror_path): (ret, output) = u.run_command_status( "git --bare init %s" % git_mirror_path) if ret: u.run_command("rm -rf git_mirror_path") raise Exception(output) u.run_command( "chown -R %s:%s %s" % ( gerrit_system_user, gerrit_system_group, git_mirror_path)) def main(): parser = argparse.ArgumentParser(description='Manage projects') l.setup_logging_arguments(parser) parser.add_argument('--nocleanup', action='store_true', help='do not remove temp directories') parser.add_argument('projects', metavar='project', nargs='*', help='name of project(s) to process') args = parser.parse_args() l.configure_logging(args) default_has_github = registry.get_defaults('has-github', True) LOCAL_GIT_DIR = registry.get_defaults('local-git-dir', '/var/lib/git') JEEPYB_CACHE_DIR = registry.get_defaults('jeepyb-cache-dir', '/var/lib/jeepyb') ACL_DIR = registry.get_defaults('acl-dir') GERRIT_HOST = registry.get_defaults('gerrit-host') GITREVIEW_GERRIT_HOST = registry.get_defaults( 'gitreview-gerrit-host', GERRIT_HOST) GERRIT_PORT = int(registry.get_defaults('gerrit-port', '29418')) GITREVIEW_GERRIT_PORT = int(registry.get_defaults( 'gitreview-gerrit-port', GERRIT_PORT)) GERRIT_USER = registry.get_defaults('gerrit-user') GERRIT_KEY = registry.get_defaults('gerrit-key') GERRIT_GITID = registry.get_defaults('gerrit-committer') GERRIT_REPLICATE = registry.get_defaults('gerrit-replicate', True) GERRIT_OS_SYSTEM_USER = registry.get_defaults('gerrit-system-user', 'gerrit2') GERRIT_OS_SYSTEM_GROUP = registry.get_defaults('gerrit-system-group', 'gerrit2') DEFAULT_HOMEPAGE = registry.get_defaults('homepage') DEFAULT_HAS_ISSUES = registry.get_defaults('has-issues', False) DEFAULT_HAS_DOWNLOADS = registry.get_defaults('has-downloads', False) DEFAULT_HAS_WIKI = registry.get_defaults('has-wiki', False) GITHUB_SECURE_CONFIG = registry.get_defaults( 'github-config', '/etc/github/github-projects.secure.config') PROJECT_CACHE_FILE = os.path.join(JEEPYB_CACHE_DIR, 'project.cache') project_cache = {} if os.path.exists(PROJECT_CACHE_FILE): project_cache = json.loads(open(PROJECT_CACHE_FILE, 'r').read()) acl_cache = {} for acl_file in glob.glob(os.path.join(ACL_DIR, '*/*.config')): sha256 = hashlib.sha256() sha256.update(open(acl_file, 'r').read()) acl_cache[acl_file] = sha256.hexdigest() gerrit = gerritlib.gerrit.Gerrit(GERRIT_HOST, GERRIT_USER, GERRIT_PORT, GERRIT_KEY) project_list = gerrit.listProjects() ssh_env = u.make_ssh_wrapper(GERRIT_USER, GERRIT_KEY) try: for section in registry.configs_list: project = section['project'] if args.projects and project not in args.projects: continue try: log.info("Processing project: %s" % project) # Figure out all of the options options = section.get('options', dict()) description = section.get('description', None) homepage = section.get('homepage', DEFAULT_HOMEPAGE) upstream = section.get('upstream', None) repo_path = os.path.join(JEEPYB_CACHE_DIR, project) # If this project doesn't want to use gerrit, exit cleanly. 
if 'no-gerrit' in options: continue project_git = "%s.git" % project remote_url = "ssh://%s:%s/%s" % ( GERRIT_HOST, GERRIT_PORT, project) git_opts = dict(upstream=upstream, repo_path=repo_path, remote_url=remote_url) acl_config = section.get( 'acl-config', '%s.config' % os.path.join(ACL_DIR, project)) project_cache.setdefault(project, {}) # Create the project in Gerrit first, since it will fail # spectacularly if its project directory or local replica # already exist on disk project_created = project_cache[project].get( 'project-created', False) if not project_created: try: project_created = create_gerrit_project( project, project_list, gerrit) project_cache[project]['project-created'] = True except Exception: project_cache[project]['project-created'] = False continue pushed_to_gerrit = project_cache[project].get( 'pushed-to-gerrit', False) if not pushed_to_gerrit: # We haven't pushed to gerrit, so grab the repo again if os.path.exists(repo_path): shutil.rmtree(repo_path) # Make Local repo push_string = u.make_local_copy( repo_path, project, project_list, git_opts, ssh_env, upstream, GITREVIEW_GERRIT_HOST, GITREVIEW_GERRIT_PORT, project_git, GERRIT_GITID) description = ( find_description_override(repo_path) or description) u.fsck_repo(repo_path) if push_string: push_to_gerrit( repo_path, project, push_string, remote_url, ssh_env) project_cache[project]['pushed-to-gerrit'] = True if GERRIT_REPLICATE: gerrit.replicate(project) # Create the repo for the local git mirror create_local_mirror( LOCAL_GIT_DIR, project_git, GERRIT_OS_SYSTEM_USER, GERRIT_OS_SYSTEM_GROUP) if acl_config: acl_sha = acl_cache.get(acl_config) if project_cache[project].get('acl-sha') != acl_sha: if not os.path.exists(repo_path): u.make_local_copy( repo_path, project, project_list, git_opts, ssh_env, upstream, GERRIT_HOST, GERRIT_PORT, project_git, GERRIT_GITID) process_acls( acl_config, project, ACL_DIR, section, remote_url, repo_path, ssh_env, gerrit, GERRIT_GITID) project_cache[project]['acl-sha'] = acl_sha else: log.info("%s has matching sha, skipping ACLs", project) if 'has-github' in options or default_has_github: created = create_update_github_project( DEFAULT_HAS_ISSUES, DEFAULT_HAS_DOWNLOADS, DEFAULT_HAS_WIKI, GITHUB_SECURE_CONFIG, options, project, description, homepage, project_cache[project]) if created and GERRIT_REPLICATE: gerrit.replicate(project) project_cache[project]['created-in-github'] = created except Exception: log.exception( "Problems creating %s, moving on." % project) continue finally: # Clean up after ourselves - this repo has no use if os.path.exists(repo_path): shutil.rmtree(repo_path) finally: with open(PROJECT_CACHE_FILE, 'w') as cache_out: log.info("Writing cache file %s", PROJECT_CACHE_FILE) cache_out.write(json.dumps( project_cache, sort_keys=True, indent=2)) os.unlink(ssh_env['GIT_SSH']) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/update_bug.py0000644000175000017500000003347613223707256017122 0ustar filipfilip#!/usr/bin/env python # Copyright (c) 2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. # This is designed to be called by a gerrit hook. It searched new # patchsets for strings like "bug FOO" and updates corresponding Launchpad # bugs status. import argparse import os import re import subprocess from launchpadlib import launchpad from launchpadlib import uris import jeepyb.gerritdb from jeepyb import projects as p from jeepyb import utils as u BASE_DIR = '/home/gerrit2/review_site' GERRIT_CACHE_DIR = os.path.expanduser( os.environ.get('GERRIT_CACHE_DIR', '~/.launchpadlib/cache')) GERRIT_CREDENTIALS = os.path.expanduser( os.environ.get('GERRIT_CREDENTIALS', '~/.launchpadlib/creds')) def fix_or_related_fix(related): if related: return "Related fix" else: return "Fix" def add_change_abandoned_message(bugtask, change_url, project, branch, abandoner, reason): subject = ('Change abandoned on %s (%s)' % (u.short_project_name(project), branch)) body = ('Change abandoned by %s on branch: %s\nReview: %s' % (abandoner, branch, change_url)) if reason: body += ('\nReason: %s' % (reason)) bugtask.bug.newMessage(subject=subject, content=body) def add_change_proposed_message(bugtask, change_url, project, branch, related=False): fix = fix_or_related_fix(related) subject = ('%s proposed to %s (%s)' % (fix, u.short_project_name(project), branch)) body = '%s proposed to branch: %s\nReview: %s' % (fix, branch, change_url) bugtask.bug.newMessage(subject=subject, content=body) def add_change_merged_message(bugtask, change_url, project, commit, submitter, branch, git_log, related=False): subject = '%s merged to %s (%s)' % (fix_or_related_fix(related), u.short_project_name(project), branch) git_url = 'https://git.openstack.org/cgit/%s/commit/?id=%s' % (project, commit) body = '''Reviewed: %s Committed: %s Submitter: %s Branch: %s\n''' % (change_url, git_url, submitter, branch) body = body + '\n' + git_log bugtask.bug.newMessage(subject=subject, content=body) def set_in_progress(bugtask, launchpad, uploader, change_url): """Set bug In progress with assignee being the uploader""" # Retrieve uploader from Launchpad by correlating Gerrit E-mail # address to OpenID, and only set if there is a clear match. try: searchkey = uploader[uploader.rindex("(") + 1:-1] except ValueError: searchkey = uploader # The counterintuitive query is due to odd database schema choices # in Gerrit. For example, an account with a secondary E-mail # address added looks like... # select email_address,external_id from account_external_ids # where account_id=1234; # +-----------------+-----------------------------------------+ # | email_address | external_id | # +-----------------+-----------------------------------------+ # | plugh@xyzzy.com | https://login.ubuntu.com/+id/fR0bnU1 | # | bar@foo.org | mailto:bar@foo.org | # | NULL | username:quux | # +-----------------+-----------------------------------------+ # ...thus we need a join on a secondary query to search against # all the user's configured E-mail addresses. # # Worse, we also need to filter by active accounts only since # picking an inactive account could result in using the wrong # OpenId entirely. 
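    # Editor's note: the query below resolves the uploader's e-mail address to
    # their Launchpad OpenID (the login.ubuntu.com external id) for active
    # accounts only; the resulting identifier is then passed to
    # launchpad.people.getByOpenIDIdentifier() further down.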
# query = """SELECT t.external_id FROM account_external_ids t INNER JOIN ( SELECT t.account_id FROM account_external_ids t WHERE t.email_address = %s ) original ON t.account_id = original.account_id AND t.external_id LIKE 'https://login.ubuntu.com%%' JOIN accounts a ON a.account_id = t.account_id WHERE a.inactive = 'N'""" cursor = jeepyb.gerritdb.connect().cursor() cursor.execute(query, searchkey) data = cursor.fetchone() if data: assignee = launchpad.people.getByOpenIDIdentifier(identifier=data[0]) if assignee: bugtask.assignee = assignee bugtask.status = "In Progress" bugtask.lp_save() def set_fix_committed(bugtask): """Set bug fix committed.""" bugtask.status = "Fix Committed" bugtask.lp_save() def set_fix_released(bugtask): """Set bug fix released.""" bugtask.status = "Fix Released" bugtask.lp_save() def release_fixcommitted(bugtask): """Set bug FixReleased if it was FixCommitted.""" if bugtask.status == u'Fix Committed': set_fix_released(bugtask) def tag_in_branchname(bugtask, branch): """Tag bug with in-branch-name tag (if name is appropriate).""" lp_bug = bugtask.bug branch_name = branch.replace('/', '-') if branch_name.replace('-', '').isalnum(): lp_bug.tags = lp_bug.tags + ["in-%s" % branch_name] lp_bug.tags.append("in-%s" % branch_name) lp_bug.lp_save() class Task: def __init__(self, lp_task, prefix): '''Prefixes associated with bug references will allow for certain changes to be made to the bug's launchpad (lp) page. The following tokens represent the automation currently taking place. :: add_comment -> Adds a comment to the bug's lp page. sidenote -> Adds a 'related' comment to the bug's lp page. set_in_progress -> Sets the bug's lp status to 'In Progress'. set_fix_released -> Sets the bug's lp status to 'Fix Released'. set_fix_committed -> Sets the bug's lp status to 'Fix Committed'. :: changes_needed, when populated, simply indicates the actions that are available to be taken based on the value of 'prefix'. ''' self.lp_task = lp_task self.changes_needed = [] # If no prefix was matched, default to 'closes'. prefix = prefix.split('-')[0].lower() if prefix else 'closes' if prefix in ('closes', 'fixes', 'resolves'): self.changes_needed.extend(('add_comment', 'set_in_progress', 'set_fix_committed', 'set_fix_released')) elif prefix in ('partial',): self.changes_needed.extend(('add_comment', 'set_in_progress')) elif prefix in ('related', 'impacts', 'affects'): self.changes_needed.extend(('sidenote',)) else: # prefix is not recognized. self.changes_needed.extend(('add_comment',)) def needs_change(self, change): '''Return a boolean indicating if given 'change' needs to be made.''' if change in self.changes_needed: return True else: return False def process_bugtask(launchpad, task, git_log, args): """Apply changes to lp bug tasks, based on hook / branch.""" bugtask = task.lp_task series = None if args.hook == "change-abandoned": add_change_abandoned_message(bugtask, args.change_url, args.project, args.branch, args.abandoner, args.reason) if args.hook == "change-merged": if args.branch == 'master': if (not p.is_delay_release(args.project) and task.needs_change('set_fix_released')): set_fix_released(bugtask) else: if (bugtask.status != u'Fix Released' and task.needs_change('set_fix_committed')): set_fix_committed(bugtask) elif args.branch.startswith('proposed/'): release_fixcommitted(bugtask) else: series = args.branch.rsplit('/', 1)[-1] if series: # Look for a related task matching the series. 
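            # Editor's note: the 'else' attached to this for-loop runs only when
            # no related task for the series is found (i.e. the loop finishes
            # without hitting 'break'), in which case the bug is tagged with the
            # branch name instead.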
for reltask in bugtask.related_tasks: if (reltask.bug_target_name.endswith(series) and reltask.status != u'Fix Released' and task.needs_change('set_fix_committed')): set_fix_committed(reltask) break else: # Use tag_in_branchname if there isn't any. tag_in_branchname(bugtask, args.branch) if task.needs_change('add_comment') or task.needs_change('sidenote'): add_change_merged_message(bugtask, args.change_url, args.project, args.commit, args.submitter, args.branch, git_log, related=task.needs_change('sidenote')) if args.hook == "patchset-created": if args.branch == 'master': if (bugtask.status not in [u'Fix Committed', u'Fix Released'] and task.needs_change('set_in_progress')): set_in_progress(bugtask, launchpad, args.uploader, args.change_url) else: series = args.branch.rsplit('/', 1)[-1] if series: # Look for a related task matching the series. for reltask in bugtask.related_tasks: if (reltask.bug_target_name.endswith(series) and task.needs_change('set_in_progress') and reltask.status not in [u'Fix Committed', u'Fix Released']): set_in_progress(reltask, launchpad, args.uploader, args.change_url) break if args.patchset == '1' and (task.needs_change('add_comment') or task.needs_change('sidenote')): add_change_proposed_message(bugtask, args.change_url, args.project, args.branch, related=task.needs_change('sidenote')) def find_bugs(launchpad, git_log, args): '''Find bugs referenced in the git log and return related tasks. Our regular expression is composed of three major parts: part1: Matches only at start-of-line (required). Optionally matches any word or hyphen separated words. part2: Matches the words 'bug' or 'lp' on a word boundary (required). part3: Matches a whole number (required). The following patterns will be matched properly: bug # 555555 Closes-Bug: 555555 Fixes: bug # 555555 Resolves: bug 555555 Partial-Bug: lp bug # 555555 :returns: an iterable containing Task objects. ''' project = args.project if p.is_no_launchpad_bugs(project): return [] projects = p.project_to_groups(project) part1 = r'^[\t ]*(?P[-\w]+)?[\s:]*' part2 = r'(?:\b(?:bug|lp)\b[\s#:]*)+' part3 = r'(?P\d+)\s*?$' regexp = part1 + part2 + part3 matches = re.finditer(regexp, git_log, flags=re.I | re.M) # Extract unique bug tasks and associated prefixes. bugtasks = {} for match in matches: prefix = match.group('prefix') bug_num = match.group('bug_number') if bug_num not in bugtasks: try: lp_bug = launchpad.bugs[bug_num] for lp_task in lp_bug.bug_tasks: if lp_task.bug_target_name in projects: bugtasks[bug_num] = Task(lp_task, prefix) break except KeyError: # Unknown bug. pass return bugtasks.values() def extract_git_log(args): """Extract git log of all merged commits.""" cmd = ['git', '--git-dir=' + BASE_DIR + '/git/' + args.project + '.git', 'log', '--no-merges', args.commit + '^1..' 
+ args.commit] return subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0] def main(): parser = argparse.ArgumentParser() parser.add_argument('hook') # common parser.add_argument('--change', default=None) parser.add_argument('--change-url', default=None) parser.add_argument('--project', default=None) parser.add_argument('--branch', default=None) parser.add_argument('--commit', default=None) parser.add_argument('--topic', default=None) parser.add_argument('--change-owner', default=None) # change-abandoned parser.add_argument('--abandoner', default=None) parser.add_argument('--reason', default=None) # change-merged parser.add_argument('--submitter', default=None) parser.add_argument('--newrev', default=None) # patchset-created parser.add_argument('--uploader', default=None) parser.add_argument('--patchset', default=None) parser.add_argument('--is-draft', default=None) parser.add_argument('--kind', default=None) args = parser.parse_args() # Connect to Launchpad. lpconn = launchpad.Launchpad.login_with( 'Gerrit User Sync', uris.LPNET_SERVICE_ROOT, GERRIT_CACHE_DIR, credentials_file=GERRIT_CREDENTIALS, version='devel') # Get git log. git_log = extract_git_log(args) # Process tasks found in git log. for task in find_bugs(lpconn, git_log, args): process_bugtask(lpconn, task, git_log, args) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/welcome_message.py0000644000175000017500000001534513223707256020135 0ustar filipfilip#!/usr/bin/env python # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # This is designed to be called by a gerrit hook. It searched new # patchsets for those from a first time commiter, then posts a helpful # message welcoming them to the community and explaining the review process # # For example, this might be called as follows # python welcome_message.py --change Ia1fea1eab3976f1a9cb89ceb3ce1c6c6a7e79c42 # --change-url \ https://review-dev.openstack.org/81 --project gtest-org/test \ # --branch master --uploader User A. Example (user@example.com) --commit \ # 05508ae633852469d2fd7786a3d6f1d06f87055b --patchset 1 patchset-merged \ # --ssh-user=user --ssh-key=/home/user/.ssh/id_rsa # and if this was the first commit from "user@example.com", a message # would be posted on review 81. import argparse import logging import paramiko import jeepyb.gerritdb import jeepyb.log as l BASE_DIR = '/home/gerrit2/review_site' logger = logging.getLogger('welcome_reviews') def is_newbie(uploader): """Determine if the owner of the patch is a first-timer.""" # Retrieve uploader email try: searchkey = uploader[uploader.rindex("(") + 1:-1] except ValueError: logger.info('Couldnt get email for %s', uploader) return False # this query looks for all distinct patchsets for the given # user. If there's only 1, they're a first-timer. 
query = """SELECT COUNT(DISTINCT p.change_id + p.patch_set_id) FROM patch_sets p, account_external_ids a WHERE a.email_address = %s AND a.account_id = p.uploader_account_id;""" cursor = jeepyb.gerritdb.connect().cursor() cursor.execute(query, searchkey) data = cursor.fetchone() if data: if data[0] == 1: logger.info('We found a newbie: %s', uploader) return True else: return False def post_message(commit, gerrit_user, gerrit_ssh_key, message_file): """Post a welcome message on the patch set specified by the commit.""" default_text = """Thank you for your first contribution to OpenStack. Your patch will now be tested automatically by OpenStack testing frameworks and once the automatic tests pass, it will be reviewed by other friendly developers. They will give you feedback and may require you to refine it. People seldom get their patch approved on the first try, so don't be concerned if requested to make corrections. Feel free to modify your patch and resubmit a new change-set. Patches usually take 3 to 7 days to be reviewed so be patient and be available on IRC to ask and answer questions about your work. Also it takes generally at least a couple of weeks for cores to get around to reviewing code. The more you participate in the community the more rewarding it is for you. You may also notice that the more you get to know people and get to be known, the faster your patches will be reviewed and eventually approved. Get to know others and become known by doing code reviews: anybody can do it, and it's a great way to learn the code base. Thanks again for supporting OpenStack, we look forward to working with you. IRC: https://wiki.openstack.org/wiki/IRC Workflow: http://docs.openstack.org/infra/manual/developers.html Commit Messages: https://wiki.openstack.org/wiki/GitCommitMessages """ if message_file: try: with open(message_file, 'r') as _file: welcome_text = _file.read() except (OSError, IOError): logger.exception("Could not open message file") welcome_text = default_text else: welcome_text = default_text # post the above message, using ssh. 
command = ('gerrit review ' '--message="{message}" {commit}').format( message=welcome_text, commit=commit) logger.info('Welcoming: %s', commit) ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.connect('localhost', username=gerrit_user, key_filename=gerrit_ssh_key, port=29418) stdin, stdout, stderr = ssh.exec_command(command) stdout_text = stdout.read() stderr_text = stderr.read() ssh.close() if stdout_text: logger.debug('stdout: %s' % stdout_text) if stderr_text: logger.error('stderr: %s' % stderr_text) def main(): parser = argparse.ArgumentParser() parser.add_argument('hook') # common parser.add_argument('--change', default=None) parser.add_argument('--change-url', default=None) parser.add_argument('--project', default=None) parser.add_argument('--branch', default=None) parser.add_argument('--commit', default=None) parser.add_argument('--topic', default=None) parser.add_argument('--change-owner', default=None) # patchset-abandoned parser.add_argument('--abandoner', default=None) parser.add_argument('--reason', default=None) # change-merged parser.add_argument('--submitter', default=None) parser.add_argument('--newrev', default=None) # patchset-created parser.add_argument('--uploader', default=None) parser.add_argument('--patchset', default=None) parser.add_argument('--is-draft', default=None) parser.add_argument('--kind', default=None) # for Welcome Message parser.add_argument('--ssh-user', dest='ssh_user', help='The gerrit welcome message user') parser.add_argument('--ssh-key', dest='ssh_key', help='The gerrit welcome message SSH key file') parser.add_argument('--message-file', dest='message_file', default=None, help='The gerrit welcome message') # Don't actually post the message parser.add_argument('--dryrun', dest='dryrun', action='store_true') parser.add_argument('--no-dryrun', dest='dryrun', action='store_false') parser.set_defaults(dryrun=False) l.setup_logging_arguments(parser) args = parser.parse_args() l.configure_logging(args) # they're a first-timer, post the message on 1st patchset if is_newbie(args.uploader) and args.patchset == '1' and not args.dryrun: post_message(args.commit, args.ssh_user, args.ssh_key, args.message_file) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/expire_old_reviews.py0000644000175000017500000000624213223707256020670 0ustar filipfilip#!/usr/bin/env python # Copyright (c) 2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # This script is designed to expire old code reviews that have not been touched # using the following rules: # 1. if negative comment and no recent activity, expire import argparse import json import logging import paramiko import jeepyb.log as l logger = logging.getLogger('expire_reviews') def expire_patch_set(ssh, patch_id, patch_subject): message = ('Code review expired due to no recent activity' ' after a negative review. 
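    # For illustration only (not executed): the command assembled just below
    # ends up looking roughly like the following, where the commit SHA is the
    # example one from this script's header comment and the message is the
    # welcome text above:
    #
    #   gerrit review --message="Thank you for your first contribution..." \
    #       05508ae633852469d2fd7786a3d6f1d06f87055b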
It can be restored using' ' the \`Restore Change\` button under the Patch Set' ' on the web interface.') command = ('gerrit review --abandon ' '--message="{message}" {patch_id}').format( message=message, patch_id=patch_id) logger.info('Expiring: %s - %s: %s', patch_id, patch_subject, message) stdin, stdout, stderr = ssh.exec_command(command) if stdout.channel.recv_exit_status() != 0: logger.error(stderr.read()) def main(): parser = argparse.ArgumentParser() parser.add_argument('user', help='The gerrit admin user') parser.add_argument('ssh_key', help='The gerrit admin SSH key file') parser.add_argument('--age', dest='age', default='1w', help='The minimum age of a review to expire') l.setup_logging_arguments(parser) options = parser.parse_args() l.configure_logging(options) GERRIT_USER = options.user GERRIT_SSH_KEY = options.ssh_key EXPIRY_AGE = options.age logger.info('Starting expire reviews') logger.info('Connecting to Gerrit') ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) ssh.connect('localhost', username=GERRIT_USER, key_filename=GERRIT_SSH_KEY, port=29418) # Query all reviewed with no activity for 1 week logger.info('Searching no activity on negative review for 1 week') stdin, stdout, stderr = ssh.exec_command( 'gerrit query --current-patch-set --all-approvals' ' --format JSON status:reviewed age:' + EXPIRY_AGE) for line in stdout: row = json.loads(line) if 'rowCount' not in row and 'open' in row and row['open']: # Search for negative approvals for approval in row['currentPatchSet']['approvals']: if approval['value'] in ('-1', '-2'): expire_patch_set(ssh, row['currentPatchSet']['revision'], row['subject']) break logger.info('End expire review') if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/close_pull_requests.py0000644000175000017500000001072413223707256021066 0ustar filipfilip#! /usr/bin/env python # Copyright (C) 2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # Github pull requests closer reads a project config file called projects.yaml # It should look like: # - homepage: http://openstack.org # team-id: 153703 # has-wiki: False # has-issues: False # has-downloads: False # --- # - project: PROJECT_NAME # options: # - has-pull-requests # Github authentication information is read from github.secure.config, # which should look like: # [github] # username = GITHUB_USERNAME # password = GITHUB_PASSWORD # # or # # [github] # oauth_token = GITHUB_OAUTH_TOKEN import argparse import ConfigParser import github import logging import os import jeepyb.log as l import jeepyb.projects as p import jeepyb.utils as u MESSAGE = """Thank you for contributing to %(project)s! %(project)s uses Gerrit for code review. 
If you have never contributed to OpenStack before make sure you have read the getting started documentation: http://docs.openstack.org/infra/manual/developers.html#getting-started Otherwise please visit http://docs.openstack.org/infra/manual/developers.html#development-workflow and follow the instructions there to upload your change to Gerrit. """ log = logging.getLogger("close_pull_requests") def main(): parser = argparse.ArgumentParser() l.setup_logging_arguments(parser) parser.add_argument('--message-file', dest='message_file', default=None, help='The close pull request message') args = parser.parse_args() l.configure_logging(args) if args.message_file: try: with open(args.message_file, 'r') as _file: pull_request_text = _file.read() except (OSError, IOError): log.exception("Could not open close pull request message file") raise else: pull_request_text = MESSAGE GITHUB_SECURE_CONFIG = os.environ.get('GITHUB_SECURE_CONFIG', '/etc/github/github.secure.config') secure_config = ConfigParser.ConfigParser() secure_config.read(GITHUB_SECURE_CONFIG) registry = u.ProjectsRegistry() if secure_config.has_option("github", "oauth_token"): ghub = github.Github(secure_config.get("github", "oauth_token")) else: ghub = github.Github(secure_config.get("github", "username"), secure_config.get("github", "password")) orgs = ghub.get_user().get_orgs() orgs_dict = dict(zip([o.login.lower() for o in orgs], orgs)) for section in registry.configs_list: project = section['project'] # Make sure we're using GitHub for this project: if not p.has_github(project): continue # Make sure we're supposed to close pull requests for this project: if 'options' in section and 'has-pull-requests' in section['options']: continue # Find the project's repo project_split = project.split('/', 1) # Handle errors in case the repo or the organization doesn't exists try: if len(project_split) > 1: org = orgs_dict[project_split[0].lower()] repo = org.get_repo(project_split[1]) else: repo = ghub.get_user().get_repo(project) except (KeyError, github.GithubException): log.exception("Could not find project %s on GitHub." % project) continue # Close each pull request pull_requests = repo.get_pulls("open") for req in pull_requests: vars = dict(project=project) issue_data = {"url": repo.url + "/issues/" + str(req.number)} issue = github.Issue.Issue(requester=req._requester, headers={}, attributes=issue_data, completed=True) issue.create_comment(pull_request_text % vars) req.edit(state="closed") if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/openstackwatch.py0000644000175000017500000001334313223707256020010 0ustar filipfilip#!/usr/bin/env python # Copyright (c) 2013 Chmouel Boudjnah, eNovance # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # This script is designed to generate rss feeds for subscription from updates # to various gerrit tracked projects. It is intended to be run periodically, # for example hourly via cron. It takes an optional argument to specify the # path to a configuration file. 
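# For illustration only: the hourly cron usage mentioned above could look
# roughly like the entry below, using the 'openstackwatch' console script
# declared in setup.cfg (the configuration file path is hypothetical):
#
#   0 * * * * openstackwatch /etc/jeepyb/openstackwatch.ini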
# -*- encoding: utf-8 -*- from __future__ import print_function __author__ = "Chmouel Boudjnah " import ConfigParser import cStringIO import datetime import json import os import sys import time import PyRSS2Gen import six.moves.urllib.request as urlrequest PROJECTS = ['openstack/nova', 'openstack/keystone', 'openstack/swift'] JSON_URL = 'https://review.openstack.org/query' DEBUG = False OUTPUT_MODE = 'multiple' curdir = os.path.dirname(os.path.realpath(sys.argv[0])) class ConfigurationError(Exception): pass def get_config(config, section, option, default=None): if not config.has_section(section): raise ConfigurationError("Invalid configuration, missing section: %s" % section) if config.has_option(section, option): return config.get(section, option) elif default is not None: return default else: raise ConfigurationError("Invalid configuration, missing " "section/option: %s/%s" % (section, option)) def parse_ini(inifile): ret = {} if not os.path.exists(inifile): return config = ConfigParser.RawConfigParser(allow_no_value=True) config.read(inifile) if config.has_section('swift'): ret['swift'] = dict(config.items('swift')) ret['projects'] = get_config(config, 'general', 'projects', PROJECTS) if type(ret['projects']) is not list: ret['projects'] = [x.strip() for x in ret['projects'].split(',')] ret['json_url'] = get_config(config, 'general', 'json_url', JSON_URL) ret['debug'] = get_config(config, 'general', 'debug', DEBUG) ret['output_mode'] = get_config(config, 'general', 'output_mode', OUTPUT_MODE) return ret try: conffile = sys.argv[1] except IndexError: conffile = os.path.join(curdir, '..', 'config', 'openstackwatch.ini') CONFIG = parse_ini(conffile) def debug(msg): if DEBUG: print(msg) def get_json(project=None): url = CONFIG['json_url'] if project: url += "+project:" + project fp = urlrequest.urlretrieve(url) ret = open(fp[0]).read() return ret def parse_json(content): for row in content.splitlines(): try: json_row = json.loads(row) except(ValueError): continue if not json_row or 'project' not in json_row or \ json_row['project'] not in CONFIG['projects']: continue yield json_row def upload_to_swift(content, objectname): import swiftclient cfg = CONFIG['swift'] client = swiftclient.Connection(cfg['auth_url'], cfg['username'], cfg['password'], auth_version=cfg.get('auth_version', '2.0')) try: client.get_container(cfg['container']) except(swiftclient.client.ClientException): client.put_container(cfg['container']) # eventual consistenties time.sleep(1) client.put_object(cfg['container'], objectname, cStringIO.StringIO(content)) def generate_rss(content, project=""): title = "OpenStack %s watch RSS feed" % (project) rss = PyRSS2Gen.RSS2( title=title, link="http://github.com/chmouel/openstackwatch.rss", description="The latest reviews about OpenStack, straight " "from Gerrit.", lastBuildDate=datetime.datetime.now() ) for row in parse_json(content): author = row['owner']['name'] author += " <%s>" % ('email' in row['owner'] and row['owner']['email'] or row['owner']['username']) rss.items.append( PyRSS2Gen.RSSItem( title="%s [%s]: %s" % (os.path.basename(row['project']), row['status'], row['subject']), author=author, link=row['url'], guid=PyRSS2Gen.Guid(row['id']), description=row['subject'], pubDate=datetime.datetime.fromtimestamp(row['lastUpdated']), )) return rss.to_xml() def main(): if CONFIG['output_mode'] == "combined": content = generate_rss(get_json()) if 'swift' in CONFIG: upload_to_swift(content, CONFIG['swift']['combined_output_object']) else: print(content) elif CONFIG['output_mode'] 
== "multiple": for project in CONFIG['projects']: content = generate_rss(get_json(project), project=project) if 'swift' in CONFIG: objectname = "%s.xml" % os.path.basename(project) upload_to_swift(content, objectname) else: print(content) if __name__ == '__main__': main() jeepyb-0+20170923/jeepyb/cmd/__init__.py0000644000175000017500000000113513223707256016525 0ustar filipfilip# Copyright (c) 2012 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. jeepyb-0+20170923/jeepyb/cmd/create_cgitrepos.py0000644000175000017500000001024613223707256020313 0ustar filipfilip#! /usr/bin/env python # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # create_cgitrepos.py reads the project config file called projects.yaml # and generates a cgitrepos configuration file which is then copied to # the cgit server. # # It also creates the necessary top-level directories for each project # organization (openstack, stackforge, etc) import os import subprocess import jeepyb.utils as u PROJECTS_YAML = os.environ.get('PROJECTS_YAML', '/home/cgit/projects.yaml') CGIT_REPOS = os.environ.get('CGIT_REPOS', '/etc/cgitrepos') REPO_PATH = os.environ.get('REPO_PATH', '/var/lib/git') SCRATCH_SUBPATH = os.environ.get('SCRATCH_SUBPATH') SCRATCH_OWNER = os.environ.get('SCRATCH_OWNER', 'scratch') SCRATCH_GROUP = os.environ.get('SCRATCH_GROUP', 'scratch') CGIT_USER = os.environ.get('CGIT_USER', 'cgit') CGIT_GROUP = os.environ.get('CGIT_GROUP', 'cgit') DEFAULT_ORG = os.environ.get('DEFAULT_ORG', None) def clean_string(string): """Scrub out characters that with break cgit. cgit can't handle newlines in many of it's fields, so strip them out. """ return string.replace('\n', ' ').replace('\r', '') def main(): registry = u.ProjectsRegistry(PROJECTS_YAML) gitorgs = {} names = set() for entry in registry.configs_list: project = entry['project'] if '/' in project: (org, name) = project.split('/') else: if DEFAULT_ORG is None: raise RuntimeError('No org specified for project %s and no' 'DEFAULT_ORG is set.' 
% project) (org, name) = (DEFAULT_ORG, project) description = entry.get('description', name) assert project not in names names.add(project) gitorgs.setdefault(org, []).append((name, description)) if SCRATCH_SUBPATH: assert SCRATCH_SUBPATH not in gitorgs scratch_path = os.path.join(REPO_PATH, SCRATCH_SUBPATH) for org in gitorgs: scratch_dir = os.path.join(scratch_path, org) if not os.path.isdir(scratch_dir): os.makedirs(scratch_dir) projects = gitorgs[org] for (name, description) in projects: scratch_repo = "%s.git" % os.path.join(scratch_dir, name) subprocess.call(['git', 'init', '--bare', scratch_repo]) subprocess.call(['chown', '-R', '%s:%s' % (SCRATCH_OWNER, SCRATCH_GROUP), scratch_repo]) for org in gitorgs: if not os.path.isdir('%s/%s' % (REPO_PATH, org)): os.makedirs('%s/%s' % (REPO_PATH, org)) with open(CGIT_REPOS, 'w') as cgit_file: cgit_file.write('# Autogenerated by create_cgitrepos.py\n') for org in sorted(gitorgs): cgit_file.write('\n') cgit_file.write('section=%s\n' % (org)) org_dir = os.path.join(REPO_PATH, org) projects = gitorgs[org] projects.sort() for (name, description) in projects: project_repo = "%s.git" % os.path.join(org_dir, name) cgit_file.write('\n') cgit_file.write('repo.url=%s/%s\n' % (org, name)) cgit_file.write('repo.path=%s/\n' % (project_repo)) cgit_file.write( 'repo.desc=%s\n' % (clean_string(description))) if not os.path.exists(project_repo): subprocess.call(['git', 'init', '--bare', project_repo]) subprocess.call(['chown', '-R', '%s:%s' % (CGIT_USER, CGIT_GROUP), project_repo]) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/trivial_rebase.py0000644000175000017500000003012113223707256017756 0ustar filipfilip# Copyright (c) 2010, Code Aurora Forum. All rights reserved. # Copyright (c) 2012, Hewlett-Packard Development Company, L.P. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # Neither the name of Code Aurora Forum, Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED # WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR # BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE # OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN # IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # This script is designed to detect when a patchset uploaded to Gerrit is # 'identical' (determined via git-patch-id) and reapply reviews onto the new # patchset from the previous patchset. 
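# For illustration only: two revisions are treated as 'identical' here when
# their patch-ids agree, which can be checked by hand with something like
# the following (the revision names are placeholders):
#
#   git show REVISION_A | git patch-id
#   git show REVISION_B | git patch-id
#
# The first field of each output line is the patch-id that gets compared.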
# Get usage and help info by running: trivial-rebase --help # Documentation is available here: # https://www.codeaurora.org/xwiki/bin/QAEP/Gerrit from __future__ import print_function import json import optparse import subprocess import sys class SilentOptionParser(optparse.OptionParser): """Make OptionParser silently swallow unrecognized options.""" def _process_args(self, largs, rargs, values): while rargs: try: optparse.OptionParser._process_args(self, largs, rargs, values) except (optparse.AmbiguousOptionError, optparse.BadOptionError) as e: largs.append(e.opt_str) class CheckCallError(OSError): """CheckCall() returned non-0.""" def __init__(self, command, cwd, retcode, stdout, stderr=None): OSError.__init__(self, command, cwd, retcode, stdout, stderr) self.command = command self.cwd = cwd self.retcode = retcode self.stdout = stdout self.stderr = stderr def CheckCall(command, cwd=None): """Like subprocess.check_call() but returns stdout. Works on python 2.4 """ try: process = subprocess.Popen(command, cwd=cwd, stdout=subprocess.PIPE) std_out, std_err = process.communicate() except OSError as e: raise CheckCallError(command, cwd, e.errno, None) if process.returncode: raise CheckCallError(command, cwd, process.returncode, std_out, std_err) return std_out, std_err def Gssh(options, api_command): """Makes a Gerrit API call via SSH and returns the stdout results.""" ssh_cmd = ['ssh', '-l', 'Gerrit Code Review', '-p', options.port, '-i', options.private_key_path, options.server, api_command] try: return CheckCall(ssh_cmd)[0] except CheckCallError as e: err_template = "call: %s\nreturn code: %s\nstdout: %s\nstderr: %s\n" sys.stderr.write(err_template % (ssh_cmd, e.retcode, e.stdout, e.stderr)) raise def GsqlQuery(sql_query, options): """Runs a gerrit gsql query and returns the result.""" gsql_cmd = "gerrit gsql --format JSON -c %s" % sql_query gsql_out = Gssh(options, gsql_cmd) new_out = gsql_out.replace('}}\n', '}}\nsplit here\n') return new_out.split('split here\n') def FindPrevRev(options): """Finds the revision of the previous patch set on the change.""" sql_query = ("\"SELECT revision FROM patch_sets,changes WHERE " "patch_sets.change_id = changes.change_id AND " "patch_sets.patch_set_id = %s AND " "changes.change_key = \'%s\'\"" % ((options.patchset - 1), options.changeId)) revisions = GsqlQuery(sql_query, options) json_dict = json.loads(revisions[0], strict=False) return json_dict["columns"]["revision"] def GetApprovals(options): """Get all the approvals on a specific patch set. Returns a list of approval dicts """ sql_query = ("\"SELECT value,account_id,category_id" " FROM patch_set_approvals" " WHERE patch_set_id = %s" " AND change_id = (SELECT change_id FROM" " changes WHERE change_key = \'%s\') AND value <> 0\"" % ((options.patchset - 1), options.changeId)) gsql_out = GsqlQuery(sql_query, options) approvals = [] for json_str in gsql_out: dict = json.loads(json_str, strict=False) if dict["type"] == "row": approvals.append(dict["columns"]) return approvals def GetPatchId(revision, consider_whitespace=False): git_show_cmd = ['git', 'show', revision] patch_id_cmd = ['git', 'patch-id'] patch_id_process = subprocess.Popen(patch_id_cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE) git_show_process = subprocess.Popen(git_show_cmd, stdout=subprocess.PIPE) if consider_whitespace: # This matches on change lines in the patch (those starting with "+" # or "-" but not followed by another of the same), then replaces any # space or tab characters with "%" before calculating a patch-id. 
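        # Illustration only (not executed): a changed line such as
        #   "+    return  x"
        # would be rewritten by the sed expression below to
        #   "+%%%%return%%x"
        # so a whitespace-only difference still yields a different patch-id.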
replace_ws_cmd = ['sed', r'/^\(+[^+]\|-[^-]\)/y/ \t/%%/'] replace_ws_process = subprocess.Popen(replace_ws_cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE) git_show_output = git_show_process.communicate()[0] replace_ws_output = replace_ws_process.communicate(git_show_output)[0] return patch_id_process.communicate(replace_ws_output)[0] else: return patch_id_process.communicate( git_show_process.communicate()[0])[0] def SuExec(options, as_user, cmd): suexec_cmd = "suexec --as %s -- %s" % (as_user, cmd) Gssh(options, suexec_cmd) def DiffCommitMessages(commit1, commit2): log_cmd1 = ['git', 'log', '--pretty=format:"%an %ae%n%s%n%b"', commit1 + '^!'] commit1_log = CheckCall(log_cmd1) log_cmd2 = ['git', 'log', '--pretty=format:"%an %ae%n%s%n%b"', commit2 + '^!'] commit2_log = CheckCall(log_cmd2) if commit1_log != commit2_log: return True return False def main(): usage = "usage: %prog [optional options]" parser = SilentOptionParser(usage=usage) parser.add_option("--change", dest="changeId", help="Change identifier") parser.add_option("--project", help="Project path in Gerrit") parser.add_option("--commit", help="Git commit-ish for this patchset") parser.add_option("--patchset", type="int", help="The patchset number") parser.add_option("--role-user", dest="role_user", help="E-mail/ID of user commenting on commit messages") parser.add_option("--private-key-path", dest="private_key_path", help="Full path to Gerrit SSH daemon's private host key") parser.add_option("--server-port", dest="port", default='29418', help="Port to connect to Gerrit's SSH daemon " "[default: %default]") parser.add_option("--server", dest="server", default="localhost", help="Server name/address for Gerrit's SSH daemon " "[default: %default]") parser.add_option("--whitespace", action="store_true", help="Treat whitespace as significant") (options, args) = parser.parse_args() if not options.changeId: parser.print_help() sys.exit(0) if options.patchset == 1: # Nothing to detect on first patchset sys.exit(0) prev_revision = None prev_revision = FindPrevRev(options) if not prev_revision: # Couldn't find a previous revision sys.exit(0) prev_patch_id = GetPatchId(prev_revision) cur_patch_id = GetPatchId(options.commit) if cur_patch_id.split()[0] != prev_patch_id.split()[0]: # patch-ids don't match sys.exit(0) # Patch ids match. This is a trivial rebase. # In addition to patch-id we should check if whitespace content changed. # Some languages are more sensitive to whitespace than others, and some # changes may either introduce or be intended to fix style problems # specifically involving whitespace as well. if options.whitespace: prev_patch_ws = GetPatchId(prev_revision, consider_whitespace=True) cur_patch_ws = GetPatchId(options.commit, consider_whitespace=True) if cur_patch_ws.split()[0] != prev_patch_ws.split()[0]: # Insert a comment into the change letting the approvers know # only the whitespace changed comment_msg = ("\"New patchset patch-id matches previous patchset," " but whitespace content has changed.\"") comment_cmd = ['gerrit', 'approve', '--project', options.project, '--message', comment_msg, options.commit] SuExec(options, options.role_user, ' '.join(comment_cmd)) sys.exit(0) # We should also check if the commit message changed. Most approvers would # want to re-review changes when the commit message changes. 
changed = DiffCommitMessages(prev_revision, options.commit) if changed: # Insert a comment into the change letting the approvers know only the # commit message changed comment_msg = ("\"New patchset patch-id matches previous patchset," " but commit message has changed.\"") comment_cmd = ['gerrit', 'approve', '--project', options.project, '--message', comment_msg, options.commit] SuExec(options, options.role_user, ' '.join(comment_cmd)) sys.exit(0) # Need to get all approvals on prior patch set, then suexec them onto # this patchset. approvals = GetApprovals(options) gerrit_approve_msg = ("\'Automatically re-added by Gerrit trivial rebase " "detection script.\'") for approval in approvals: # Note: Sites with different 'copy_min_score' values in the # approval_categories DB table might want different behavior here. # Additional categories should also be added if desired. if approval["category_id"] == "CRVW": approve_category = '--code-review' elif approval["category_id"] == "VRIF": # Don't re-add verifies # approve_category = '--verified' continue elif approval["category_id"] == "SUBM": # We don't care about previous submit attempts continue elif approval["category_id"] == "APRV": # Similarly squash old approvals continue else: print("Unsupported category: %s" % approval) sys.exit(0) score = approval["value"] gerrit_approve_cmd = ['gerrit', 'approve', '--project', options.project, '--message', gerrit_approve_msg, approve_category, score, options.commit] SuExec(options, approval["account_id"], ' '.join(gerrit_approve_cmd)) sys.exit(0) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/update_blueprint.py0000644000175000017500000001334513223707256020342 0ustar filipfilip#!/usr/bin/env python # Copyright (c) 2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # This is designed to be called by a gerrit hook. It searched new # patchsets for strings like "blueprint FOO" or "bp FOO" and updates # corresponding Launchpad blueprints with links back to the change. 
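# For illustration, commit message lines like the following would be picked
# up by the matching logic below (the blueprint name is made up); only the
# "blueprint"/"bp" token and the name that follows it are significant:
#
#   blueprint example-blueprint
#   bp: example-blueprint
#   Implements: blueprint example-blueprint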
import argparse import ConfigParser import os import re import StringIO import subprocess from launchpadlib import launchpad from launchpadlib import uris import pymysql from jeepyb import projects as p BASE_DIR = '/home/gerrit2/review_site' GERRIT_CACHE_DIR = os.path.expanduser( os.environ.get('GERRIT_CACHE_DIR', '~/.launchpadlib/cache')) GERRIT_CREDENTIALS = os.path.expanduser( os.environ.get('GERRIT_CREDENTIALS', '~/.launchpadlib/creds')) GERRIT_CONFIG = os.environ.get('GERRIT_CONFIG', '/home/gerrit2/review_site/etc/gerrit.config') GERRIT_SECURE_CONFIG_DEFAULT = '/home/gerrit2/review_site/etc/secure.config' GERRIT_SECURE_CONFIG = os.environ.get('GERRIT_SECURE_CONFIG', GERRIT_SECURE_CONFIG_DEFAULT) SPEC_RE = re.compile(r'\b(blueprint|bp)\b[ \t]*[#:]?[ \t]*(\S+)', re.I) BODY_RE = re.compile(r'^\s+.*$') def get_broken_config(filename): """gerrit config ini files are broken and have leading tabs.""" text = "" with open(filename, "r") as conf: for line in conf.readlines(): text = "%s%s" % (text, line.lstrip()) fp = StringIO.StringIO(text) c = ConfigParser.ConfigParser() c.readfp(fp) return c GERRIT_CONFIG = get_broken_config(GERRIT_CONFIG) SECURE_CONFIG = get_broken_config(GERRIT_SECURE_CONFIG) DB_HOST = GERRIT_CONFIG.get("database", "hostname") DB_USER = GERRIT_CONFIG.get("database", "username") DB_PASS = SECURE_CONFIG.get("database", "password") DB_DB = GERRIT_CONFIG.get("database", "database") def update_spec(launchpad, project, name, subject, link, topic=None): spec = None if p.is_no_launchpad_blueprints(project): return projects = p.project_to_groups(project) for project in projects: spec = launchpad.projects[project].getSpecification(name=name) if spec: break if not spec: return if spec.whiteboard: wb = spec.whiteboard.strip() else: wb = '' changed = False if topic: topiclink = '%s/#q,topic:%s,n,z' % (link[:link.find('/', 8)], topic) if topiclink not in wb: wb += "\n\n\nGerrit topic: %(link)s" % dict(link=topiclink) changed = True if link not in wb: wb += ("\n\n\nAddressed by: {link}\n" " {subject}\n").format(subject=subject, link=link) changed = True if changed: spec.whiteboard = wb spec.lp_save() def find_specs(launchpad, dbconn, args): git_dir_arg = '--git-dir={base_dir}/git/{project}.git'.format( base_dir=BASE_DIR, project=args.project) git_log = subprocess.Popen(['git', git_dir_arg, 'log', '--no-merges', args.commit + '^1..' + args.commit], stdout=subprocess.PIPE).communicate()[0] change = args.change if '~' in change: # Newer gerrit provides the change argument in this format: # gtest-org%2Fgtest~master~I117f34aaa4253e0b82b98de9077f7188d55c3f33 # So we need to split off the changeid if there is other data in there. 
change = change.rsplit('~', 1)[1] cur = dbconn.cursor() cur.execute("select subject, topic from changes where change_key=%s", change) subject, topic = cur.fetchone() specs = set([m.group(2) for m in SPEC_RE.finditer(git_log)]) if topic: topicspec = topic.split('/')[-1] specs |= set([topicspec]) for spec in specs: update_spec(launchpad, args.project, spec, subject, args.change_url, topic) def main(): parser = argparse.ArgumentParser() parser.add_argument('hook') # common parser.add_argument('--change', default=None) parser.add_argument('--change-url', default=None) parser.add_argument('--project', default=None) parser.add_argument('--branch', default=None) parser.add_argument('--commit', default=None) parser.add_argument('--topic', default=None) parser.add_argument('--change-owner', default=None) # patchset-abandoned parser.add_argument('--abandoner', default=None) parser.add_argument('--reason', default=None) # change-merged parser.add_argument('--submitter', default=None) parser.add_argument('--newrev', default=None) # patchset-created parser.add_argument('--uploader', default=None) parser.add_argument('--patchset', default=None) parser.add_argument('--is-draft', default=None) parser.add_argument('--kind', default=None) args = parser.parse_args() lpconn = launchpad.Launchpad.login_with( 'Gerrit User Sync', uris.LPNET_SERVICE_ROOT, GERRIT_CACHE_DIR, credentials_file=GERRIT_CREDENTIALS, version='devel') conn = pymysql.connect( host=DB_HOST, user=DB_USER, password=DB_PASS, db=DB_DB) find_specs(lpconn, conn, args) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/create_hound_config.py0000644000175000017500000000471513223707256020762 0ustar filipfilip#! /usr/bin/env python # Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # create_hound_config.py reads the project config file called projects.yaml # and generates a hound configuration file. import json import os import jeepyb.utils as u PROJECTS_YAML = os.environ.get('PROJECTS_YAML', '/home/hound/projects.yaml') GIT_SERVER = os.environ.get('GIT_BASE', 'git.openstack.org') DATA_PATH = os.environ.get('DATA_PATH', 'data') GIT_PROTOCOL = os.environ.get('GIT_PROTOCOL', 'git://') def main(): registry = u.ProjectsRegistry(PROJECTS_YAML) projects = [entry['project'] for entry in registry.configs_list] repos = {} for project in projects: # Ignore attic and stackforge, those are repos that are not # active anymore. 
if project.startswith(('openstack-attic', 'stackforge')): continue basename = os.path.basename(project) # ignore deb- projects that are forks of other projects intended for # internal debian packaging needs only and are generally not of # interest to upstream developers if basename.startswith('deb-'): continue repos[basename] = { 'url': "%(proto)s%(gitbase)s/%(project)s" % dict( proto=GIT_PROTOCOL, gitbase=GIT_SERVER, project=project), 'url-pattern': { 'base-url': "http://%(gitbase)s/cgit/%(project)s" "/tree/{path}{anchor}" % dict(gitbase=GIT_SERVER, project=project), 'anchor': '#n{line}', } } config = { "dbpath": "data", "repos": repos } with open('config.json', 'w') as config_file: config_file.write( json.dumps( config, indent=2, separators=(',', ': '), sort_keys=False, default=unicode)) if __name__ == "__main__": main() jeepyb-0+20170923/jeepyb/cmd/track_upstream.py0000644000175000017500000002026113223707256020013 0ustar filipfilip#! /usr/bin/env python # Copyright (C) 2011 OpenStack, LLC. # Copyright (c) 2012 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # manage_projects.py reads a config file called projects.ini # It should look like: # [projects] # homepage=http://openstack.org # gerrit-host=review.openstack.org # local-git-dir=/var/lib/git # gerrit-key=/home/gerrit2/review_site/etc/ssh_host_rsa_key # gerrit-committer=Project Creator # gerrit-replicate=True # has-github=True # has-wiki=False # has-issues=False # has-downloads=False # acl-dir=/home/gerrit2/acls # acl-base=/home/gerrit2/acls/project.config # # manage_projects.py reads a project listing file called projects.yaml # It should look like: # - project: PROJECT_NAME # options: # - has-wiki # - has-issues # - has-downloads # - has-pull-requests # - track-upstream # homepage: Some homepage that isn't http://openstack.org # description: This is a great project # upstream: https://gerrit.googlesource.com/gerrit # upstream-prefix: upstream # acl-config: /path/to/gerrit/project.config # acl-append: # - /path/to/gerrit/project.config # acl-parameters: # project: OTHER_PROJECT_NAME import argparse import json import logging import os import gerritlib.gerrit import jeepyb.log as l import jeepyb.utils as u registry = u.ProjectsRegistry() log = logging.getLogger("track_upstream") orgs = None def update_local_copy(repo_path, track_upstream, git_opts, ssh_env): # first do a clean of the branch to prevent possible # problems due to previous runs u.git_command(repo_path, "clean -fdx") has_upstream_remote = ( 'upstream' in u.git_command_output(repo_path, 'remote')[1]) if track_upstream: # If we're configured to track upstream but the repo # does not have an upstream remote, add one if not has_upstream_remote: u.git_command( repo_path, "remote add upstream %(upstream)s" % git_opts) # If we're configured to track upstream, make sure that # the upstream URL matches the config else: u.git_command( repo_path, "remote set-url upstream %(upstream)s" % git_opts) # Now that we have any upstreams configured, fetch all of 
the refs # we might need, pruning remote branches that no longer exist u.git_command( repo_path, "remote update --prune", env=ssh_env) else: # If we are not tracking upstream, then we do not need # an upstream remote configured if has_upstream_remote: u.git_command(repo_path, "remote rm upstream") # TODO(mordred): This is here so that later we can # inspect the master branch for meta-info # Checkout master and reset to the state of origin/master u.git_command(repo_path, "checkout -B master origin/master") def sync_upstream(repo_path, project, ssh_env, upstream_prefix): u.git_command( repo_path, "remote update upstream --prune", env=ssh_env) # Any branch that exists in the upstream remote, we want # a local branch of, optionally prefixed with the # upstream prefix value for branch in u.git_command_output( repo_path, "branch -a")[1].split('\n'): if not branch.strip().startswith("remotes/upstream"): continue if "->" in branch: continue local_branch = branch.split()[0][len('remotes/upstream/'):] if upstream_prefix: local_branch = "%s/%s" % ( upstream_prefix, local_branch) # Check out an up to date copy of the branch, so that # we can push it and it will get picked up below u.git_command( repo_path, "checkout -B %s %s" % (local_branch, branch)) try: # Push all of the local branches to similarly named # Branches on gerrit. Also, push all of the tags u.git_command( repo_path, "push origin refs/heads/*:refs/heads/*", env=ssh_env) u.git_command(repo_path, 'push origin --tags', env=ssh_env) except Exception: log.exception( "Error pushing %s to Gerrit." % project) def main(): parser = argparse.ArgumentParser(description='Manage projects') l.setup_logging_arguments(parser) parser.add_argument('--nocleanup', action='store_true', help='do not remove temp directories') parser.add_argument('projects', metavar='project', nargs='*', help='name of project(s) to process') args = parser.parse_args() l.configure_logging(args) JEEPYB_CACHE_DIR = registry.get_defaults('jeepyb-cache-dir', '/var/lib/jeepyb') IMPORT_DIR = os.path.join(JEEPYB_CACHE_DIR, 'import') GERRIT_HOST = registry.get_defaults('gerrit-host') GERRIT_PORT = int(registry.get_defaults('gerrit-port', '29418')) GERRIT_USER = registry.get_defaults('gerrit-user') GERRIT_KEY = registry.get_defaults('gerrit-key') GERRIT_GITID = registry.get_defaults('gerrit-committer') PROJECT_CACHE_FILE = os.path.join(JEEPYB_CACHE_DIR, 'project.cache') project_cache = {} if os.path.exists(PROJECT_CACHE_FILE): project_cache = json.loads(open(PROJECT_CACHE_FILE, 'r').read()) gerrit = gerritlib.gerrit.Gerrit(GERRIT_HOST, GERRIT_USER, GERRIT_PORT, GERRIT_KEY) project_list = gerrit.listProjects() ssh_env = u.make_ssh_wrapper(GERRIT_USER, GERRIT_KEY) try: for section in registry.configs_list: project = section['project'] if args.projects and project not in args.projects: continue try: log.info("Processing project: %s" % project) # Figure out all of the options options = section.get('options', dict()) track_upstream = 'track-upstream' in options if not track_upstream: continue # If this project doesn't want to use gerrit, exit cleanly. 
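                # For illustration, such a project might be listed in
                # projects.yaml roughly as follows (the name is made up):
                #
                #   - project: example/upstream-mirror
                #     options:
                #       - track-upstream
                #       - no-gerrit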
if 'no-gerrit' in options: continue upstream = section.get('upstream', None) upstream_prefix = section.get('upstream-prefix', None) repo_path = os.path.join(IMPORT_DIR, project) project_git = "%s.git" % project remote_url = "ssh://%s:%s/%s" % ( GERRIT_HOST, GERRIT_PORT, project) git_opts = dict(upstream=upstream, repo_path=repo_path, remote_url=remote_url) project_cache.setdefault(project, {}) if not project_cache[project]['pushed-to-gerrit']: continue # Make Local repo if not os.path.exists(repo_path): u.make_local_copy( repo_path, project, project_list, git_opts, ssh_env, upstream, GERRIT_HOST, GERRIT_PORT, project_git, GERRIT_GITID) else: update_local_copy( repo_path, track_upstream, git_opts, ssh_env) u.fsck_repo(repo_path) sync_upstream(repo_path, project, ssh_env, upstream_prefix) except Exception: log.exception( "Problems creating %s, moving on." % project) continue finally: os.unlink(ssh_env['GIT_SSH']) if __name__ == "__main__": main() jeepyb-0+20170923/requirements.txt0000644000175000017500000000055413223707256015663 0ustar filipfilip# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. pbr>=1.6 gerritlib>=0.3.0 PyMySQL paramiko>=1.13.0 PyGithub PyYAML>=3.1.0 pkginfo PyRSS2Gen python-swiftclient>=2.2.0 requests!=2.8.0,>=2.5.2 six>=1.9.0 jeepyb-0+20170923/setup.cfg0000644000175000017500000000264613223707256014224 0ustar filipfilip[metadata] name = jeepyb summary = Tools for managing gerrit projects and external sources. description-file = README.rst author = OpenStack Infrastructure Team author-email = openstack-infra@lists.openstack.org home-page = http://docs.openstack.org/infra/system-config/ classifier = Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 2.6 [entry_points] console_scripts = close-pull-requests = jeepyb.cmd.close_pull_requests:main create-cgitrepos = jeepyb.cmd.create_cgitrepos:main create-hound-config = jeepyb.cmd.create_hound_config:main expire-old-reviews = jeepyb.cmd.expire_old_reviews:main manage-projects = jeepyb.cmd.manage_projects:main notify-impact = jeepyb.cmd.notify_impact:main openstackwatch = jeepyb.cmd.openstackwatch:main process-cache = jeepyb.cmd.process_cache:main register-zanata-projects = jeepyb.cmd.register_zanata_projects:main track-upstream = jeepyb.cmd.track_upstream:main trivial-rebase = jeepyb.cmd.trivial_rebase:main update-blueprint = jeepyb.cmd.update_blueprint:main update-bug = jeepyb.cmd.update_bug:main welcome-message = jeepyb.cmd.welcome_message:main jeepyb-0+20170923/setup.py0000644000175000017500000000200413223707256014101 0ustar filipfilip# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools # In python < 2.7.4, a lazy loading of package `pbr` will break # setuptools if some other modules registered functions in `atexit`. # solution from: http://bugs.python.org/issue15881#msg170215 try: import multiprocessing # noqa except ImportError: pass setuptools.setup( setup_requires=['pbr>=1.8'], pbr=True) jeepyb-0+20170923/test-requirements.txt0000644000175000017500000000035013223707256016632 0ustar filipfilip# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. hacking<0.11,>=0.10.2 jeepyb-0+20170923/tox.ini0000644000175000017500000000057213223707256013712 0ustar filipfilip[tox] envlist = pep8 [testenv] setenv = VIRTUAL_ENV={envdir} deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt [testenv:pep8] commands = flake8 [testenv:pyflakes] commands = flake8 [testenv:venv] commands = {posargs} [flake8] # E125 and H are intentionally ignored ignore = E125,H show-source = True exclude = .venv,.tox,dist,doc,build,*.egg