Merge the two bionicbb services into one.

Change-Id: I6490da1ec96b2e24b330296950be84424e11bd35
This commit is contained in:
Dan Albert 2015-04-17 13:01:29 -07:00
parent 3875744f89
commit d3fe4f1229
6 changed files with 215 additions and 173 deletions

View File

@ -8,6 +8,7 @@ Dependencies
------------ ------------
* Python 2.7 * Python 2.7
* [Advanced Python Scheduler](https://apscheduler.readthedocs.org/en/latest/)
* [Flask](http://flask.pocoo.org/) * [Flask](http://flask.pocoo.org/)
* [Google API Client Library](https://developers.google.com/api-client-library/python/start/installation) * [Google API Client Library](https://developers.google.com/api-client-library/python/start/installation)
* [jenkinsapi](https://pypi.python.org/pypi/jenkinsapi) * [jenkinsapi](https://pypi.python.org/pypi/jenkinsapi)

View File

@ -16,11 +16,15 @@
# #
import json import json
import logging import logging
import os
from apscheduler.schedulers.background import BackgroundScheduler
from flask import Flask, request
import requests import requests
import gerrit import gerrit
import tasks
from flask import Flask, request
app = Flask(__name__) app = Flask(__name__)
@ -115,4 +119,12 @@ def drop_rejection():
if __name__ == "__main__": if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
# Prevent the job from being rescheduled by the reloader.
if os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
scheduler = BackgroundScheduler()
scheduler.start()
scheduler.add_job(tasks.get_and_process_jobs, 'interval', minutes=5)
app.run(host='0.0.0.0', debug=True) app.run(host='0.0.0.0', debug=True)

71
tools/bionicbb/gmail.py Normal file
View File

@ -0,0 +1,71 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import base64
import httplib2
import config
def get_body(msg):
    """Decode and return the raw body of a Gmail API message resource.

    Raises NotImplementedError when the body is stored as an attachment
    rather than inline base64 data.
    """
    payload_body = msg['payload']['body']
    if 'attachmentId' in payload_body:
        raise NotImplementedError('Handling of messages contained in '
                                  'attachments not yet implemented.')
    encoded_body = payload_body['data']
    return base64.urlsafe_b64decode(encoded_body.encode('ASCII'))
def build_service():
    """Authenticate against the Gmail API and return a service object.

    Credentials are cached in 'oauth.storage'. When no valid cached
    credentials exist, the interactive OAuth flow is run to obtain them.
    """
    from apiclient.discovery import build
    from oauth2client.client import flow_from_clientsecrets
    from oauth2client.file import Storage
    from oauth2client.tools import run

    scope = 'https://www.googleapis.com/auth/gmail.modify'
    storage = Storage('oauth.storage')

    # Start the OAuth flow to retrieve credentials.
    flow = flow_from_clientsecrets(config.client_secret_file, scope=scope)
    http = httplib2.Http()

    # Reuse cached credentials when possible; otherwise run the flow.
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run(flow, storage, http=http)

    http = credentials.authorize(http)
    return build('gmail', 'v1', http=http)
def get_gerrit_label(labels):
    """Return the id of the Gmail label named 'gerrit', or None if absent."""
    matching_ids = (label['id'] for label in labels
                    if label['name'] == 'gerrit')
    return next(matching_ids, None)
def get_all_messages(service, label):
    """Fetch every message under the given label, following pagination.

    Returns a list of message resource dicts as produced by the Gmail API
    users().messages().list endpoint.
    """
    messages_resource = service.users().messages()
    response = messages_resource.list(userId='me', labelIds=label).execute()
    results = list(response.get('messages', []))

    # Walk any remaining result pages.
    while 'nextPageToken' in response:
        token = response['nextPageToken']
        response = messages_resource.list(
            userId='me', pageToken=token).execute()
        results.extend(response['messages'])
    return results

View File

@ -1,4 +1,3 @@
#!/usr/bin/env python2
# #
# Copyright (C) 2015 The Android Open Source Project # Copyright (C) 2015 The Android Open Source Project
# #
@ -14,42 +13,19 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
# #
import base64 from __future__ import absolute_import
import httplib
import httplib2
import jenkinsapi
import json import json
import logging import logging
import os import os.path
import re import re
import requests import requests
import socket
import sys
import time
import apiclient.errors import jenkinsapi
import config
import gerrit import gerrit
import config
class GmailError(RuntimeError):
def __init__(self, message):
super(GmailError, self).__init__(message)
def get_gerrit_label(labels):
for label in labels:
if label['name'] == 'gerrit':
return label['id']
return None
def get_headers(msg):
headers = {}
for hdr in msg['payload']['headers']:
headers[hdr['name']] = hdr['value']
return headers
def is_untrusted_committer(change_id, patch_set): def is_untrusted_committer(change_id, patch_set):
@ -88,59 +64,6 @@ def should_skip_build(info):
return False return False
def build_service():
from apiclient.discovery import build
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import run
OAUTH_SCOPE = 'https://www.googleapis.com/auth/gmail.modify'
STORAGE = Storage('oauth.storage')
# Start the OAuth flow to retrieve credentials
flow = flow_from_clientsecrets(config.client_secret_file,
scope=OAUTH_SCOPE)
http = httplib2.Http()
# Try to retrieve credentials from storage or run the flow to generate them
credentials = STORAGE.get()
if credentials is None or credentials.invalid:
credentials = run(flow, STORAGE, http=http)
http = credentials.authorize(http)
return build('gmail', 'v1', http=http)
def get_all_messages(service, label):
msgs = []
response = service.users().messages().list(
userId='me', labelIds=label).execute()
if 'messages' in response:
msgs.extend(response['messages'])
while 'nextPageToken' in response:
page_token = response['nextPageToken']
response = service.users().messages().list(
userId='me', pageToken=page_token).execute()
msgs.extend(response['messages'])
return msgs
def get_body(msg):
if 'attachmentId' in msg['payload']['body']:
raise NotImplementedError('Handling of messages contained in '
'attachments not yet implemented.')
b64_body = msg['payload']['body']['data']
return base64.urlsafe_b64decode(b64_body.encode('ASCII'))
def get_gerrit_info(body):
info = {}
gerrit_pattern = r'^Gerrit-(\S+): (.+)$'
for match in re.finditer(gerrit_pattern, body, flags=re.MULTILINE):
info[match.group(1)] = match.group(2).strip()
return info
def clean_project(dry_run): def clean_project(dry_run):
username = config.jenkins_credentials['username'] username = config.jenkins_credentials['username']
password = config.jenkins_credentials['password'] password = config.jenkins_credentials['password']
@ -215,8 +138,6 @@ def handle_change(gerrit_info, _, dry_run):
if should_skip_build(gerrit_info): if should_skip_build(gerrit_info):
return True return True
return build_project(gerrit_info, dry_run) return build_project(gerrit_info, dry_run)
handle_newchange = handle_change
handle_newpatchset = handle_change
def drop_rejection(gerrit_info, dry_run): def drop_rejection(gerrit_info, dry_run):
@ -280,75 +201,3 @@ def skip_handler(gerrit_info, _, __):
logging.info('Skipping %s: %s', gerrit_info['MessageType'], logging.info('Skipping %s: %s', gerrit_info['MessageType'],
gerrit_info['Change-Id']) gerrit_info['Change-Id'])
return True return True
handle_abandon = skip_handler
handle_merge_failed = skip_handler
handle_merged = skip_handler
handle_restore = skip_handler
handle_revert = skip_handler
def process_message(msg, dry_run):
try:
body = get_body(msg)
gerrit_info = get_gerrit_info(body)
if not gerrit_info:
logging.fatal('No Gerrit info found: %s', msg.subject)
msg_type = gerrit_info['MessageType']
handler = 'handle_{}'.format(
gerrit_info['MessageType'].replace('-', '_'))
if handler in globals():
return globals()[handler](gerrit_info, body, dry_run)
else:
logging.warning('MessageType %s unhandled.', msg_type)
return False
except NotImplementedError as ex:
logging.error("%s", ex)
return False
except gerrit.GerritError as ex:
change_id = gerrit_info['Change-Id']
logging.error('Gerrit error (%d): %s %s', ex.code, change_id, ex.url)
return ex.code == 404
def main(argc, argv):
dry_run = False
if argc == 2 and argv[1] == '--dry-run':
dry_run = True
elif argc > 2:
sys.exit('usage: python {} [--dry-run]'.format(argv[0]))
gmail_service = build_service()
msg_service = gmail_service.users().messages()
while True:
try:
labels = gmail_service.users().labels().list(userId='me').execute()
if not labels['labels']:
raise GmailError('Could not retrieve Gmail labels')
label_id = get_gerrit_label(labels['labels'])
if not label_id:
raise GmailError('Could not find gerrit label')
for msg in get_all_messages(gmail_service, label_id):
msg = msg_service.get(userId='me', id=msg['id']).execute()
if process_message(msg, dry_run) and not dry_run:
msg_service.trash(userId='me', id=msg['id']).execute()
time.sleep(60 * 5)
except GmailError as ex:
logging.error('Gmail error: %s', ex)
time.sleep(60 * 5)
except apiclient.errors.HttpError as ex:
logging.error('API Client HTTP error: %s', ex)
time.sleep(60 * 5)
except httplib.BadStatusLine:
pass
except httplib2.ServerNotFoundError:
pass
except socket.error:
pass
if __name__ == '__main__':
main(len(sys.argv), sys.argv)

108
tools/bionicbb/tasks.py Normal file
View File

@ -0,0 +1,108 @@
#
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import httplib
import httplib2
import logging
import re
import socket
import apiclient.errors
import gerrit
import gmail
import presubmit
def get_gerrit_info(body):
    """Parse 'Gerrit-<Key>: <value>' header lines out of a message body.

    Returns a dict mapping each key (without the 'Gerrit-' prefix) to its
    whitespace-stripped value; later occurrences of a key win.
    """
    gerrit_pattern = r'^Gerrit-(\S+): (.+)$'
    return {match.group(1): match.group(2).strip()
            for match in re.finditer(gerrit_pattern, body,
                                     flags=re.MULTILINE)}
def process_message(msg, dry_run):
    """Dispatch a single Gmail message to the appropriate presubmit handler.

    Returns True when the message was fully handled (and may be trashed by
    the caller), False when it should be kept for a later retry.
    """
    try:
        body = gmail.get_body(msg)
        gerrit_info = get_gerrit_info(body)
        if not gerrit_info:
            logging.fatal('No Gerrit info found: %s', msg.subject)
            # logging.fatal does not terminate; without this return the
            # lookup below raises an uncaught KeyError on the empty dict.
            return False
        handlers = {
            'comment': presubmit.handle_comment,
            'newchange': presubmit.handle_change,
            'newpatchset': presubmit.handle_change,
            'abandon': presubmit.skip_handler,
            'merge-failed': presubmit.skip_handler,
            'merged': presubmit.skip_handler,
            'restore': presubmit.skip_handler,
            'revert': presubmit.skip_handler,
        }
        message_type = gerrit_info['MessageType']
        if message_type in handlers:
            return handlers[message_type](gerrit_info, body, dry_run)
        logging.warning('MessageType %s unhandled.', message_type)
        return False
    except NotImplementedError as ex:
        logging.error("%s", ex)
        return False
    except gerrit.GerritError as ex:
        change_id = gerrit_info['Change-Id']
        logging.error('Gerrit error (%d): %s %s', ex.code, change_id, ex.url)
        # A 404 means the change is gone; treat the message as handled.
        return ex.code == 404
def get_and_process_jobs():
    """Poll Gmail once for pending Gerrit messages and process them.

    We run in a loop because some of the exceptions thrown here mean we
    just need to retry. For errors where we should back off (typically any
    gmail API exceptions), process_changes catches the error and returns
    normally.
    """
    dry_run = False

    gmail_service = gmail.build_service()
    msg_service = gmail_service.users().messages()

    while True:
        try:
            process_changes(gmail_service, msg_service, dry_run)
            return
        except (httplib.BadStatusLine,
                httplib2.ServerNotFoundError,
                socket.error):
            # Transient connection trouble: retry immediately.
            pass
def process_changes(gmail_service, msg_service, dry_run):
    """Process every message filed under the 'gerrit' Gmail label.

    Messages that were fully handled are moved to the trash unless dry_run
    is set. Gmail API HTTP errors are logged and abort this polling pass.
    """
    try:
        label_response = gmail_service.users().labels().list(
            userId='me').execute()
        all_labels = label_response['labels']
        if not all_labels:
            logging.error('Could not retrieve Gmail labels')
            return

        label_id = gmail.get_gerrit_label(all_labels)
        if not label_id:
            logging.error('Could not find gerrit label')
            return

        for msg in gmail.get_all_messages(gmail_service, label_id):
            msg = msg_service.get(userId='me', id=msg['id']).execute()
            if process_message(msg, dry_run) and not dry_run:
                msg_service.trash(userId='me', id=msg['id']).execute()
    except apiclient.errors.HttpError as ex:
        logging.error('API Client HTTP error: %s', ex)

View File

@ -1,11 +1,12 @@
import gmail_listener
import mock import mock
import unittest import unittest
import presubmit
class TestShouldSkipBuild(unittest.TestCase): class TestShouldSkipBuild(unittest.TestCase):
@mock.patch('gmail_listener.contains_bionicbb') @mock.patch('presubmit.contains_bionicbb')
@mock.patch('gmail_listener.contains_cleanspec') @mock.patch('presubmit.contains_cleanspec')
@mock.patch('gerrit.get_commit') @mock.patch('gerrit.get_commit')
def test_accepts_googlers(self, mock_commit, *other_checks): def test_accepts_googlers(self, mock_commit, *other_checks):
mock_commit.return_value = { mock_commit.return_value = {
@ -16,14 +17,14 @@ class TestShouldSkipBuild(unittest.TestCase):
other_check.return_value = False other_check.return_value = False
for message_type in ('newchange', 'newpatchset', 'comment'): for message_type in ('newchange', 'newpatchset', 'comment'):
self.assertFalse(gmail_listener.should_skip_build({ self.assertFalse(presubmit.should_skip_build({
'MessageType': message_type, 'MessageType': message_type,
'Change-Id': '', 'Change-Id': '',
'PatchSet': '', 'PatchSet': '',
})) }))
@mock.patch('gmail_listener.contains_bionicbb') @mock.patch('presubmit.contains_bionicbb')
@mock.patch('gmail_listener.contains_cleanspec') @mock.patch('presubmit.contains_cleanspec')
@mock.patch('gerrit.get_commit') @mock.patch('gerrit.get_commit')
def test_rejects_googlish_domains(self, mock_commit, *other_checks): def test_rejects_googlish_domains(self, mock_commit, *other_checks):
mock_commit.return_value = { mock_commit.return_value = {
@ -34,14 +35,14 @@ class TestShouldSkipBuild(unittest.TestCase):
other_check.return_value = False other_check.return_value = False
for message_type in ('newchange', 'newpatchset', 'comment'): for message_type in ('newchange', 'newpatchset', 'comment'):
self.assertTrue(gmail_listener.should_skip_build({ self.assertTrue(presubmit.should_skip_build({
'MessageType': message_type, 'MessageType': message_type,
'Change-Id': '', 'Change-Id': '',
'PatchSet': '', 'PatchSet': '',
})) }))
@mock.patch('gmail_listener.contains_bionicbb') @mock.patch('presubmit.contains_bionicbb')
@mock.patch('gmail_listener.contains_cleanspec') @mock.patch('presubmit.contains_cleanspec')
@mock.patch('gerrit.get_commit') @mock.patch('gerrit.get_commit')
def test_rejects_non_googlers(self, mock_commit, *other_checks): def test_rejects_non_googlers(self, mock_commit, *other_checks):
mock_commit.return_value = { mock_commit.return_value = {
@ -52,14 +53,14 @@ class TestShouldSkipBuild(unittest.TestCase):
other_check.return_value = False other_check.return_value = False
for message_type in ('newchange', 'newpatchset', 'comment'): for message_type in ('newchange', 'newpatchset', 'comment'):
self.assertTrue(gmail_listener.should_skip_build({ self.assertTrue(presubmit.should_skip_build({
'MessageType': message_type, 'MessageType': message_type,
'Change-Id': '', 'Change-Id': '',
'PatchSet': '', 'PatchSet': '',
})) }))
@mock.patch('gmail_listener.contains_bionicbb') @mock.patch('presubmit.contains_bionicbb')
@mock.patch('gmail_listener.is_untrusted_committer') @mock.patch('presubmit.is_untrusted_committer')
@mock.patch('gerrit.get_files_for_revision') @mock.patch('gerrit.get_files_for_revision')
def test_skips_cleanspecs(self, mock_files, *other_checks): def test_skips_cleanspecs(self, mock_files, *other_checks):
mock_files.return_value = ['foo/CleanSpec.mk'] mock_files.return_value = ['foo/CleanSpec.mk']
@ -67,14 +68,14 @@ class TestShouldSkipBuild(unittest.TestCase):
other_check.return_value = False other_check.return_value = False
for message_type in ('newchange', 'newpatchset', 'comment'): for message_type in ('newchange', 'newpatchset', 'comment'):
self.assertTrue(gmail_listener.should_skip_build({ self.assertTrue(presubmit.should_skip_build({
'MessageType': message_type, 'MessageType': message_type,
'Change-Id': '', 'Change-Id': '',
'PatchSet': '', 'PatchSet': '',
})) }))
@mock.patch('gmail_listener.contains_cleanspec') @mock.patch('presubmit.contains_cleanspec')
@mock.patch('gmail_listener.is_untrusted_committer') @mock.patch('presubmit.is_untrusted_committer')
@mock.patch('gerrit.get_files_for_revision') @mock.patch('gerrit.get_files_for_revision')
def test_skips_bionicbb(self, mock_files, *other_checks): def test_skips_bionicbb(self, mock_files, *other_checks):
mock_files.return_value = ['tools/bionicbb/common.sh'] mock_files.return_value = ['tools/bionicbb/common.sh']
@ -82,7 +83,7 @@ class TestShouldSkipBuild(unittest.TestCase):
other_check.return_value = False other_check.return_value = False
for message_type in ('newchange', 'newpatchset', 'comment'): for message_type in ('newchange', 'newpatchset', 'comment'):
self.assertTrue(gmail_listener.should_skip_build({ self.assertTrue(presubmit.should_skip_build({
'MessageType': message_type, 'MessageType': message_type,
'Change-Id': '', 'Change-Id': '',
'PatchSet': '', 'PatchSet': '',