diff --git a/README.testing b/README.testing deleted file mode 100644 index a7989ac3c..000000000 --- a/README.testing +++ /dev/null @@ -1,23 +0,0 @@ -Most of the tests require a local Rietveld server. - -To set this up: -Method 1: Let the presubmit script do the work for you. - $ git cl presubmit - -Method 2: Manual. -1) Check out a copy of Rietveld: - $ svn checkout http://rietveld.googlecode.com/svn/trunk/ rietveld - (Use git-svn if you must, but man is it slow.) -2) Get the Google App Engine SDK: - http://code.google.com/appengine/downloads.html -3) To run Rietveld you will need Django 1.0, which is not included - with the App Engine SDK. Go to http://www.djangoproject.com/download , - download a Django from the 1.0 series (it's in the sidebar on the right), - untar it, then - $ export PYTHONPATH=`pwd`/Django-1.0.4 -4) Run Rietveld: - $ /path/to/appengine/sdk/dev_appserver.py /path/to/rietveld - (If using one of the App Engine launchers, be sure to use port 8080 - for this project.) - -And then, finally, run the tests. diff --git a/my_activity.py b/my_activity.py index 9d7d87a9d..1e3fbcd6b 100755 --- a/my_activity.py +++ b/my_activity.py @@ -54,7 +54,6 @@ import re import auth import fix_encoding import gerrit_util -import rietveld from third_party import httplib2 @@ -77,36 +76,6 @@ class DefaultFormatter(Formatter): return self.default return Formatter.get_value(self, key, args, kwds) -rietveld_instances = [ - { - 'url': 'codereview.chromium.org', - 'shorturl': 'crrev.com', - 'supports_owner_modified_query': True, - 'requires_auth': False, - 'email_domain': 'chromium.org', - 'short_url_protocol': 'https', - }, - { - 'url': 'chromereviews.googleplex.com', - 'shorturl': 'go/chromerev', - 'supports_owner_modified_query': True, - 'requires_auth': True, - 'email_domain': 'google.com', - }, - { - 'url': 'codereview.appspot.com', - 'supports_owner_modified_query': True, - 'requires_auth': False, - 'email_domain': 'chromium.org', - }, - { - 'url': 'breakpad.appspot.com', - 'supports_owner_modified_query': False, - 'requires_auth': False, - 'email_domain': 'chromium.org', - }, -] - gerrit_instances = [ { 'url': 'android-review.googlesource.com', @@ -187,15 +156,6 @@ def datetime_from_gerrit(date_string): return datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S.%f000') -def datetime_from_rietveld(date_string): - try: - return datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S.%f') - except ValueError: - # Sometimes rietveld returns a value without the milliseconds part, so we - # attempt to parse those cases as well. - return datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S') - - def datetime_from_monorail(date_string): return datetime.strptime(date_string, '%Y-%m-%dT%H:%M:%S') @@ -210,7 +170,6 @@ class MyActivity(object): self.reviews = [] self.issues = [] self.referenced_issues = [] - self.check_cookies() self.google_code_auth_token = None self.access_errors = set() @@ -219,72 +178,6 @@ class MyActivity(object): sys.stdout.write(how) sys.stdout.flush() - # Check the codereview cookie jar to determine which Rietveld instances to - # authenticate to. 
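# [Editor's note, not part of the original file: in check_cookies() below,
# filtered_instances is initialized but never populated, so the "No cookie
# found" warning branch is dead code. The method's only real effect is
# setting instance['auth'], which rietveld_search() consults before querying
# an instance that requires authentication.]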
- def check_cookies(self): - filtered_instances = [] - - def has_cookie(instance): - auth_config = auth.extract_auth_config_from_options(self.options) - a = auth.get_authenticator_for_host(instance['url'], auth_config) - return a.has_cached_credentials() - - for instance in rietveld_instances: - instance['auth'] = has_cookie(instance) - - if filtered_instances: - logging.warning('No cookie found for the following Rietveld instance%s:', - 's' if len(filtered_instances) > 1 else '') - for instance in filtered_instances: - logging.warning('\t' + instance['url']) - logging.warning('Use --auth if you would like to authenticate to them.') - - def rietveld_search(self, instance, owner=None, reviewer=None): - if instance['requires_auth'] and not instance['auth']: - return [] - - - email = None if instance['auth'] else '' - auth_config = auth.extract_auth_config_from_options(self.options) - remote = rietveld.Rietveld('https://' + instance['url'], auth_config, email) - - # See def search() in rietveld.py to see all the filters you can use. - query_modified_after = None - - if instance['supports_owner_modified_query']: - query_modified_after = self.modified_after.strftime('%Y-%m-%d') - - # Rietveld does not allow search by both created_before and modified_after. - # (And some instances don't allow search by both owner and modified_after) - owner_email = None - reviewer_email = None - if owner: - owner_email = owner + '@' + instance['email_domain'] - if reviewer: - reviewer_email = reviewer + '@' + instance['email_domain'] - issues = remote.search( - owner=owner_email, - reviewer=reviewer_email, - modified_after=query_modified_after, - with_messages=True) - self.show_progress() - - issues = filter( - lambda i: (datetime_from_rietveld(i['created']) < self.modified_before), - issues) - issues = filter( - lambda i: (datetime_from_rietveld(i['modified']) > self.modified_after), - issues) - - should_filter_by_user = True - issues = map(partial(self.process_rietveld_issue, remote, instance), issues) - issues = filter( - partial(self.filter_issue, should_filter_by_user=should_filter_by_user), - issues) - issues = sorted(issues, key=lambda i: i['modified'], reverse=True) - - return issues - def extract_bug_numbers_from_description(self, issue): description = None @@ -311,63 +204,6 @@ class MyActivity(object): return sorted(set(bugs)) - def process_rietveld_issue(self, remote, instance, issue): - ret = {} - if self.options.deltas: - patchset_props = remote.get_patchset_properties( - issue['issue'], - issue['patchsets'][-1]) - self.show_progress() - ret['delta'] = '+%d,-%d' % ( - sum(f['num_added'] for f in patchset_props['files'].itervalues()), - sum(f['num_removed'] for f in patchset_props['files'].itervalues())) - - if issue['landed_days_ago'] != 'unknown': - ret['status'] = 'committed' - elif issue['closed']: - ret['status'] = 'closed' - elif len(issue['reviewers']) and issue['all_required_reviewers_approved']: - ret['status'] = 'ready' - else: - ret['status'] = 'open' - - ret['owner'] = issue['owner_email'] - ret['author'] = ret['owner'] - - ret['reviewers'] = set(issue['reviewers']) - - if 'shorturl' in instance: - url = instance['shorturl'] - protocol = instance.get('short_url_protocol', 'http') - else: - url = instance['url'] - protocol = 'https' - - ret['review_url'] = '%s://%s/%d' % (protocol, url, issue['issue']) - - # Rietveld sometimes has '\r\n' instead of '\n'. 
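# [Editor's note: the next line strips '\r' and keeps the first line of the
# CL description as the header, e.g.
#   'Fix crash\r\n\r\nBUG=123'  ->  'Fix crash']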
- ret['header'] = issue['description'].replace('\r', '').split('\n')[0] - - ret['modified'] = datetime_from_rietveld(issue['modified']) - ret['created'] = datetime_from_rietveld(issue['created']) - ret['replies'] = self.process_rietveld_replies(issue['messages']) - - ret['bugs'] = self.extract_bug_numbers_from_description(issue) - ret['landed_days_ago'] = issue['landed_days_ago'] - - return ret - - @staticmethod - def process_rietveld_replies(replies): - ret = [] - for reply in replies: - r = {} - r['author'] = reply['sender'] - r['created'] = datetime_from_rietveld(reply['date']) - r['content'] = '' - ret.append(r) - return ret - def gerrit_changes_over_rest(self, instance, filters): # Convert the "key:value" filter to a list of (key, value) pairs. req = list(f.split(':', 1) for f in filters) @@ -694,17 +530,13 @@ class MyActivity(object): pass def get_changes(self): - num_instances = len(rietveld_instances) + len(gerrit_instances) + num_instances = len(gerrit_instances) with contextlib.closing(ThreadPool(num_instances)) as pool: - rietveld_changes = pool.map_async( - lambda instance: self.rietveld_search(instance, owner=self.user), - rietveld_instances) gerrit_changes = pool.map_async( lambda instance: self.gerrit_search(instance, owner=self.user), gerrit_instances) - rietveld_changes = itertools.chain.from_iterable(rietveld_changes.get()) gerrit_changes = itertools.chain.from_iterable(gerrit_changes.get()) - self.changes = list(rietveld_changes) + list(gerrit_changes) + self.changes = list(gerrit_changes) def print_changes(self): if self.changes: @@ -719,17 +551,13 @@ class MyActivity(object): logging.error(error.rstrip()) def get_reviews(self): - num_instances = len(rietveld_instances) + len(gerrit_instances) + num_instances = len(gerrit_instances) with contextlib.closing(ThreadPool(num_instances)) as pool: - rietveld_reviews = pool.map_async( - lambda instance: self.rietveld_search(instance, reviewer=self.user), - rietveld_instances) gerrit_reviews = pool.map_async( lambda instance: self.gerrit_search(instance, reviewer=self.user), gerrit_instances) - rietveld_reviews = itertools.chain.from_iterable(rietveld_reviews.get()) gerrit_reviews = itertools.chain.from_iterable(gerrit_reviews.get()) - self.reviews = list(rietveld_reviews) + list(gerrit_reviews) + self.reviews = list(gerrit_reviews) def print_reviews(self): if self.reviews: @@ -861,9 +689,6 @@ class MyActivity(object): def main(): - # Silence upload.py. - rietveld.upload.verbosity = 0 - parser = optparse.OptionParser(description=sys.modules[__name__].__doc__) parser.add_option( '-u', '--user', metavar='', diff --git a/my_reviews.py b/my_reviews.py deleted file mode 100755 index 0d81be599..000000000 --- a/my_reviews.py +++ /dev/null @@ -1,401 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 The Chromium Authors. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Get rietveld stats about the review you done, or forgot to do. - -Example: - - my_reviews.py -r me@chromium.org -Q for stats for last quarter. 
-""" - -from __future__ import print_function - -import datetime -import math -import optparse -import os -import sys - -import auth -import rietveld - -try: - import dateutil # pylint: disable=import-error - import dateutil.parser - from dateutil.relativedelta import relativedelta -except ImportError: - print('python-dateutil package required') - exit(1) - - -def username(email): - """Keeps the username of an email address.""" - return email.split('@', 1)[0] - - -def to_datetime(string): - """Load UTC time as a string into a datetime object.""" - try: - # Format is 2011-07-05 01:26:12.084316 - return datetime.datetime.strptime( - string.split('.', 1)[0], '%Y-%m-%d %H:%M:%S') - except ValueError: - return datetime.datetime.strptime(string, '%Y-%m-%d') - - -def to_time(seconds): - """Convert a number of seconds into human readable compact string.""" - prefix = '' - if seconds < 0: - prefix = '-' - seconds *= -1 - minutes = math.floor(seconds / 60) - seconds -= minutes * 60 - hours = math.floor(minutes / 60) - minutes -= hours * 60 - days = math.floor(hours / 24) - hours -= days * 24 - out = [] - if days > 0: - out.append('%dd' % days) - if hours > 0 or days > 0: - out.append('%02dh' % hours) - if minutes > 0 or hours > 0 or days > 0: - out.append('%02dm' % minutes) - if seconds > 0 and not out: - # Skip seconds unless there's only seconds. - out.append('%02ds' % seconds) - return prefix + ''.join(out) - - -class Stats(object): - def __init__(self): - self.total = 0 - self.actually_reviewed = 0 - self.latencies = [] - self.lgtms = 0 - self.multiple_lgtms = 0 - self.drive_by = 0 - self.not_requested = 0 - self.self_review = 0 - - self.percent_lgtm = 0. - self.percent_drive_by = 0. - self.percent_not_requested = 0. - self.days = 0 - - @property - def average_latency(self): - if not self.latencies: - return 0 - return sum(self.latencies) / float(len(self.latencies)) - - @property - def median_latency(self): - if not self.latencies: - return 0 - length = len(self.latencies) - latencies = sorted(self.latencies) - if (length & 1) == 0: - return (latencies[length/2] + latencies[length/2-1]) / 2. - else: - return latencies[length/2] - - @property - def percent_done(self): - if not self.total: - return 0 - return self.actually_reviewed * 100. / self.total - - @property - def review_per_day(self): - if not self.days: - return 0 - return self.total * 1. / self.days - - @property - def review_done_per_day(self): - if not self.days: - return 0 - return self.actually_reviewed * 1. / self.days - - def finalize(self, first_day, last_day): - if self.actually_reviewed: - assert self.actually_reviewed > 0 - self.percent_lgtm = (self.lgtms * 100. / self.actually_reviewed) - self.percent_drive_by = (self.drive_by * 100. / self.actually_reviewed) - self.percent_not_requested = ( - self.not_requested * 100. 
/ self.actually_reviewed) - assert bool(first_day) == bool(last_day) - if first_day and last_day: - assert first_day <= last_day - self.days = (to_datetime(last_day) - to_datetime(first_day)).days + 1 - assert self.days > 0 - - -def _process_issue_lgtms(issue, reviewer, stats): - """Calculates LGTMs stats.""" - stats.actually_reviewed += 1 - reviewer_lgtms = len([ - msg for msg in issue['messages'] - if msg['approval'] and msg['sender'] == reviewer]) - if reviewer_lgtms > 1: - stats.multiple_lgtms += 1 - return ' X ' - if reviewer_lgtms: - stats.lgtms += 1 - return ' x ' - else: - return ' o ' - - -def _process_issue_latency(issue, reviewer, stats): - """Calculates latency for an issue that was actually reviewed.""" - from_owner = [ - msg for msg in issue['messages'] if msg['sender'] == issue['owner_email'] - ] - if not from_owner: - # Probably requested by email. - stats.not_requested += 1 - return '' - - first_msg_from_owner = None - latency = None - received = False - for index, msg in enumerate(issue['messages']): - if not first_msg_from_owner and msg['sender'] == issue['owner_email']: - first_msg_from_owner = msg - if index and not received and msg['sender'] == reviewer: - # Not first email, reviewer never received one, reviewer sent a mesage. - stats.drive_by += 1 - return '' - received |= reviewer in msg['recipients'] - - if first_msg_from_owner and msg['sender'] == reviewer: - delta = msg['date'] - first_msg_from_owner['date'] - latency = delta.seconds + delta.days * 24 * 3600 - break - - if latency is None: - stats.not_requested += 1 - return '' - if latency > 0: - stats.latencies.append(latency) - else: - stats.not_requested += 1 - return to_time(latency) - - -def _process_issue(issue): - """Preprocesses the issue to simplify the remaining code.""" - issue['owner_email'] = username(issue['owner_email']) - issue['reviewers'] = set(username(r) for r in issue['reviewers']) - # By default, hide commit-bot. - issue['reviewers'] -= set(['commit-bot']) - for msg in issue['messages']: - msg['sender'] = username(msg['sender']) - msg['recipients'] = [username(r) for r in msg['recipients']] - # Convert all times to datetime instances. - msg['date'] = to_datetime(msg['date']) - issue['messages'].sort(key=lambda x: x['date']) - - -def print_issue(issue, reviewer, stats): - """Process an issue and prints stats about it.""" - stats.total += 1 - _process_issue(issue) - if issue['owner_email'] == reviewer: - stats.self_review += 1 - latency = '' - reviewed = '' - elif any(msg['sender'] == reviewer for msg in issue['messages']): - reviewed = _process_issue_lgtms(issue, reviewer, stats) - latency = _process_issue_latency(issue, reviewer, stats) - else: - latency = 'N/A' - reviewed = '' - - # More information is available, print issue.keys() to see them. - print('%7d %10s %3s %14s %-15s %s' % ( - issue['issue'], - issue['created'][:10], - reviewed, - latency, - issue['owner_email'], - ', '.join(sorted(issue['reviewers'])))) - - -def print_reviews( - reviewer, created_after, created_before, instance_url, auth_config): - """Prints issues |reviewer| received and potentially reviewed.""" - remote = rietveld.Rietveld(instance_url, auth_config) - - # The stats we gather. Feel free to send me a CL to get more stats. - stats = Stats() - - # Column sizes need to match print_issue() output. - print( - 'Issue Creation Did Latency Owner Reviewers', - file=sys.stderr) - - # See def search() in rietveld.py to see all the filters you can use. 
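# [Editor's note: search() in rietveld.py is a generator that pages through
# results with a server-side cursor, so the loop below prints each issue as
# its page arrives rather than waiting for the full result set.]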
- issues = [] - for issue in remote.search( - reviewer=reviewer, - created_after=created_after, - created_before=created_before, - with_messages=True): - issues.append(issue) - print_issue(issue, username(reviewer), stats) - - issues.sort(key=lambda x: x['created']) - first_day = None - last_day = None - if issues: - first_day = issues[0]['created'][:10] - last_day = issues[-1]['created'][:10] - stats.finalize(first_day, last_day) - - print( - '%s reviewed %d issues out of %d (%1.1f%%). %d were self-review.' % - (reviewer, stats.actually_reviewed, stats.total, stats.percent_done, - stats.self_review), file=sys.stderr) - print( - '%4.1f review request/day during %3d days (%4.1f r/d done).' % - (stats.review_per_day, stats.days, stats.review_done_per_day), - file=sys.stderr) - print( - '%4d were drive-bys (%5.1f%% of reviews done).' % - (stats.drive_by, stats.percent_drive_by), file=sys.stderr) - print( - '%4d were requested over IM or irc (%5.1f%% of reviews done).' % - (stats.not_requested, stats.percent_not_requested), file=sys.stderr) - print( - '%4d issues LGTM\'d (%5.1f%% of reviews done),' - ' gave multiple LGTMs on %d issues.' % - (stats.lgtms, stats.percent_lgtm, stats.multiple_lgtms), file=sys.stderr) - print( - 'Average latency from request to first comment is %s.' % - to_time(stats.average_latency), file=sys.stderr) - print( - 'Median latency from request to first comment is %s.' % - to_time(stats.median_latency), file=sys.stderr) - - -def print_count( - reviewer, created_after, created_before, instance_url, auth_config): - remote = rietveld.Rietveld(instance_url, auth_config) - print(len(list(remote.search( - reviewer=reviewer, - created_after=created_after, - created_before=created_before, - keys_only=True)))) - - -def get_previous_quarter(today): - """There are four quarters, 01-03, 04-06, 07-09, 10-12. - - If today is in the last month of a quarter, assume it's the current quarter - that is requested. - """ - end_year = today.year - end_month = today.month - (today.month % 3) + 1 - if end_month <= 0: - end_year -= 1 - end_month += 12 - if end_month > 12: - end_year += 1 - end_month -= 12 - end = '%d-%02d-01' % (end_year, end_month) - begin_year = end_year - begin_month = end_month - 3 - if begin_month <= 0: - begin_year -= 1 - begin_month += 12 - begin = '%d-%02d-01' % (begin_year, begin_month) - return begin, end - - -def main(): - # Silence upload.py. - rietveld.upload.verbosity = 0 - today = datetime.date.today() - begin, end = get_previous_quarter(today) - default_email = os.environ.get('EMAIL_ADDRESS') - if not default_email: - user = os.environ.get('USER') - if user: - default_email = user + '@chromium.org' - - parser = optparse.OptionParser(description=__doc__) - parser.add_option( - '--count', action='store_true', - help='Just count instead of printing individual issues') - parser.add_option( - '-r', '--reviewer', metavar='', default=default_email, - help='Filter on issue reviewer, default=%default') - parser.add_option( - '-b', '--begin', metavar='', - help='Filter issues created after the date') - parser.add_option( - '-e', '--end', metavar='', - help='Filter issues created before the date') - parser.add_option( - '-Q', '--last_quarter', action='store_true', - help='Use last quarter\'s dates, e.g. 
%s to %s' % (begin, end))
-  parser.add_option(
-      '-i', '--instance_url', metavar='<host>',
-      default='http://codereview.chromium.org',
-      help='Host to use, default is %default')
-  auth.add_auth_options(parser)
-  # Remove description formatting
-  parser.format_description = (
-      lambda _: parser.description) # pylint: disable=no-member
-  options, args = parser.parse_args()
-  auth_config = auth.extract_auth_config_from_options(options)
-  if args:
-    parser.error('Args unsupported')
-  if options.reviewer is None:
-    parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')
-
-  print('Searching for reviews by %s' % options.reviewer, file=sys.stderr)
-  if options.last_quarter:
-    options.begin = begin
-    options.end = end
-    print('Using range %s to %s' %
-          (options.begin, options.end), file=sys.stderr)
-  else:
-    if options.begin is None or options.end is None:
-      parser.error('Please specify either --last_quarter or --begin and --end')
-
-  # Validate dates.
-  try:
-    options.begin = dateutil.parser.parse(options.begin).strftime('%Y-%m-%d')
-    options.end = dateutil.parser.parse(options.end).strftime('%Y-%m-%d')
-  except ValueError as e:
-    parser.error('%s: %s - %s' % (e, options.begin, options.end))
-
-  if options.count:
-    print_count(
-        options.reviewer,
-        options.begin,
-        options.end,
-        options.instance_url,
-        auth_config)
-  else:
-    print_reviews(
-        options.reviewer,
-        options.begin,
-        options.end,
-        options.instance_url,
-        auth_config)
-  return 0
-
-
-if __name__ == '__main__':
-  try:
-    sys.exit(main())
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
diff --git a/rietveld.py b/rietveld.py
deleted file mode 100644
index 450f8c995..000000000
--- a/rietveld.py
+++ /dev/null
@@ -1,781 +0,0 @@
-# coding: utf-8
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Defines class Rietveld to easily access a rietveld instance.
-
-Security implications:
-
-The following hypotheses are made:
-- Rietveld enforces:
-  - Nobody other than the issue owner can upload a patch set
-  - Verifies the issue owner credentials when creating new issues
-  - An issue's owner can't change once the issue is created
-  - A patch set cannot be modified
-"""
-
-from __future__ import print_function
-
-import copy
-import errno
-import json
-import logging
-import re
-import socket
-import ssl
-import StringIO
-import sys
-import time
-import urllib
-import urllib2
-import urlparse
-
-import patch
-
-from third_party import upload
-import third_party.oauth2client.client as oa2client
-from third_party import httplib2
-
-# Appengine replies with 302 when authentication fails (sigh.)
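# [Editor's note: oauth2client refreshes the access token and retries when a
# response status appears in REFRESH_STATUS_CODES (normally just 401).
# Appending 302 makes App Engine's login redirect trigger the same refresh;
# OAuthRpcServer.Send() below relies on the same assumption when it traps
# httplib2.RedirectLimit.]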
-oa2client.REFRESH_STATUS_CODES.append(302) -upload.LOGGER.setLevel(logging.WARNING) # pylint: disable=E1103 - - -class Rietveld(object): - """Accesses rietveld.""" - def __init__( - self, url, auth_config, email=None, extra_headers=None, maxtries=None): - self.url = url.rstrip('/') - self.rpc_server = upload.GetRpcServer(self.url, auth_config, email) - - self._xsrf_token = None - self._xsrf_token_time = None - - self._maxtries = maxtries or 40 - - def xsrf_token(self): - if (not self._xsrf_token_time or - (time.time() - self._xsrf_token_time) > 30*60): - self._xsrf_token_time = time.time() - self._xsrf_token = self.get( - '/xsrf_token', - extra_headers={'X-Requesting-XSRF-Token': '1'}) - return self._xsrf_token - - def get_pending_issues(self): - """Returns an array of dict of all the pending issues on the server.""" - # TODO: Convert this to use Rietveld::search(), defined below. - return json.loads( - self.get('/search?format=json&commit=2&closed=3&' - 'keys_only=True&limit=1000&order=__key__'))['results'] - - def close_issue(self, issue): - """Closes the Rietveld issue for this changelist.""" - logging.info('closing issue %d' % issue) - self.post("/%d/close" % issue, [('xsrf_token', self.xsrf_token())]) - - def get_description(self, issue, force=False): - """Returns the issue's description. - - Converts any CRLF into LF and strip extraneous whitespace. - """ - return '\n'.join(self.get('/%d/description' % issue).strip().splitlines()) - - def get_issue_properties(self, issue, messages): - """Returns all the issue's metadata as a dictionary.""" - url = '/api/%d' % issue - if messages: - url += '?messages=true' - data = json.loads(self.get(url, retry_on_404=True)) - data['description'] = '\n'.join(data['description'].strip().splitlines()) - return data - - def get_depends_on_patchset(self, issue, patchset): - """Returns the patchset this patchset depends on if it exists.""" - url = '/%d/patchset/%d/get_depends_on_patchset' % (issue, patchset) - resp = None - try: - resp = json.loads(self.get(url)) - except (urllib2.HTTPError, ValueError): - # The get_depends_on_patchset endpoint does not exist on this Rietveld - # instance yet. Ignore the error and proceed. - # TODO(rmistry): Make this an error when all Rietveld instances have - # this endpoint. - pass - return resp - - def get_patchset_properties(self, issue, patchset): - """Returns the patchset properties.""" - url = '/api/%d/%d' % (issue, patchset) - return json.loads(self.get(url)) - - def get_file_content(self, issue, patchset, item): - """Returns the content of a new file. - - Throws HTTP 302 exception if the file doesn't exist or is not a binary file. - """ - # content = 0 is the old file, 1 is the new file. - content = 1 - url = '/%d/binary/%d/%d/%d' % (issue, patchset, item, content) - return self.get(url) - - def get_file_diff(self, issue, patchset, item): - """Returns the diff of the file. - - Returns a useless diff for binary files. - """ - url = '/download/issue%d_%d_%d.diff' % (issue, patchset, item) - return self.get(url) - - def get_patch(self, issue, patchset): - """Returns a PatchSet object containing the details to apply this patch.""" - props = self.get_patchset_properties(issue, patchset) or {} - out = [] - for filename, state in props.get('files', {}).iteritems(): - logging.debug('%s' % filename) - # If not status, just assume it's a 'M'. Rietveld often gets it wrong and - # just has status: null. Oh well. 
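# [Editor's note: the status letters follow svn conventions (A=add, D=delete,
# M=modify, R=replace); a trailing '+' as in 'A +' appears to mark a
# copy/move with history, which the status[1] == '+' check below handles.]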
- status = state.get('status') or 'M' - if status[0] not in ('A', 'D', 'M', 'R'): - raise patch.UnsupportedPatchFormat( - filename, 'Change with status \'%s\' is not supported.' % status) - - svn_props = self.parse_svn_properties( - state.get('property_changes', ''), filename) - - if state.get('is_binary'): - if status[0] == 'D': - if status[0] != status.strip(): - raise patch.UnsupportedPatchFormat( - filename, 'Deleted file shouldn\'t have property change.') - out.append(patch.FilePatchDelete(filename, state['is_binary'])) - else: - content = self.get_file_content(issue, patchset, state['id']) - if not content or content == 'None': - # As a precaution due to a bug in upload.py for git checkout, refuse - # empty files. If it's empty, it's not a binary file. - raise patch.UnsupportedPatchFormat( - filename, - 'Binary file is empty. Maybe the file wasn\'t uploaded in the ' - 'first place?') - out.append(patch.FilePatchBinary( - filename, - content, - svn_props, - is_new=(status[0] == 'A'))) - continue - - try: - diff = self.get_file_diff(issue, patchset, state['id']) - except urllib2.HTTPError as e: - if e.code == 404: - raise patch.UnsupportedPatchFormat( - filename, 'File doesn\'t have a diff.') - raise - - # FilePatchDiff() will detect file deletion automatically. - p = patch.FilePatchDiff(filename, diff, svn_props) - out.append(p) - if status[0] == 'A': - # It won't be set for empty file. - p.is_new = True - if (len(status) > 1 and - status[1] == '+' and - not (p.source_filename or p.svn_properties)): - raise patch.UnsupportedPatchFormat( - filename, 'Failed to process the svn properties') - - return patch.PatchSet(out) - - @staticmethod - def parse_svn_properties(rietveld_svn_props, filename): - """Returns a list of tuple [('property', 'newvalue')]. - - rietveld_svn_props is the exact format from 'svn diff'. - """ - rietveld_svn_props = rietveld_svn_props.splitlines() - svn_props = [] - if not rietveld_svn_props: - return svn_props - # 1. Ignore svn:mergeinfo. - # 2. Accept svn:eol-style and svn:executable. - # 3. Refuse any other. - # \n - # Added: svn:ignore\n - # + LF\n - - spacer = rietveld_svn_props.pop(0) - if spacer or not rietveld_svn_props: - # svn diff always put a spacer between the unified diff and property - # diff - raise patch.UnsupportedPatchFormat( - filename, 'Failed to parse svn properties.') - - while rietveld_svn_props: - # Something like 'Added: svn:eol-style'. Note the action is localized. - # *sigh*. - action = rietveld_svn_props.pop(0) - match = re.match(r'^(\w+): (.+)$', action) - if not match or not rietveld_svn_props: - raise patch.UnsupportedPatchFormat( - filename, - 'Failed to parse svn properties: %s, %s' % (action, svn_props)) - - if match.group(2) == 'svn:mergeinfo': - # Silently ignore the content. - rietveld_svn_props.pop(0) - continue - - if match.group(1) not in ('Added', 'Modified'): - # Will fail for our French friends. - raise patch.UnsupportedPatchFormat( - filename, 'Unsupported svn property operation.') - - if match.group(2) in ('svn:eol-style', 'svn:executable', 'svn:mime-type'): - # ' + foo' where foo is the new value. That's fragile. 
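# [Editor's note: per the docstring example above, a property block looks
# like:
#   Added: svn:eol-style
#    + LF
# The regex below anchors on that literal ' + ' value prefix, which is why
# the comment above calls it fragile.]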
- content = rietveld_svn_props.pop(0) - match2 = re.match(r'^ \+ (.*)$', content) - if not match2: - raise patch.UnsupportedPatchFormat( - filename, 'Unsupported svn property format.') - svn_props.append((match.group(2), match2.group(1))) - return svn_props - - def update_description(self, issue, description): - """Sets the description for an issue on Rietveld.""" - logging.info('new description for issue %d' % issue) - self.post('/%d/description' % issue, [ - ('description', description), - ('xsrf_token', self.xsrf_token())]) - - def add_comment(self, issue, message, add_as_reviewer=False): - max_message = 10000 - tail = '…\n(message too large)' - if len(message) > max_message: - message = message[:max_message-len(tail)] + tail - logging.info('issue %d; comment: %s' % (issue, message.strip()[:300])) - return self.post('/%d/publish' % issue, [ - ('xsrf_token', self.xsrf_token()), - ('message', message), - ('message_only', 'True'), - ('add_as_reviewer', str(bool(add_as_reviewer))), - ('send_mail', 'True'), - ('no_redirect', 'True')]) - - def add_inline_comment( - self, issue, text, side, snapshot, patchset, patchid, lineno): - logging.info('add inline comment for issue %d' % issue) - return self.post('/inline_draft', [ - ('issue', str(issue)), - ('text', text), - ('side', side), - ('snapshot', snapshot), - ('patchset', str(patchset)), - ('patch', str(patchid)), - ('lineno', str(lineno))]) - - def set_flag(self, issue, patchset, flag, value): - return self.post('/%d/edit_flags' % issue, [ - ('last_patchset', str(patchset)), - ('xsrf_token', self.xsrf_token()), - (flag, str(value))]) - - def set_flags(self, issue, patchset, flags): - return self.post('/%d/edit_flags' % issue, [ - ('last_patchset', str(patchset)), - ('xsrf_token', self.xsrf_token()), - ] + [(flag, str(value)) for flag, value in flags.iteritems()]) - - def search( - self, - owner=None, reviewer=None, - base=None, - closed=None, private=None, commit=None, - created_before=None, created_after=None, - modified_before=None, modified_after=None, - per_request=None, keys_only=False, - with_messages=False): - """Yields search results.""" - # These are expected to be strings. - string_keys = { - 'owner': owner, - 'reviewer': reviewer, - 'base': base, - 'created_before': created_before, - 'created_after': created_after, - 'modified_before': modified_before, - 'modified_after': modified_after, - } - # These are either None, False or True. - three_state_keys = { - 'closed': closed, - 'private': private, - 'commit': commit, - } - # The integer values were determined by checking HTML source of Rietveld on - # https://codereview.chromium.org/search. See also http://crbug.com/712060. - three_state_value_map = { - None: 1, # Unknown. - True: 2, # Yes. - False: 3, # No. - } - - url = '/search?format=json' - # Sort the keys mainly to ease testing. - for key in sorted(string_keys): - value = string_keys[key] - if value: - url += '&%s=%s' % (key, urllib2.quote(value)) - for key in sorted(three_state_keys): - value = three_state_keys[key] - if value is not None: - url += '&%s=%d' % (key, three_state_value_map[value]) - - if keys_only: - url += '&keys_only=True' - if with_messages: - url += '&with_messages=True' - if per_request: - url += '&limit=%d' % per_request - - cursor = '' - while True: - output = self.get(url + cursor) - if output.startswith('<'): - # It's an error message. Return as no result. 
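# [Editor's note: a JSON reply starts with '{', so a body starting with '<'
# is an HTML error page; the generator stops paginating instead of raising.]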
-        break
-      data = json.loads(output) or {}
-      if not data.get('results'):
-        break
-      for i in data['results']:
-        yield i
-      cursor = '&cursor=%s' % data['cursor']
-
-  def trigger_try_jobs(
-      self, issue, patchset, reason, clobber, revision, builders_and_tests,
-      master=None, category='cq'):
-    """Requests new try jobs.
-
-    |builders_and_tests| is a map of builders: [tests] to run.
-    |master| is the name of the try master the builders belong to.
-    |category| is used to distinguish regular jobs and experimental jobs.
-
-    Returns the keys of the new TryJobResult entities.
-    """
-    params = [
-      ('reason', reason),
-      ('clobber', 'True' if clobber else 'False'),
-      ('builders', json.dumps(builders_and_tests)),
-      ('xsrf_token', self.xsrf_token()),
-      ('category', category),
-    ]
-    if revision:
-      params.append(('revision', revision))
-    if master:
-      # Temporarily allow empty master names for old configurations. The try
-      # job will not be associated with a master name on rietveld. This is
-      # going to be deprecated.
-      params.append(('master', master))
-    return self.post('/%d/try/%d' % (issue, patchset), params)
-
-  def trigger_distributed_try_jobs(
-      self, issue, patchset, reason, clobber, revision, masters,
-      category='cq'):
-    """Requests new try jobs.
-
-    |masters| is a map of masters: map of builders: [tests] to run.
-    |category| is used to distinguish regular jobs and experimental jobs.
-    """
-    for (master, builders_and_tests) in masters.iteritems():
-      self.trigger_try_jobs(
-          issue, patchset, reason, clobber, revision, builders_and_tests,
-          master, category)
-
-  def get_pending_try_jobs(self, cursor=None, limit=100):
-    """Retrieves the try job requests in pending state.
-
-    Returns a tuple of the list of try jobs and the cursor for the next request.
-    """
-    url = '/get_pending_try_patchsets?limit=%d' % limit
-    extra = ('&cursor=' + cursor) if cursor else ''
-    data = json.loads(self.get(url + extra))
-    return data['jobs'], data['cursor']
-
-  def get(self, request_path, **kwargs):
-    kwargs.setdefault('payload', None)
-    return self._send(request_path, **kwargs)
-
-  def post(self, request_path, data, **kwargs):
-    ctype, body = upload.EncodeMultipartFormData(data, [])
-    return self._send(request_path, payload=body, content_type=ctype, **kwargs)
-
-  def _send(self, request_path, retry_on_404=False, **kwargs):
-    """Sends a POST/GET to Rietveld. Returns the response body."""
-    # rpc_server.Send() assumes timeout=None by default; make sure it's set
-    # to something reasonable.
-    kwargs.setdefault('timeout', 15)
-    logging.debug('POSTing to %s, args %s.', request_path, kwargs)
-    try:
-      # Sadly, upload.py calls ErrorExit() which does a sys.exit(1) on HTTP
-      # 500 in AbstractRpcServer.Send().
-      old_error_exit = upload.ErrorExit
-      def trap_http_500(msg):
-        """Converts an incorrect ErrorExit() call into an HTTPError exception."""
-        m = re.search(r'(50\d) Server Error', msg)
-        if m:
-          # Fake an HTTPError exception. Cheezy.
:( - raise urllib2.HTTPError( - request_path, int(m.group(1)), msg, None, StringIO.StringIO()) - old_error_exit(msg) - upload.ErrorExit = trap_http_500 - - for retry in xrange(self._maxtries): - try: - logging.debug('%s' % request_path) - return self.rpc_server.Send(request_path, **kwargs) - except urllib2.HTTPError as e: - if retry >= (self._maxtries - 1): - raise - flake_codes = {500, 502, 503} - if retry_on_404: - flake_codes.add(404) - if e.code not in flake_codes: - raise - except urllib2.URLError as e: - if retry >= (self._maxtries - 1): - raise - - def is_transient(): - # The idea here is to retry if the error isn't permanent. - # Unfortunately, there are so many different possible errors, - # that we end up enumerating those that are known to us to be - # transient. - # The reason can be a string or another exception, e.g., - # socket.error or whatever else. - reason_as_str = str(e.reason) - for retry_anyway in ( - 'Name or service not known', - 'EOF occurred in violation of protocol', - 'timed out', - # See http://crbug.com/601260. - '[Errno 10060] A connection attempt failed', - '[Errno 104] Connection reset by peer', - ): - if retry_anyway in reason_as_str: - return True - return False # Assume permanent otherwise. - if not is_transient(): - logging.error('Caught urllib2.URLError %s which wasn\'t deemed ' - 'transient', e.reason) - raise - except socket.error as e: - if retry >= (self._maxtries - 1): - raise - if not 'timed out' in str(e): - raise - # If reaching this line, loop again. Uses a small backoff. - time.sleep(min(10, 1+retry*2)) - except urllib2.HTTPError as e: - print('Request to %s failed: %s' % (e.geturl(), e.read())) - raise - finally: - upload.ErrorExit = old_error_exit - - # DEPRECATED. - Send = get - - -class OAuthRpcServer(object): - def __init__(self, - host, - client_email, - client_private_key, - private_key_password='notasecret', - user_agent=None, - timeout=None, - extra_headers=None): - """Wrapper around httplib2.Http() that handles authentication. - - client_email: email associated with the service account - client_private_key: encrypted private key, as a string - private_key_password: password used to decrypt the private key - """ - - # Enforce https - host_parts = urlparse.urlparse(host) - - if host_parts.scheme == 'https': # fine - self.host = host - elif host_parts.scheme == 'http': - upload.logging.warning('Changing protocol to https') - self.host = 'https' + host[4:] - else: - msg = 'Invalid url provided: %s' % host - upload.logging.error(msg) - raise ValueError(msg) - - self.host = self.host.rstrip('/') - - self.extra_headers = extra_headers or {} - - if not oa2client.HAS_OPENSSL: - logging.error("No support for OpenSSL has been found, " - "OAuth2 support requires it.") - logging.error("Installing pyopenssl will probably solve this issue.") - raise RuntimeError('No OpenSSL support') - self.creds = oa2client.SignedJwtAssertionCredentials( - client_email, - client_private_key, - 'https://www.googleapis.com/auth/userinfo.email', - private_key_password=private_key_password, - user_agent=user_agent) - - self._http = self.creds.authorize(httplib2.Http(timeout=timeout)) - - def Send(self, - request_path, - payload=None, - content_type='application/octet-stream', - timeout=None, - extra_headers=None, - **kwargs): - """Send a POST or GET request to the server. - - Args: - request_path: path on the server to hit. This is concatenated with the - value of 'host' provided to the constructor. 
- payload: request is a POST if not None, GET otherwise - timeout: in seconds - extra_headers: (dict) - - Returns: the HTTP response body as a string - - Raises: - urllib2.HTTPError - """ - # This method signature should match upload.py:AbstractRpcServer.Send() - method = 'GET' - - headers = self.extra_headers.copy() - headers.update(extra_headers or {}) - - if payload is not None: - method = 'POST' - headers['Content-Type'] = content_type - - prev_timeout = self._http.timeout - try: - if timeout: - self._http.timeout = timeout - url = self.host + request_path - if kwargs: - url += "?" + urllib.urlencode(kwargs) - - # This weird loop is there to detect when the OAuth2 token has expired. - # This is specific to appengine *and* rietveld. It relies on the - # assumption that a 302 is triggered only by an expired OAuth2 token. This - # prevents any usage of redirections in pages accessed this way. - - # This variable is used to make sure the following loop runs only twice. - redirect_caught = False - while True: - try: - ret = self._http.request(url, - method=method, - body=payload, - headers=headers, - redirections=0) - except httplib2.RedirectLimit: - if redirect_caught or method != 'GET': - logging.error('Redirection detected after logging in. Giving up.') - raise - redirect_caught = True - logging.debug('Redirection detected. Trying to log in again...') - self.creds.access_token = None - continue - break - - if ret[0].status >= 300: - raise urllib2.HTTPError( - request_path, int(ret[0]['status']), ret[1], None, - StringIO.StringIO()) - - return ret[1] - - finally: - self._http.timeout = prev_timeout - - -class JwtOAuth2Rietveld(Rietveld): - """Access to Rietveld using OAuth authentication. - - This class is supposed to be used only by bots, since this kind of - access is restricted to service accounts. - """ - # The parent__init__ is not called on purpose. - # pylint: disable=super-init-not-called - def __init__(self, - url, - client_email, - client_private_key_file, - private_key_password=None, - extra_headers=None, - maxtries=None): - - if private_key_password is None: # '' means 'empty password' - private_key_password = 'notasecret' - - self.url = url.rstrip('/') - bot_url = self.url - if self.url.endswith('googleplex.com'): - bot_url = self.url + '/bots' - - with open(client_private_key_file, 'rb') as f: - client_private_key = f.read() - logging.info('Using OAuth login: %s' % client_email) - self.rpc_server = OAuthRpcServer(bot_url, - client_email, - client_private_key, - private_key_password=private_key_password, - extra_headers=extra_headers or {}) - self._xsrf_token = None - self._xsrf_token_time = None - - self._maxtries = maxtries or 40 - - -class CachingRietveld(Rietveld): - """Caches the common queries. - - Not to be used in long-standing processes, like the commit queue. - """ - def __init__(self, *args, **kwargs): - super(CachingRietveld, self).__init__(*args, **kwargs) - self._cache = {} - - def _lookup(self, function_name, args, update): - """Caches the return values corresponding to the arguments. - - It is important that the arguments are standardized, like None vs False. 
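    (Editor's illustration, not in the original: two successive
    get_description(1) calls hit the network once; the deepcopy on return
    keeps callers from mutating the cached value in place.)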
- """ - function_cache = self._cache.setdefault(function_name, {}) - if args not in function_cache: - function_cache[args] = update(*args) - return copy.deepcopy(function_cache[args]) - - def get_description(self, issue, force=False): - if force: - return super(CachingRietveld, self).get_description(issue, force=force) - else: - return self._lookup( - 'get_description', - (issue,), - super(CachingRietveld, self).get_description) - - def get_issue_properties(self, issue, messages): - """Returns the issue properties. - - Because in practice the presubmit checks often ask without messages first - and then with messages, always ask with messages and strip off if not asked - for the messages. - """ - # It's a tad slower to request with the message but it's better than - # requesting the properties twice. - data = self._lookup( - 'get_issue_properties', - (issue, True), - super(CachingRietveld, self).get_issue_properties) - if not messages: - # Assumes self._lookup uses deepcopy. - del data['messages'] - return data - - def get_patchset_properties(self, issue, patchset): - return self._lookup( - 'get_patchset_properties', - (issue, patchset), - super(CachingRietveld, self).get_patchset_properties) - - -class ReadOnlyRietveld(object): - """ - Only provides read operations, and simulates writes locally. - - Intentionally do not inherit from Rietveld to avoid any write-issuing - logic to be invoked accidentally. - """ - - # Dictionary of local changes, indexed by issue number as int. - _local_changes = {} - - def __init__(self, *args, **kwargs): - # We still need an actual Rietveld instance to issue reads, just keep - # it hidden. - self._rietveld = Rietveld(*args, **kwargs) - - @classmethod - def _get_local_changes(cls, issue): - """Returns dictionary of local changes for |issue|, if any.""" - return cls._local_changes.get(issue, {}) - - @property - def url(self): - return self._rietveld.url - - def get_pending_issues(self): - pending_issues = self._rietveld.get_pending_issues() - - # Filter out issues we've closed or unchecked the commit checkbox. 
- return [issue for issue in pending_issues - if not self._get_local_changes(issue).get('closed', False) and - self._get_local_changes(issue).get('commit', True)] - - def close_issue(self, issue): # pylint:disable=no-self-use - logging.info('ReadOnlyRietveld: closing issue %d' % issue) - ReadOnlyRietveld._local_changes.setdefault(issue, {})['closed'] = True - - def get_issue_properties(self, issue, messages): - data = self._rietveld.get_issue_properties(issue, messages) - data.update(self._get_local_changes(issue)) - return data - - def get_patchset_properties(self, issue, patchset): - return self._rietveld.get_patchset_properties(issue, patchset) - - def get_depends_on_patchset(self, issue, patchset): - return self._rietveld.get_depends_on_patchset(issue, patchset) - - def get_patch(self, issue, patchset): - return self._rietveld.get_patch(issue, patchset) - - def update_description(self, issue, description): # pylint:disable=no-self-use - logging.info('ReadOnlyRietveld: new description for issue %d: %s' % - (issue, description)) - - def add_comment(self, # pylint:disable=no-self-use - issue, - message, - add_as_reviewer=False): - logging.info('ReadOnlyRietveld: posting comment "%s" to issue %d' % - (message, issue)) - - def set_flag(self, issue, patchset, flag, value): # pylint:disable=no-self-use - logging.info('ReadOnlyRietveld: setting flag "%s" to "%s" for issue %d' % - (flag, value, issue)) - ReadOnlyRietveld._local_changes.setdefault(issue, {})[flag] = value - - def set_flags(self, issue, patchset, flags): - for flag, value in flags.iteritems(): - self.set_flag(issue, patchset, flag, value) - - def trigger_try_jobs( # pylint:disable=no-self-use - self, issue, patchset, reason, clobber, revision, builders_and_tests, - master=None, category='cq'): - logging.info('ReadOnlyRietveld: triggering try jobs %r for issue %d' % - (builders_and_tests, issue)) - - def trigger_distributed_try_jobs( # pylint:disable=no-self-use - self, issue, patchset, reason, clobber, revision, masters, - category='cq'): - logging.info('ReadOnlyRietveld: triggering try jobs %r for issue %d' % - (masters, issue)) diff --git a/third_party/upload.py b/third_party/upload.py deleted file mode 100644 index 774a7c285..000000000 --- a/third_party/upload.py +++ /dev/null @@ -1,2565 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 -# -# Copyright 2007 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Tool for uploading diffs from a version control system to the codereview app. - -Usage summary: upload.py [options] [-- diff_options] [path...] - -Diff options are passed to the diff command of the underlying system. - -Supported version control systems: - Git - Mercurial - Subversion - Perforce - CVS - -It is important for Git/Mercurial users to specify a tree/node/branch to diff -against by using the '--rev' option. -""" -# This code is derived from appcfg.py in the App Engine SDK (open source), -# and from ASPN recipe #146306. 
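# [Editor's note: upload.py was vendored from the Rietveld project and is
# Python 2-only code (cookielib, urllib2, iteritems, raw_input); it is
# deleted wholesale along with the rest of the Rietveld support.]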
- -from __future__ import print_function - -import cookielib -import errno -import fnmatch -import getpass -import logging -import marshal -import mimetypes -import optparse -import os -import re -import socket -import subprocess -import sys -import urllib -import urllib2 -import urlparse - -from multiprocessing.pool import ThreadPool - -import appengine_mapper - -# The configparser module was renamed in Python 3. -try: - import configparser -except ImportError: - import ConfigParser as configparser - -# The md5 module was deprecated in Python 2.5. -try: - from hashlib import md5 -except ImportError: - from md5 import md5 - -try: - import readline -except ImportError: - pass - -try: - import keyring -except: - keyring = None - -# auth.py is a part of depot_tools. -# TODO(vadimsh): Merge upload.py into depot_tools -import auth - -# The logging verbosity: -# 0: Errors only. -# 1: Status messages. -# 2: Info logs. -# 3: Debug logs. -verbosity = 1 -LOGGER = logging.getLogger('upload') - -# The account type used for authentication. -# This line could be changed by the review server (see handler for -# upload.py). -AUTH_ACCOUNT_TYPE = "GOOGLE" - -# URL of the default review server. As for AUTH_ACCOUNT_TYPE, this line could be -# changed by the review server (see handler for upload.py). -DEFAULT_REVIEW_SERVER = "codereview.appspot.com" - -# Max size of patch or base file. -MAX_UPLOAD_SIZE = 900 * 1024 - - -# Constants for version control names. Used by GuessVCSName. -VCS_GIT = "Git" -VCS_MERCURIAL = "Mercurial" -VCS_SUBVERSION = "Subversion" -VCS_PERFORCE = "Perforce" -VCS_CVS = "CVS" -VCS_UNKNOWN = "Unknown" - -VCS = [ - {'name': VCS_MERCURIAL, - 'aliases': ['hg', 'mercurial']}, - {'name': VCS_SUBVERSION, - 'aliases': ['svn', 'subversion'],}, - {'name': VCS_PERFORCE, - 'aliases': ['p4', 'perforce']}, - {'name': VCS_GIT, - 'aliases': ['git']}, - {'name': VCS_CVS, - 'aliases': ['cvs']}, - ] - - -VCS_SHORT_NAMES = [] # hg, svn, ... -VCS_ABBREVIATIONS = {} # alias: name, ... -for vcs in VCS: - VCS_SHORT_NAMES.append(min(vcs['aliases'], key=len)) - VCS_ABBREVIATIONS.update((alias, vcs['name']) for alias in vcs['aliases']) - -UPLOAD_TIMEOUT = 120 -MAX_UPLOAD_ATTEMPTS = 3 - - -# The result of parsing Subversion's [auto-props] setting. -svn_auto_props_map = None - -def GetEmail(prompt): - """Prompts the user for their email address and returns it. - - The last used email address is saved to a file and offered up as a suggestion - to the user. If the user presses enter without typing in anything the last - used email address is used. If the user enters a new address, it is saved - for next time we prompt. - - """ - last_email_file_name = os.path.expanduser("~/.last_codereview_email_address") - last_email = "" - if os.path.exists(last_email_file_name): - try: - last_email_file = open(last_email_file_name, "r") - last_email = last_email_file.readline().strip("\n") - last_email_file.close() - prompt += " [%s]" % last_email - except IOError as e: - pass - email = raw_input(prompt + ": ").strip() - if email: - try: - last_email_file = open(last_email_file_name, "w") - last_email_file.write(email) - last_email_file.close() - except IOError as e: - pass - else: - email = last_email - return email - - -def StatusUpdate(msg): - """Print a status message to stdout. - - If 'verbosity' is greater than 0, print the message. - - Args: - msg: The string to print. 
- """ - if verbosity > 0: - print(msg) - - -def ErrorExit(msg): - """Print an error message to stderr and exit.""" - print(msg, file=sys.stderr) - sys.exit(1) - - -class ClientLoginError(urllib2.HTTPError): - """Raised to indicate there was an error authenticating with ClientLogin.""" - - def __init__(self, url, code, msg, headers, args): - urllib2.HTTPError.__init__(self, url, code, msg, headers, None) - self.args = args - self._reason = args["Error"] - self.info = args.get("Info", None) - - @property - def reason(self): - # reason is a property on python 2.7 but a member variable on <=2.6. - # self.args is modified so it cannot be used as-is so save the value in - # self._reason. - return self._reason - - -class AbstractRpcServer(object): - """Provides a common interface for a simple RPC server.""" - - def __init__(self, host, auth_function, host_override=None, - request_path_prefix=None, extra_headers=None, - save_cookies=False, account_type=AUTH_ACCOUNT_TYPE): - """Creates a new AbstractRpcServer. - - Args: - host: The host to send requests to. - auth_function: A function that takes no arguments and returns an - (email, password) tuple when called. Will be called if authentication - is required. - host_override: The host header to send to the server (defaults to host). - request_path_prefix: A string to prefix all URL paths with (e.g. 'bots/'). - extra_headers: A dict of extra headers to append to every request. - save_cookies: If True, save the authentication cookies to local disk. - If False, use an in-memory cookiejar instead. Subclasses must - implement this functionality. Defaults to False. - account_type: Account type used for authentication. Defaults to - AUTH_ACCOUNT_TYPE. - """ - self.host = host - if (not self.host.startswith("http://") and - not self.host.startswith("https://")): - self.host = "http://" + self.host - self.host_override = host_override - self.request_path_prefix = request_path_prefix or '' - self.auth_function = auth_function - self.authenticated = False - self.extra_headers = extra_headers or {} - self.save_cookies = save_cookies - self.account_type = account_type - self.opener = self._GetOpener() - if self.host_override: - LOGGER.info("Server: %s; Host: %s", self.host, self.host_override) - else: - LOGGER.info("Server: %s", self.host) - - def _GetOpener(self): - """Returns an OpenerDirector for making HTTP requests. - - Returns: - A urllib2.OpenerDirector object. - """ - raise NotImplementedError() - - def _CreateRequest(self, url, data=None): - """Creates a new urllib request.""" - LOGGER.debug("Creating request for: '%s' with payload:\n%s", url, data) - req = urllib2.Request(url, data=data, headers={"Accept": "text/plain"}) - if self.host_override: - req.add_header("Host", self.host_override) - for key, value in self.extra_headers.iteritems(): - req.add_header(key, value) - return req - - def _GetAuthToken(self, email, password, internal=False): - """Uses ClientLogin to authenticate the user, returning an auth token. - - Args: - email: The user's email address - password: The user's password - - Raises: - ClientLoginError: If there was an error authenticating with ClientLogin. - HTTPError: If there was some other form of HTTP error. - - Returns: - The authentication token returned by ClientLogin. - """ - account_type = self.account_type - if self.host.endswith(".google.com"): - # Needed for use inside Google. 
- account_type = "HOSTED" - service = ('ClientLogin') if not internal else ('ClientAuth') - req = self._CreateRequest( - url="https://www.google.com/accounts/%s" % (service,), - data=urllib.urlencode({ - "Email": email, - "Passwd": password, - "service": "ah", - "source": "rietveld-codereview-upload", - "accountType": account_type, - }), - ) - try: - response = self.opener.open(req) - response_body = response.read() - response_dict = dict(x.split("=") - for x in response_body.split("\n") if x) - return response_dict["Auth"] - except urllib2.HTTPError as e: - if e.code == 403: - body = e.read() - response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) - raise ClientLoginError(req.get_full_url(), e.code, e.msg, - e.headers, response_dict) - else: - raise - - def _GetAuthCookie(self, auth_token): - """Fetches authentication cookies for an authentication token. - - Args: - auth_token: The authentication token returned by ClientLogin. - - Raises: - HTTPError: If there was an error fetching the authentication cookies. - """ - # This is a dummy value to allow us to identify when we're successful. - continue_location = "http://localhost/" - args = {"continue": continue_location, "auth": auth_token} - req = self._CreateRequest("%s/_ah/login?%s" % - (self.host, urllib.urlencode(args))) - try: - response = self.opener.open(req) - except urllib2.HTTPError as e: - response = e - if (response.code != 302 or - response.info()["location"] != continue_location): - raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, - response.headers, response.fp) - self.authenticated = True - - def _Authenticate(self, force_refresh): - """Authenticates the user. - - The authentication process works as follows: - 1) We get a username and password from the user - 2) We use ClientLogin to obtain an AUTH token for the user - (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). - 3) We pass the auth token to /_ah/login on the server to obtain an - authentication cookie. If login was successful, it tries to redirect - us to the URL we provided. - - If we attempt to access the upload API without first obtaining an - authentication cookie, it returns a 401 response (or a 302) and - directs us to authenticate ourselves with ClientLogin. - """ - for i in range(3): - credentials = self.auth_function() - - # Try external, then internal. - e = None - error_map = None - try: - auth_token = self._GetAuthToken(credentials[0], credentials[1]) - except urllib2.HTTPError: - try: - # Try internal endpoint. - error_map = { - "badauth": "BadAuthentication", - "cr": "CaptchaRequired", - "adel": "AccountDeleted", - "adis": "AccountDisabled", - "sdis": "ServiceDisabled", - "ire": "ServiceUnavailable", - } - auth_token = self._GetAuthToken(credentials[0], credentials[1], - internal=True) - except ClientLoginError as exc: - e = exc - if e: - print('', file=sys.stderr) - error_message = e.reason - if error_map: - error_message = error_map.get(error_message, error_message) - if error_message == "BadAuthentication": - if e.info == "InvalidSecondFactor": - print >> sys.stderr, ( - "Use an application-specific password instead " - "of your regular account password.\n" - "See http://www.google.com/" - "support/accounts/bin/answer.py?answer=185833") - else: - print("Invalid username or password.", file=sys.stderr) - elif error_message == "CaptchaRequired": - print >> sys.stderr, ( - "Please go to\n" - "https://www.google.com/accounts/DisplayUnlockCaptcha\n" - "and verify you are a human. 
Then try again.\n"
- "If you are using a Google Apps account the URL is:\n"
- "https://www.google.com/a/yourdomain.com/UnlockCaptcha",
- file=sys.stderr)
- elif error_message == "NotVerified":
- print("Account not verified.", file=sys.stderr)
- elif error_message == "TermsNotAgreed":
- print("User has not agreed to TOS.", file=sys.stderr)
- elif error_message == "AccountDeleted":
- print("The user account has been deleted.", file=sys.stderr)
- elif error_message == "AccountDisabled":
- print("The user account has been disabled.", file=sys.stderr)
- break
- elif error_message == "ServiceDisabled":
- print("The user's access to the service has been disabled.",
- file=sys.stderr)
- elif error_message == "ServiceUnavailable":
- print("The service is not available; try again later.",
- file=sys.stderr)
- else:
- # Unknown error.
- raise e
- print('', file=sys.stderr)
- continue
- self._GetAuthCookie(auth_token)
- return
-
- def Send(self, request_path, payload=None,
- content_type="application/octet-stream",
- timeout=None,
- extra_headers=None,
- **kwargs):
- """Sends an RPC and returns the response.
-
- Args:
- request_path: The path to send the request to, e.g. /api/appversion/create.
- payload: The body of the request, or None to send an empty request.
- content_type: The Content-Type header to use.
- timeout: timeout in seconds; default None i.e. no timeout.
- (Note: for large requests on OS X, the timeout doesn't work right.)
- extra_headers: Dict containing additional HTTP headers that should be
- included in the request (string header names mapped to their values),
- or None to not include any additional headers.
- kwargs: Any keyword arguments are converted into query string parameters.
-
- Returns:
- The response body, as a string.
- """
- # TODO: Don't require authentication. Let the server say
- # whether it is necessary.
- if not self.authenticated and self.auth_function:
- self._Authenticate(force_refresh=False)
-
- old_timeout = socket.getdefaulttimeout()
- socket.setdefaulttimeout(timeout)
- auth_attempted = False
- try:
- tries = 0
- while True:
- tries += 1
- args = dict(kwargs)
- url = "%s%s%s" % (self.host, self.request_path_prefix, request_path)
- url = appengine_mapper.MapUrl(url)
- if args:
- url += "?" + urllib.urlencode(args)
- req = self._CreateRequest(url=url, data=payload)
- req.add_header("Content-Type", content_type)
- if extra_headers:
- for header, value in extra_headers.items():
- req.add_header(header, value)
- try:
- f = self.opener.open(req, timeout=70)
- response = f.read()
- f.close()
- return response
- except urllib2.HTTPError as e:
- if tries > 3:
- raise
- elif e.code in (302, 401, 403):
- if not self.auth_function:
- raise
- # Already tried force refresh, didn't help -> give up with error.
- if auth_attempted:
- raise auth.AuthenticationError(
- 'Access to %s is denied (server returned HTTP %d).'
- % (self.host, e.code))
- self._Authenticate(force_refresh=True)
- auth_attempted = True
- elif e.code == 301:
- # Handle permanent redirect manually.
- url = e.info()["location"]
- url_loc = urlparse.urlparse(url)
- self.host = '%s://%s' % (url_loc[0], url_loc[1])
- elif e.code >= 500:
- # TODO: We should error out on a 500, but the server is too flaky
- # for that at the moment.
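- # (A 5xx response is only logged here; the enclosing while loop then
- # retries the request, and gives up by re-raising once tries > 3.)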
- StatusUpdate('Upload got a 500 response: %d' % e.code)
- else:
- raise
- finally:
- socket.setdefaulttimeout(old_timeout)
-
-
-class HttpRpcServer(AbstractRpcServer):
- """Provides a simplified RPC-style interface for HTTP requests."""
-
- def _Authenticate(self, force_refresh):
- """Save the cookie jar after authentication."""
- if isinstance(self.auth_function, auth.Authenticator):
- try:
- access_token = self.auth_function.get_access_token(force_refresh)
- except auth.LoginRequiredError:
- # Attempt to make an unauthenticated request first if there are no
- # cached credentials. HttpRpcServer calls _Authenticate(
- # force_refresh=True) again if the unauthenticated request doesn't
- # work.
- if not force_refresh:
- return
- raise
- self.extra_headers['Authorization'] = 'Bearer %s' % (
- access_token.token,)
- else:
- super(HttpRpcServer, self)._Authenticate(force_refresh)
- if self.save_cookies:
- StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
- self.cookie_jar.save()
-
- def _GetOpener(self):
- """Returns an OpenerDirector that supports cookies and ignores redirects.
-
- Returns:
- A urllib2.OpenerDirector object.
- """
- opener = urllib2.OpenerDirector()
- opener.add_handler(urllib2.ProxyHandler())
- opener.add_handler(urllib2.UnknownHandler())
- opener.add_handler(urllib2.HTTPHandler())
- opener.add_handler(urllib2.HTTPDefaultErrorHandler())
- opener.add_handler(urllib2.HTTPSHandler())
- opener.add_handler(urllib2.HTTPErrorProcessor())
- if self.save_cookies:
- self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
- self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
- if os.path.exists(self.cookie_file):
- try:
- self.cookie_jar.load()
- self.authenticated = True
- StatusUpdate("Loaded authentication cookies from %s" %
- self.cookie_file)
- except (cookielib.LoadError, IOError):
- # Failed to load cookies - just ignore them.
- pass
- else:
- # Create an empty cookie file with mode 600
- fd = os.open(self.cookie_file, os.O_CREAT, 0o600)
- os.close(fd)
- # Always chmod the cookie file
- os.chmod(self.cookie_file, 0o600)
- else:
- # Don't save cookies across runs of upload.py.
- self.cookie_jar = cookielib.CookieJar()
- opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
- return opener
-
-
-class CondensedHelpFormatter(optparse.IndentedHelpFormatter):
- """Frees more horizontal space by removing indentation from group
- options and collapsing arguments between short and long, e.g.
- '-o ARG, --opt=ARG' to -o --opt ARG""" - - def format_heading(self, heading): - return "%s:\n" % heading - - def format_option(self, option): - self.dedent() - res = optparse.HelpFormatter.format_option(self, option) - self.indent() - return res - - def format_option_strings(self, option): - self.set_long_opt_delimiter(" ") - optstr = optparse.HelpFormatter.format_option_strings(self, option) - optlist = optstr.split(", ") - if len(optlist) > 1: - if option.takes_value(): - # strip METAVAR from all but the last option - optlist = [x.split()[0] for x in optlist[:-1]] + optlist[-1:] - optstr = " ".join(optlist) - return optstr - - -parser = optparse.OptionParser( - usage=("%prog [options] [-- diff_options] [path...]\n" - "See also: http://code.google.com/p/rietveld/wiki/UploadPyUsage"), - add_help_option=False, - formatter=CondensedHelpFormatter() - ) -parser.add_option("-h", "--help", action="store_true", - help="Show this help message and exit.") -parser.add_option("-y", "--assume_yes", action="store_true", - dest="assume_yes", default=False, - help="Assume that the answer to yes/no questions is 'yes'.") -# Logging -group = parser.add_option_group("Logging options") -group.add_option("-q", "--quiet", action="store_const", const=0, - dest="verbose", help="Print errors only.") -group.add_option("-v", "--verbose", action="store_const", const=2, - dest="verbose", default=1, - help="Print info level logs.") -group.add_option("--noisy", action="store_const", const=3, - dest="verbose", help="Print all logs.") -group.add_option("--print_diffs", dest="print_diffs", action="store_true", - help="Print full diffs.") -# Review server -group = parser.add_option_group("Review server options") -group.add_option("-s", "--server", action="store", dest="server", - default=DEFAULT_REVIEW_SERVER, - metavar="SERVER", - help=("The server to upload to. The format is host[:port]. " - "Defaults to '%default'.")) -group.add_option("-e", "--email", action="store", dest="email", - metavar="EMAIL", default=None, - help="The username to use. Will prompt if omitted.") -group.add_option("-j", "--number-parallel-uploads", - dest="num_upload_threads", default=8, - help="Number of uploads to do in parallel.") -# Authentication -auth.add_auth_options(parser) -# Issue -group = parser.add_option_group("Issue options") -group.add_option("-t", "--title", action="store", dest="title", - help="New issue subject or new patch set title") -group.add_option("--project", action="store", dest="project", - help="The project the issue belongs to") -group.add_option("-m", "--message", action="store", dest="message", - default=None, - help="New issue description or new patch set message") -group.add_option("-F", "--file", action="store", dest="file", - default=None, help="Read the message above from file.") -group.add_option("-r", "--reviewers", action="store", dest="reviewers", - metavar="REVIEWERS", default=None, - help="Add reviewers (comma separated email addresses).") -group.add_option("--cc", action="store", dest="cc", - metavar="CC", default=None, - help="Add CC (comma separated email addresses).") -group.add_option("--private", action="store_true", dest="private", - default=False, - help="Make the issue restricted to reviewers and those CCed") -# Upload options -group = parser.add_option_group("Patch options") -group.add_option("-i", "--issue", type="int", action="store", - metavar="ISSUE", default=None, - help="Issue number to which to add. 
Defaults to new issue.")
-group.add_option("--target_ref", action="store", dest="target_ref",
- default=None,
- help="The target ref that is transitively tracked by the "
- "local branch this patch comes from.")
-parser.add_option("--cq_dry_run", action="store_true",
- help="Send the patchset to do a CQ dry run right after "
- "upload.")
-parser.add_option("--depends_on_patchset", action="store",
- dest="depends_on_patchset",
- help="The uploaded patchset this patchset depends on. The "
- "value will be in this format: issue_num:patchset_num")
-group.add_option("--download_base", action="store_true",
- dest="download_base", default=False,
- help="Base files will be downloaded by the server "
- "(side-by-side diffs may not work on files with CRs).")
-group.add_option("--rev", action="store", dest="revision",
- metavar="REV", default=None,
- help="Base revision/branch/tree to diff against. Use "
- "rev1:rev2 range to review already committed changeset.")
-group.add_option("--send_mail", action="store_true",
- dest="send_mail", default=False,
- help="Send notification email to reviewers.")
-group.add_option("-p", "--send_patch", action="store_true",
- dest="send_patch", default=False,
- help="Same as --send_mail, but include diff as an "
- "attachment, and prepend email subject with 'PATCH:'.")
-group.add_option("--vcs", action="store", dest="vcs",
- metavar="VCS", default=None,
- help=("Explicitly specify version control system (%s)"
- % ", ".join(VCS_SHORT_NAMES)))
-group.add_option("--emulate_svn_auto_props", action="store_true",
- dest="emulate_svn_auto_props", default=False,
- help=("Emulate Subversion's auto properties feature."))
-# Git-specific
-group = parser.add_option_group("Git-specific options")
-group.add_option("--git_similarity", action="store", dest="git_similarity",
- metavar="SIM", type="int", default=50,
- help=("Set the minimum similarity percentage for detecting "
- "renames and copies. See `git diff -C`. (default 50)."))
-group.add_option("--git_find_copies_harder", action="store_true", default=False,
- dest='git_find_copies_harder',
- help="Adds --find-copies-harder when searching for copies")
-group.add_option("--git_no_find_copies", action="store_false", default=True,
- dest="git_find_copies",
- help=("Prevents git from looking for copies (copy detection "
- "is on by default)."))
-# Perforce-specific
-group = parser.add_option_group("Perforce-specific options "
- "(overrides P4 environment variables)")
-group.add_option("--p4_port", action="store", dest="p4_port",
- metavar="P4_PORT", default=None,
- help=("Perforce server and port (optional)"))
-group.add_option("--p4_changelist", action="store", dest="p4_changelist",
- metavar="P4_CHANGELIST", default=None,
- help=("Perforce changelist id"))
-group.add_option("--p4_client", action="store", dest="p4_client",
- metavar="P4_CLIENT", default=None,
- help=("Perforce client/workspace"))
-group.add_option("--p4_user", action="store", dest="p4_user",
- metavar="P4_USER", default=None,
- help=("Perforce user"))
-
-
-class KeyringCreds(object):
- def __init__(self, server, host, email):
- self.server = server
- # Explicitly cast host to str to work around bug in old versions of Keyring
- # (versions before 0.10). Even though newer versions of Keyring fix this,
- # some modern Linux distributions (such as Ubuntu 12.04) still bundle a
- # version with the bug.
- self.host = str(host)
- self.email = email
- self.accounts_seen = set()
-
- def GetUserCredentials(self):
- """Prompts the user for a username and password.
-
- Only use keyring on the initial call.
If the keyring contains the wrong - password, we want to give the user a chance to enter another one. - """ - # Create a local alias to the email variable to avoid Python's crazy - # scoping rules. - global keyring - email = self.email - if email is None: - email = GetEmail("Email (login for uploading to %s)" % self.server) - password = None - if keyring and not email in self.accounts_seen: - try: - password = keyring.get_password(self.host, email) - except: - # Sadly, we have to trap all errors here as - # gnomekeyring.IOError inherits from object. :/ - print("Failed to get password from keyring") - keyring = None - if password is not None: - print("Using password from system keyring.") - self.accounts_seen.add(email) - else: - password = getpass.getpass("Password for %s: " % email) - if keyring: - answer = raw_input("Store password in system keyring?(y/N) ").strip() - if answer == "y": - keyring.set_password(self.host, email, password) - self.accounts_seen.add(email) - return (email, password) - - -def GetRpcServer(server, auth_config=None, email=None): - """Returns an instance of an AbstractRpcServer. - - Args: - server: String containing the review server URL. - auth_config: auth.AuthConfig tuple with OAuth2 configuration. - email: String containing user's email address [deprecated]. - - Returns: - A new HttpRpcServer, on which RPC calls can be made. - """ - # If email is given as an empty string or no auth config is passed, then - # assume we want to make requests that do not need authentication. Bypass - # authentication by setting the auth_function to None. - if email == '' or not auth_config: - return HttpRpcServer(server, None) - - # If this is the dev_appserver, use fake authentication. - host = server.lower() - if re.match(r'(http://)?localhost([:/]|$)', host): - if email is None: - email = "test@example.com" - LOGGER.info("Using debug user %s. Override with --email" % email) - server = HttpRpcServer( - server, - lambda: (email, "password"), - extra_headers={"Cookie": - 'dev_appserver_login="%s:False"' % email}, - save_cookies=auth_config.save_cookies, - account_type=AUTH_ACCOUNT_TYPE) - # Don't try to talk to ClientLogin. - server.authenticated = True - return server - - if auth_config.use_oauth2: - auth_func = auth.get_authenticator_for_host(server, auth_config) - else: - auth_func = KeyringCreds(server, host, email).GetUserCredentials - - # HACK(crbug.com/476690): Internal Rietveld is configured to require cookie - # auth for all paths except /bots/* (requests to /bots/* are authenticated - # with OAuth). /bots/* paths expose exact same API as /* (at least enough of - # it for depot_tools to work). So when using OAuth with internal Rietveld, - # silently prefix all requests with '/bots'. - request_path_prefix = '' - if auth_config.use_oauth2: - if not host.startswith(('http://', 'https://')): - host = 'https://' + host - parsed = urlparse.urlparse(host) - if parsed.netloc.endswith('.googleplex.com'): - request_path_prefix = '/bots' - - return HttpRpcServer( - server, - auth_func, - request_path_prefix=request_path_prefix, - save_cookies=auth_config.save_cookies, - account_type=AUTH_ACCOUNT_TYPE) - - -def EncodeMultipartFormData(fields, files): - """Encode form fields for multipart/form-data. - - Args: - fields: A sequence of (name, value) elements for regular form fields. - files: A sequence of (name, filename, value) elements for data to be - uploaded as files. - Returns: - (content_type, body) ready for httplib.HTTP instance. 
- - Source: - http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306 - """ - BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-%s-' % sum(hash(f) for f in files) - CRLF = '\r\n' - lines = [] - for (key, value) in fields: - lines.append('--' + BOUNDARY) - lines.append('Content-Disposition: form-data; name="%s"' % key) - lines.append('') - if isinstance(value, unicode): - value = value.encode('utf-8') - lines.append(value) - for (key, filename, value) in files: - lines.append('--' + BOUNDARY) - lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' % - (key, filename)) - lines.append('Content-Type: %s' % GetContentType(filename)) - lines.append('') - if isinstance(value, unicode): - value = value.encode('utf-8') - lines.append(value) - lines.append('--' + BOUNDARY + '--') - lines.append('') - body = CRLF.join(lines) - content_type = 'multipart/form-data; boundary=%s' % BOUNDARY - return content_type, body - - -def GetContentType(filename): - """Helper to guess the content-type from the filename.""" - return mimetypes.guess_type(filename)[0] or 'application/octet-stream' - - -# Use a shell for subcommands on Windows to get a PATH search. -use_shell = sys.platform.startswith("win") - -def RunShellWithReturnCodeAndStderr(command, universal_newlines=True, - env=os.environ): - """Run a command and return output from stdout, stderr and the return code. - - Args: - command: Command to execute. - universal_newlines: Use universal_newlines flag (default: True). - - Returns: - Tuple (stdout, stderr, return code) - """ - LOGGER.info("Running %s", command) - env = env.copy() - env['LC_MESSAGES'] = 'C' - p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, - shell=use_shell, universal_newlines=universal_newlines, - env=env) - output, errout = p.communicate() - p.stdout.close() - p.stderr.close() - return output, errout, p.returncode - -def RunShellWithReturnCode(command, universal_newlines=True, env=os.environ): - """Run a command and return output from stdout and the return code.""" - out, err, retcode = RunShellWithReturnCodeAndStderr(command, - universal_newlines, env) - return out, retcode - -def RunShell(command, silent_ok=False, universal_newlines=True, - env=os.environ): - data, retcode = RunShellWithReturnCode(command, universal_newlines, env) - if retcode: - ErrorExit("Got error status from %s:\n%s" % (command, data)) - if not silent_ok and not data: - ErrorExit("No output from %s" % command) - return data - - -class VersionControlSystem(object): - """Abstract base class providing an interface to the VCS.""" - - def __init__(self, options): - """Constructor. - - Args: - options: Command line options. - """ - self.options = options - - def GetGUID(self): - """Return string to distinguish the repository from others, for example to - query all opened review issues for it""" - raise NotImplementedError( - "abstract method -- subclass %s must override" % self.__class__) - - def PostProcessDiff(self, diff): - """Return the diff with any special post processing this VCS needs, e.g. - to include an svn-style "Index:".""" - return diff - - def GenerateDiff(self, args): - """Return the current diff as a string. - - Args: - args: Extra arguments to pass to the diff command. 
- """ - raise NotImplementedError( - "abstract method -- subclass %s must override" % self.__class__) - - def GetUnknownFiles(self): - """Return a list of files unknown to the VCS.""" - raise NotImplementedError( - "abstract method -- subclass %s must override" % self.__class__) - - def CheckForUnknownFiles(self): - """Show an "are you sure?" prompt if there are unknown files.""" - unknown_files = self.GetUnknownFiles() - if unknown_files: - print("The following files are not added to version control:") - for line in unknown_files: - print(line) - prompt = "Are you sure to continue?(y/N) " - answer = raw_input(prompt).strip() - if answer != "y": - ErrorExit("User aborted") - - def GetBaseFile(self, filename): - """Get the content of the upstream version of a file. - - Returns: - A tuple (base_content, new_content, is_binary, status) - base_content: The contents of the base file. - new_content: For text files, this is empty. For binary files, this is - the contents of the new file, since the diff output won't contain - information to reconstruct the current file. - is_binary: True iff the file is binary. - status: The status of the file. - """ - - raise NotImplementedError( - "abstract method -- subclass %s must override" % self.__class__) - - def GetBaseFiles(self, diff): - """Helper that calls GetBase file for each file in the patch. - - Returns: - A dictionary that maps from filename to GetBaseFile's tuple. Filenames - are retrieved based on lines that start with "Index:" or - "Property changes on:". - """ - files = {} - for line in diff.splitlines(True): - if line.startswith('Index:') or line.startswith('Property changes on:'): - unused, filename = line.split(':', 1) - # On Windows if a file has property changes its filename uses '\' - # instead of '/'. - filename = filename.strip().replace('\\', '/') - files[filename] = self.GetBaseFile(filename) - return files - - def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options, - files): - """Uploads the base files (and if necessary, the current ones as well).""" - - def UploadFile(filename, file_id, content, is_binary, status, is_base): - """Uploads a file to the server.""" - file_too_large = False - if is_base: - type = "base" - else: - type = "current" - if len(content) > MAX_UPLOAD_SIZE: - result = ("Not uploading the %s file for %s because it's too large." % - (type, filename)) - file_too_large = True - content = "" - elif options.verbose: - result = "Uploading %s file for %s" % (type, filename) - checksum = md5(content).hexdigest() - url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) - form_fields = [("filename", filename), - ("status", status), - ("checksum", checksum), - ("is_binary", str(is_binary)), - ("is_current", str(not is_base)), - ] - if file_too_large: - form_fields.append(("file_too_large", "1")) - if options.email: - form_fields.append(("user", options.email)) - ctype, body = EncodeMultipartFormData(form_fields, - [("data", filename, content)]) - try: - response_body = rpc_server.Send(url, body, content_type=ctype) - except urllib2.HTTPError as e: - response_body = ("Failed to upload file for %s. Got %d status code." 
%
- (filename, e.code))
-
- if not response_body.startswith("OK"):
- StatusUpdate(" --> %s" % response_body)
- sys.exit(1)
-
- return result
-
- patches = dict()
- for k, v in patch_list:
- patches.setdefault(v, k)
-
- def uploadAttempt():
- threads = []
- thread_pool = ThreadPool(options.num_upload_threads)
-
- for filename in patches.keys():
- base_content, new_content, is_binary, status = files[filename]
- file_id_str = patches.get(filename)
- if file_id_str.find("nobase") != -1:
- base_content = None
- file_id_str = file_id_str[file_id_str.rfind("_") + 1:]
- file_id = int(file_id_str)
- if base_content is not None:
- t = thread_pool.apply_async(UploadFile, args=(filename,
- file_id, base_content, is_binary, status, True))
- threads.append(t)
- if new_content is not None:
- t = thread_pool.apply_async(UploadFile, args=(filename,
- file_id, new_content, is_binary, status, False))
- threads.append(t)
-
- for t in threads:
- print(t.get(timeout=UPLOAD_TIMEOUT))
-
- success = False
- for _ in range(MAX_UPLOAD_ATTEMPTS):
- try:
- uploadAttempt()
- success = True
- break
- except multiprocessing.TimeoutError:
- LOGGER.warning('Timeout error while uploading, retrying...')
-
- if not success:
- raise IOError(
- '%d consecutive timeout errors, aborting!' % MAX_UPLOAD_ATTEMPTS)
-
- def IsImage(self, filename):
- """Returns true if the filename has an image extension."""
- mimetype = mimetypes.guess_type(filename)[0]
- if not mimetype:
- return False
- return (mimetype.startswith("image/") and
- not mimetype.startswith("image/svg"))
-
- def IsBinaryData(self, data):
- """Returns true if data contains a null byte."""
- # Derived from Mercurial's heuristic; see
- # http://selenic.com/hg/file/848a6658069e/mercurial/util.py#l229
- return bool(data and "\0" in data)
-
- def GetMostRecentCommitSummary(self):
- """Returns a one line summary of the current commit."""
- return ""
-
-
-class SubversionVCS(VersionControlSystem):
- """Implementation of the VersionControlSystem interface for Subversion."""
-
- def __init__(self, options):
- super(SubversionVCS, self).__init__(options)
- if self.options.revision:
- match = re.match(r"(\d+)(:(\d+))?", self.options.revision)
- if not match:
- ErrorExit("Invalid Subversion revision %s." % self.options.revision)
- self.rev_start = match.group(1)
- self.rev_end = match.group(3)
- else:
- self.rev_start = self.rev_end = None
- # Cache output from "svn list -r REVNO dirname".
- # Keys: dirname, Values: 2-tuple (output for start rev and end rev).
- self.svnls_cache = {}
- # Base URL is required to fetch files deleted in an older revision.
- # Result is cached to not guess it over and over again in GetBaseFile().
- required = self.options.download_base or self.options.revision is not None
- self.svn_base = self._GuessBase(required)
-
- def GetGUID(self):
- return self._GetInfo("Repository UUID")
-
- def GuessBase(self, required):
- """Wrapper for _GuessBase."""
- return self.svn_base
-
- def _GuessBase(self, required):
- """Returns base URL for current diff.
-
- Args:
- required: If true, exits if the url can't be guessed, otherwise None is
- returned.
- """ - url = self._GetInfo("URL") - if url: - scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) - guess = "" - # TODO(anatoli) - repository specific hacks should be handled by server - if netloc == "svn.python.org" and scheme == "svn+ssh": - path = "projects" + path - scheme = "http" - guess = "Python " - elif netloc.endswith(".googlecode.com"): - scheme = "http" - guess = "Google Code " - path = path + "/" - base = urlparse.urlunparse((scheme, netloc, path, params, - query, fragment)) - LOGGER.info("Guessed %sbase = %s", guess, base) - return base - if required: - ErrorExit("Can't find URL in output from svn info") - return None - - def _GetInfo(self, key): - """Parses 'svn info' for current dir. Returns value for key or None""" - for line in RunShell(["svn", "info"]).splitlines(): - if line.startswith(key + ": "): - return line.split(":", 1)[1].strip() - - def _EscapeFilename(self, filename): - """Escapes filename for SVN commands.""" - if "@" in filename and not filename.endswith("@"): - filename = "%s@" % filename - return filename - - def GenerateDiff(self, args): - cmd = ["svn", "diff"] - if self.options.revision: - cmd += ["-r", self.options.revision] - cmd.extend(args) - data = RunShell(cmd) - count = 0 - for line in data.splitlines(): - if line.startswith("Index:") or line.startswith("Property changes on:"): - count += 1 - LOGGER.info(line) - if not count: - ErrorExit("No valid patches found in output from svn diff") - return data - - def _CollapseKeywords(self, content, keyword_str): - """Collapses SVN keywords.""" - # svn cat translates keywords but svn diff doesn't. As a result of this - # behavior patching.PatchChunks() fails with a chunk mismatch error. - # This part was originally written by the Review Board development team - # who had the same problem (http://reviews.review-board.org/r/276/). - # Mapping of keywords to known aliases - svn_keywords = { - # Standard keywords - 'Date': ['Date', 'LastChangedDate'], - 'Revision': ['Revision', 'LastChangedRevision', 'Rev'], - 'Author': ['Author', 'LastChangedBy'], - 'HeadURL': ['HeadURL', 'URL'], - 'Id': ['Id'], - - # Aliases - 'LastChangedDate': ['LastChangedDate', 'Date'], - 'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'], - 'LastChangedBy': ['LastChangedBy', 'Author'], - 'URL': ['URL', 'HeadURL'], - } - - def repl(m): - if m.group(2): - return "$%s::%s$" % (m.group(1), " " * len(m.group(3))) - return "$%s$" % m.group(1) - - keywords = [keyword - for name in keyword_str.split(" ") - for keyword in svn_keywords.get(name, [])] - return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content) - - def GetUnknownFiles(self): - status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True) - unknown_files = [] - for line in status.split("\n"): - if line and line[0] == "?": - unknown_files.append(line) - return unknown_files - - def ReadFile(self, filename): - """Returns the contents of a file.""" - file = open(filename, 'rb') - result = "" - try: - result = file.read() - finally: - file.close() - return result - - def GetStatus(self, filename): - """Returns the status of a file.""" - if not self.options.revision: - status = RunShell(["svn", "status", "--ignore-externals", - self._EscapeFilename(filename)]) - if not status: - ErrorExit("svn status returned no output for %s" % filename) - status_lines = status.splitlines() - # If file is in a cl, the output will begin with - # "\n--- Changelist 'cl_name':\n". 
See
- # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
- if (len(status_lines) == 3 and
- not status_lines[0] and
- status_lines[1].startswith("--- Changelist")):
- status = status_lines[2]
- else:
- status = status_lines[0]
- # If we have a revision to diff against we need to run "svn list"
- # for the old and the new revision and compare the results to get
- # the correct status for a file.
- else:
- dirname, relfilename = os.path.split(filename)
- if dirname not in self.svnls_cache:
- cmd = ["svn", "list", "-r", self.rev_start,
- self._EscapeFilename(dirname) or "."]
- out, err, returncode = RunShellWithReturnCodeAndStderr(cmd)
- if returncode:
- # Directory might not yet exist at start revision
- # svn: Unable to find repository location for 'abc' in revision nnn
- if re.match('^svn: Unable to find repository location '
- r'for .+ in revision \d+', err):
- old_files = ()
- else:
- ErrorExit("Failed to get status for %s:\n%s" % (filename, err))
- else:
- old_files = out.splitlines()
- args = ["svn", "list"]
- if self.rev_end:
- args += ["-r", self.rev_end]
- cmd = args + [self._EscapeFilename(dirname) or "."]
- out, returncode = RunShellWithReturnCode(cmd)
- if returncode:
- ErrorExit("Failed to run command %s" % cmd)
- self.svnls_cache[dirname] = (old_files, out.splitlines())
- old_files, new_files = self.svnls_cache[dirname]
- if relfilename in old_files and relfilename not in new_files:
- status = "D "
- elif relfilename in old_files and relfilename in new_files:
- status = "M "
- else:
- status = "A "
- return status
-
- def GetBaseFile(self, filename):
- status = self.GetStatus(filename)
- base_content = None
- new_content = None
-
- # If a file is copied its status will be "A +", which signifies
- # "addition-with-history". See "svn st" for more information. We need to
- # upload the original file or else diff parsing will fail if the file was
- # edited.
- if status[0] == "A" and status[3] != "+":
- # We'll need to upload the new content if we're adding a binary file
- # since diff's output won't contain it.
- mimetype = RunShell(["svn", "propget", "svn:mime-type",
- self._EscapeFilename(filename)], silent_ok=True)
- base_content = ""
- is_binary = bool(mimetype) and not mimetype.startswith("text/")
- if is_binary:
- new_content = self.ReadFile(filename)
- elif (status[0] in ("M", "D", "R") or
- (status[0] == "A" and status[3] == "+") or # Copied file.
- (status[0] == " " and status[1] == "M")): # Property change.
- args = []
- if self.options.revision:
- # filename must not be escaped. We already append an '@' revision
- # suffix here.
- url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start)
- else:
- # Don't change filename, it's needed later.
- url = filename
- args += ["-r", "BASE"]
- cmd = ["svn"] + args + ["propget", "svn:mime-type", url]
- mimetype, returncode = RunShellWithReturnCode(cmd)
- if returncode:
- # File does not exist in the requested revision.
- # Reset mimetype, it contains an error message.
- mimetype = ""
- else:
- mimetype = mimetype.strip()
- get_base = False
- # This test for binary is exactly the test prescribed by the
- # official SVN docs at
- # http://subversion.apache.org/faq.html#binary-files
- is_binary = (bool(mimetype) and
- not mimetype.startswith("text/") and
- mimetype not in ("image/x-xbitmap", "image/x-xpixmap"))
- if status[0] == " ":
- # Empty base content just to force an upload.
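- # (A leading " " in the status means a property-only change: the text
- # itself is unchanged, so an empty base is enough to trigger an upload.)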
- base_content = "" - elif is_binary: - get_base = True - if status[0] == "M": - if not self.rev_end: - new_content = self.ReadFile(filename) - else: - url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end) - new_content = RunShell(["svn", "cat", url], - universal_newlines=True, silent_ok=True) - else: - get_base = True - - if get_base: - if is_binary: - universal_newlines = False - else: - universal_newlines = True - if self.rev_start: - # "svn cat -r REV delete_file.txt" doesn't work. cat requires - # the full URL with "@REV" appended instead of using "-r" option. - url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) - base_content = RunShell(["svn", "cat", url], - universal_newlines=universal_newlines, - silent_ok=True) - else: - base_content, ret_code = RunShellWithReturnCode( - ["svn", "cat", self._EscapeFilename(filename)], - universal_newlines=universal_newlines) - if ret_code and status[0] == "R": - # It's a replaced file without local history (see issue208). - # The base file needs to be fetched from the server. - url = "%s/%s" % (self.svn_base, filename) - base_content = RunShell(["svn", "cat", url], - universal_newlines=universal_newlines, - silent_ok=True) - elif ret_code: - ErrorExit("Got error status from 'svn cat %s'" % filename) - if not is_binary: - args = [] - if self.rev_start: - url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) - else: - url = filename - args += ["-r", "BASE"] - cmd = ["svn"] + args + ["propget", "svn:keywords", url] - keywords, returncode = RunShellWithReturnCode(cmd) - if keywords and not returncode: - base_content = self._CollapseKeywords(base_content, keywords) - else: - StatusUpdate("svn status returned unexpected output: %s" % status) - sys.exit(1) - return base_content, new_content, is_binary, status[0:5] - - -class GitVCS(VersionControlSystem): - """Implementation of the VersionControlSystem interface for Git.""" - - def __init__(self, options): - super(GitVCS, self).__init__(options) - # Map of filename -> (hash before, hash after) of base file. - # Hashes for "no such file" are represented as None. - self.hashes = {} - # Map of new filename -> old filename for renames. - self.renames = {} - - def GetGUID(self): - remote, retcode = RunShellWithReturnCode( - "git config remote.origin.url".split()) - if not retcode: - return remote.strip() - revlist = RunShell("git rev-list --parents HEAD".split()).splitlines() - # M-A: Return the 1st root hash, there could be multiple when a - # subtree is merged. In that case, more analysis would need to - # be done to figure out which HEAD is the 'most representative'. - for r in revlist: - if ' ' not in r: - return r - - def PostProcessDiff(self, gitdiff): - """Converts the diff output to include an svn-style "Index:" line as well - as record the hashes of the files, so we can upload them along with our - diff.""" - # Special used by git to indicate "no such content". - NULL_HASH = "0"*40 - - def IsFileNew(filename): - return filename in self.hashes and self.hashes[filename][0] is None - - def AddSubversionPropertyChange(filename): - """Add svn's property change information into the patch if given file is - new file. - - We use Subversion's auto-props setting to retrieve its property. - See http://svnbook.red-bean.com/en/1.1/ch07.html#svn-ch-7-sect-1.3.2 for - Subversion's [auto-props] setting. 
- """ - if self.options.emulate_svn_auto_props and IsFileNew(filename): - svnprops = GetSubversionPropertyChanges(filename) - if svnprops: - svndiff.append("\n" + svnprops + "\n") - - svndiff = [] - filecount = 0 - filename = None - for line in gitdiff.splitlines(): - match = re.match(r"diff --git a/(.*) b/(.*)$", line) - if match: - # Add auto property here for previously seen file. - if filename is not None: - AddSubversionPropertyChange(filename) - filecount += 1 - # Intentionally use the "after" filename so we can show renames. - filename = match.group(2) - svndiff.append("Index: %s\n" % filename) - if match.group(1) != match.group(2): - self.renames[match.group(2)] = match.group(1) - else: - # The "index" line in a git diff looks like this (long hashes elided): - # index 82c0d44..b2cee3f 100755 - # We want to save the left hash, as that identifies the base file. - match = re.match(r"index (\w+)\.\.(\w+)", line) - if match: - before, after = (match.group(1), match.group(2)) - if before == NULL_HASH: - before = None - if after == NULL_HASH: - after = None - self.hashes[filename] = (before, after) - svndiff.append(line + "\n") - if not filecount: - ErrorExit("No valid patches found in output from git diff") - # Add auto property for the last seen file. - assert filename is not None - AddSubversionPropertyChange(filename) - return "".join(svndiff) - - def GenerateDiff(self, extra_args): - extra_args = extra_args[:] - if self.options.revision: - if ":" in self.options.revision: - extra_args = self.options.revision.split(":", 1) + extra_args - else: - extra_args = [self.options.revision] + extra_args - - # --no-ext-diff is broken in some versions of Git, so try to work around - # this by overriding the environment (but there is still a problem if the - # git config key "diff.external" is used). - env = os.environ.copy() - if "GIT_EXTERNAL_DIFF" in env: - del env["GIT_EXTERNAL_DIFF"] - # 'cat' is a magical git string that disables pagers on all platforms. - env["GIT_PAGER"] = "cat" - - # -M/-C will not print the diff for the deleted file when a file is renamed. - # This is confusing because the original file will not be shown on the - # review when a file is renamed. So, get a diff with ONLY deletes, then - # append a diff (with rename detection), without deletes. - cmd = [ - "git", "diff", "--no-color", "--no-ext-diff", "--full-index", - "--ignore-submodules", "--src-prefix=a/", "--dst-prefix=b/", - ] - diff = RunShell( - cmd + ["--no-renames", "--diff-filter=D"] + extra_args, - env=env, silent_ok=True) - assert 0 <= self.options.git_similarity <= 100 - if self.options.git_find_copies: - similarity_options = ["-l100000", "-C%d%%" % self.options.git_similarity] - if self.options.git_find_copies_harder: - similarity_options.append("--find-copies-harder") - else: - similarity_options = ["-M%d%%" % self.options.git_similarity ] - diff += RunShell( - cmd + ["--diff-filter=AMCRT"] + similarity_options + extra_args, - env=env, silent_ok=True) - - # The CL could be only file deletion or not. So accept silent diff for both - # commands then check for an empty diff manually. 
- if not diff:
- ErrorExit("No output from %s" % (cmd + extra_args))
- return diff
-
- def GetUnknownFiles(self):
- status = RunShell(["git", "ls-files", "--exclude-standard", "--others"],
- silent_ok=True)
- return status.splitlines()
-
- def GetFileContent(self, file_hash):
- """Returns the content of a file identified by its git hash."""
- data, retcode = RunShellWithReturnCode(["git", "show", file_hash],
- universal_newlines=False)
- if retcode:
- ErrorExit("Got error status from 'git show %s'" % file_hash)
- return data
-
- def GetBaseFile(self, filename):
- hash_before, hash_after = self.hashes.get(filename, (None, None))
- base_content = None
- new_content = None
- status = None
-
- if filename in self.renames:
- status = "A +" # Match svn attribute name for renames.
- if filename not in self.hashes:
- # If a rename doesn't change the content, we never get a hash.
- base_content = RunShell(
- ["git", "show", "HEAD:" + filename], silent_ok=True,
- universal_newlines=False)
- elif not hash_before:
- status = "A"
- base_content = ""
- elif not hash_after:
- status = "D"
- else:
- status = "M"
-
- # Grab the before/after content if we need it.
- # Grab the base content if we don't have it already.
- if base_content is None and hash_before:
- base_content = self.GetFileContent(hash_before)
-
- is_binary = self.IsImage(filename)
- if base_content:
- is_binary = is_binary or self.IsBinaryData(base_content)
-
- # Only include the "after" file if it's an image; otherwise it
- # is reconstructed from the diff.
- if hash_after:
- new_content = self.GetFileContent(hash_after)
- is_binary = is_binary or self.IsBinaryData(new_content)
- if not is_binary:
- new_content = None
- return (base_content, new_content, is_binary, status)
-
- def GetMostRecentCommitSummary(self):
- return RunShell(["git", "log", "-1", "--format=%s"], silent_ok=True).strip()
-
-
-class CVSVCS(VersionControlSystem):
- """Implementation of the VersionControlSystem interface for CVS."""
-
- def __init__(self, options):
- super(CVSVCS, self).__init__(options)
-
- def GetGUID(self):
- """For now we don't know how to get repository ID for CVS"""
- return
-
- def GetOriginalContent_(self, filename):
- RunShell(["cvs", "up", filename], silent_ok=True)
- # TODO: need to detect the file content encoding
- content = open(filename).read()
- return content.replace("\r\n", "\n")
-
- def GetBaseFile(self, filename):
- base_content = None
- new_content = None
- status = "A"
-
- output, retcode = RunShellWithReturnCode(["cvs", "status", filename])
- if retcode:
- ErrorExit("Got error status from 'cvs status %s'" % filename)
-
- # str.find returns -1 when the marker is absent, so compare against -1
- # explicitly; a bare truthiness check would almost always succeed.
- if output.find("Status: Locally Modified") != -1:
- status = "M"
- temp_filename = "%s.tmp123" % filename
- os.rename(filename, temp_filename)
- base_content = self.GetOriginalContent_(filename)
- os.rename(temp_filename, filename)
- elif output.find("Status: Locally Added") != -1:
- status = "A"
- base_content = ""
- elif output.find("Status: Needs Checkout") != -1:
- status = "D"
- base_content = self.GetOriginalContent_(filename)
-
- return (base_content, new_content, self.IsBinaryData(base_content), status)
-
- def GenerateDiff(self, extra_args):
- cmd = ["cvs", "diff", "-u", "-N"]
- if self.options.revision:
- cmd += ["-r", self.options.revision]
-
- cmd.extend(extra_args)
- data, retcode = RunShellWithReturnCode(cmd)
- count = 0
- if retcode in [0, 1]:
- for line in data.splitlines():
- if line.startswith("Index:"):
- count += 1
- LOGGER.info(line)
-
- if not count:
- ErrorExit("No valid patches found in output from cvs diff")
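- # (Like GNU diff, `cvs diff` conventionally exits 0 when nothing differs
- # and 1 when differences are found, which is why both return codes are
- # accepted above; any other code leaves count at 0.)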
- - return data - - def GetUnknownFiles(self): - data, retcode = RunShellWithReturnCode(["cvs", "diff"]) - if retcode not in [0, 1]: - ErrorExit("Got error status from 'cvs diff':\n%s" % (data,)) - unknown_files = [] - for line in data.split("\n"): - if line and line[0] == "?": - unknown_files.append(line) - return unknown_files - -class MercurialVCS(VersionControlSystem): - """Implementation of the VersionControlSystem interface for Mercurial.""" - - def __init__(self, options, repo_dir): - super(MercurialVCS, self).__init__(options) - # Absolute path to repository (we can be in a subdir) - self.repo_dir = os.path.normpath(repo_dir) - # Compute the subdir - cwd = os.path.normpath(os.getcwd()) - assert cwd.startswith(self.repo_dir) - self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/") - if self.options.revision: - self.base_rev = self.options.revision - else: - self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip() - - def GetGUID(self): - # See chapter "Uniquely identifying a repository" - # http://hgbook.red-bean.com/read/customizing-the-output-of-mercurial.html - info = RunShell("hg log -r0 --template {node}".split()) - return info.strip() - - def _GetRelPath(self, filename): - """Get relative path of a file according to the current directory, - given its logical path in the repo.""" - absname = os.path.join(self.repo_dir, filename) - return os.path.relpath(absname) - - def GenerateDiff(self, extra_args): - cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args - data = RunShell(cmd, silent_ok=True) - svndiff = [] - filecount = 0 - for line in data.splitlines(): - m = re.match("diff --git a/(\S+) b/(\S+)", line) - if m: - # Modify line to make it look like as it comes from svn diff. - # With this modification no changes on the server side are required - # to make upload.py work with Mercurial repos. - # NOTE: for proper handling of moved/copied files, we have to use - # the second filename. - filename = m.group(2) - svndiff.append("Index: %s" % filename) - svndiff.append("=" * 67) - filecount += 1 - LOGGER.info(line) - else: - svndiff.append(line) - if not filecount: - ErrorExit("No valid patches found in output from hg diff") - return "\n".join(svndiff) + "\n" - - def GetUnknownFiles(self): - """Return a list of files unknown to the VCS.""" - args = [] - status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."], - silent_ok=True) - unknown_files = [] - for line in status.splitlines(): - st, fn = line.split(" ", 1) - if st == "?": - unknown_files.append(fn) - return unknown_files - - def GetBaseFile(self, filename): - # "hg status" and "hg cat" both take a path relative to the current subdir, - # but "hg diff" has given us the path relative to the repo root. 
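- # (For example, with a hypothetical repo_dir of "/repo" and a current
- # directory of "/repo/sub", _GetRelPath maps the repo-relative path
- # "sub/a.py" to "a.py".)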
- base_content = "" - new_content = None - is_binary = False - oldrelpath = relpath = self._GetRelPath(filename) - # "hg status -C" returns two lines for moved/copied files, one otherwise - out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath]) - out = out.splitlines() - # HACK: strip error message about missing file/directory if it isn't in - # the working copy - if out[0].startswith('%s: ' % relpath): - out = out[1:] - status, _ = out[0].split(' ', 1) - if len(out) > 1 and status == "A": - # Moved/copied => considered as modified, use old filename to - # retrieve base contents - oldrelpath = out[1].strip() - status = "M" - if ":" in self.base_rev: - base_rev = self.base_rev.split(":", 1)[0] - else: - base_rev = self.base_rev - if status != "A": - base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], - silent_ok=True) - is_binary = self.IsBinaryData(base_content) - if status != "R": - new_content = open(relpath, "rb").read() - is_binary = is_binary or self.IsBinaryData(new_content) - if is_binary and base_content: - # Fetch again without converting newlines - base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], - silent_ok=True, universal_newlines=False) - if not is_binary: - new_content = None - return base_content, new_content, is_binary, status - - -class PerforceVCS(VersionControlSystem): - """Implementation of the VersionControlSystem interface for Perforce.""" - - def __init__(self, options): - - def ConfirmLogin(): - # Make sure we have a valid perforce session - while True: - data, retcode = self.RunPerforceCommandWithReturnCode( - ["login", "-s"], marshal_output=True) - if not data: - ErrorExit("Error checking perforce login") - if not retcode and (not "code" in data or data["code"] != "error"): - break - print("Enter perforce password: ") - self.RunPerforceCommandWithReturnCode(["login"]) - - super(PerforceVCS, self).__init__(options) - - self.p4_changelist = options.p4_changelist - if not self.p4_changelist: - ErrorExit("A changelist id is required") - if (options.revision): - ErrorExit("--rev is not supported for perforce") - - self.p4_port = options.p4_port - self.p4_client = options.p4_client - self.p4_user = options.p4_user - - ConfirmLogin() - - if not options.title: - description = self.RunPerforceCommand(["describe", self.p4_changelist], - marshal_output=True) - if description and "desc" in description: - # Rietveld doesn't support multi-line descriptions - raw_title = description["desc"].strip() - lines = raw_title.splitlines() - if len(lines): - options.title = lines[0] - - def GetGUID(self): - """For now we don't know how to get repository ID for Perforce""" - return - - def RunPerforceCommandWithReturnCode(self, extra_args, marshal_output=False, - universal_newlines=True): - args = ["p4"] - if marshal_output: - # -G makes perforce format its output as marshalled python objects - args.extend(["-G"]) - if self.p4_port: - args.extend(["-p", self.p4_port]) - if self.p4_client: - args.extend(["-c", self.p4_client]) - if self.p4_user: - args.extend(["-u", self.p4_user]) - args.extend(extra_args) - - data, retcode = RunShellWithReturnCode( - args, universal_newlines=universal_newlines) - if marshal_output and data: - data = marshal.loads(data) - return data, retcode - - def RunPerforceCommand(self, extra_args, marshal_output=False, - universal_newlines=True): - # This might be a good place to cache call results, since things like - # describe or fstat might get called repeatedly. 
- data, retcode = self.RunPerforceCommandWithReturnCode( - extra_args, marshal_output, universal_newlines) - if retcode: - ErrorExit("Got error status from %s:\n%s" % (extra_args, data)) - return data - - def GetFileProperties(self, property_key_prefix = "", command = "describe"): - description = self.RunPerforceCommand(["describe", self.p4_changelist], - marshal_output=True) - - changed_files = {} - file_index = 0 - # Try depotFile0, depotFile1, ... until we don't find a match - while True: - file_key = "depotFile%d" % file_index - if file_key in description: - filename = description[file_key] - change_type = description[property_key_prefix + str(file_index)] - changed_files[filename] = change_type - file_index += 1 - else: - break - return changed_files - - def GetChangedFiles(self): - return self.GetFileProperties("action") - - def GetUnknownFiles(self): - # Perforce doesn't detect new files, they have to be explicitly added - return [] - - def IsBaseBinary(self, filename): - base_filename = self.GetBaseFilename(filename) - return self.IsBinaryHelper(base_filename, "files") - - def IsPendingBinary(self, filename): - return self.IsBinaryHelper(filename, "describe") - - def IsBinaryHelper(self, filename, command): - file_types = self.GetFileProperties("type", command) - if not filename in file_types: - ErrorExit("Trying to check binary status of unknown file %s." % filename) - # This treats symlinks, macintosh resource files, temporary objects, and - # unicode as binary. See the Perforce docs for more details: - # http://www.perforce.com/perforce/doc.current/manuals/cmdref/o.ftypes.html - return not file_types[filename].endswith("text") - - def GetFileContent(self, filename, revision, is_binary): - file_arg = filename - if revision: - file_arg += "#" + revision - # -q suppresses the initial line that displays the filename and revision - return self.RunPerforceCommand(["print", "-q", file_arg], - universal_newlines=not is_binary) - - def GetBaseFilename(self, filename): - actionsWithDifferentBases = [ - "move/add", # p4 move - "branch", # p4 integrate (to a new file), similar to hg "add" - "add", # p4 integrate (to a new file), after modifying the new file - ] - - # We only see a different base for "add" if this is a downgraded branch - # after a file was branched (integrated), then edited. 
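- # (Illustration with hypothetical depot paths: `p4 move //depot/old.py
- # //depot/new.py` reports action "move/add" on the new file, and
- # `p4 fstat -Or` exposes //depot/old.py as resolveFromFile0, which then
- # becomes the diff base below.)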
- if self.GetAction(filename) in actionsWithDifferentBases: - # -Or shows information about pending integrations/moves - fstat_result = self.RunPerforceCommand(["fstat", "-Or", filename], - marshal_output=True) - - baseFileKey = "resolveFromFile0" # I think it's safe to use only file0 - if baseFileKey in fstat_result: - return fstat_result[baseFileKey] - - return filename - - def GetBaseRevision(self, filename): - base_filename = self.GetBaseFilename(filename) - - have_result = self.RunPerforceCommand(["have", base_filename], - marshal_output=True) - if "haveRev" in have_result: - return have_result["haveRev"] - - def GetLocalFilename(self, filename): - where = self.RunPerforceCommand(["where", filename], marshal_output=True) - if "path" in where: - return where["path"] - - def GenerateDiff(self, args): - class DiffData: - def __init__(self, perforceVCS, filename, action): - self.perforceVCS = perforceVCS - self.filename = filename - self.action = action - self.base_filename = perforceVCS.GetBaseFilename(filename) - - self.file_body = None - self.base_rev = None - self.prefix = None - self.working_copy = True - self.change_summary = None - - def GenerateDiffHeader(diffData): - header = [] - header.append("Index: %s" % diffData.filename) - header.append("=" * 67) - - if diffData.base_filename != diffData.filename: - if diffData.action.startswith("move"): - verb = "rename" - else: - verb = "copy" - header.append("%s from %s" % (verb, diffData.base_filename)) - header.append("%s to %s" % (verb, diffData.filename)) - - suffix = "\t(revision %s)" % diffData.base_rev - header.append("--- " + diffData.base_filename + suffix) - if diffData.working_copy: - suffix = "\t(working copy)" - header.append("+++ " + diffData.filename + suffix) - if diffData.change_summary: - header.append(diffData.change_summary) - return header - - def GenerateMergeDiff(diffData, args): - # -du generates a unified diff, which is nearly svn format - diffData.file_body = self.RunPerforceCommand( - ["diff", "-du", diffData.filename] + args) - diffData.base_rev = self.GetBaseRevision(diffData.filename) - diffData.prefix = "" - - # We have to replace p4's file status output (the lines starting - # with +++ or ---) to match svn's diff format - lines = diffData.file_body.splitlines() - first_good_line = 0 - while (first_good_line < len(lines) and - not lines[first_good_line].startswith("@@")): - first_good_line += 1 - diffData.file_body = "\n".join(lines[first_good_line:]) - return diffData - - def GenerateAddDiff(diffData): - fstat = self.RunPerforceCommand(["fstat", diffData.filename], - marshal_output=True) - if "headRev" in fstat: - diffData.base_rev = fstat["headRev"] # Re-adding a deleted file - else: - diffData.base_rev = "0" # Brand new file - diffData.working_copy = False - rel_path = self.GetLocalFilename(diffData.filename) - diffData.file_body = open(rel_path, 'r').read() - # Replicate svn's list of changed lines - line_count = len(diffData.file_body.splitlines()) - diffData.change_summary = "@@ -0,0 +1" - if line_count > 1: - diffData.change_summary += ",%d" % line_count - diffData.change_summary += " @@" - diffData.prefix = "+" - return diffData - - def GenerateDeleteDiff(diffData): - diffData.base_rev = self.GetBaseRevision(diffData.filename) - is_base_binary = self.IsBaseBinary(diffData.filename) - # For deletes, base_filename == filename - diffData.file_body = self.GetFileContent(diffData.base_filename, - None, - is_base_binary) - # Replicate svn's list of changed lines - line_count = 
len(diffData.file_body.splitlines()) - diffData.change_summary = "@@ -1" - if line_count > 1: - diffData.change_summary += ",%d" % line_count - diffData.change_summary += " +0,0 @@" - diffData.prefix = "-" - return diffData - - changed_files = self.GetChangedFiles() - - svndiff = [] - filecount = 0 - for (filename, action) in changed_files.items(): - svn_status = self.PerforceActionToSvnStatus(action) - if svn_status == "SKIP": - continue - - diffData = DiffData(self, filename, action) - # Is it possible to diff a branched file? Stackoverflow says no: - # http://stackoverflow.com/questions/1771314/in-perforce-command-line-how-to-diff-a-file-reopened-for-add - if svn_status == "M": - diffData = GenerateMergeDiff(diffData, args) - elif svn_status == "A": - diffData = GenerateAddDiff(diffData) - elif svn_status == "D": - diffData = GenerateDeleteDiff(diffData) - else: - ErrorExit("Unknown file action %s (svn action %s)." % \ - (action, svn_status)) - - svndiff += GenerateDiffHeader(diffData) - - for line in diffData.file_body.splitlines(): - svndiff.append(diffData.prefix + line) - filecount += 1 - if not filecount: - ErrorExit("No valid patches found in output from p4 diff") - return "\n".join(svndiff) + "\n" - - def PerforceActionToSvnStatus(self, status): - # Mirroring the list at http://permalink.gmane.org/gmane.comp.version-control.mercurial.devel/28717 - # Is there something more official? - return { - "add" : "A", - "branch" : "A", - "delete" : "D", - "edit" : "M", # Also includes changing file types. - "integrate" : "M", - "move/add" : "M", - "move/delete": "SKIP", - "purge" : "D", # How does a file's status become "purge"? - }[status] - - def GetAction(self, filename): - changed_files = self.GetChangedFiles() - if not filename in changed_files: - ErrorExit("Trying to get base version of unknown file %s." % filename) - - return changed_files[filename] - - def GetBaseFile(self, filename): - base_filename = self.GetBaseFilename(filename) - base_content = "" - new_content = None - - status = self.PerforceActionToSvnStatus(self.GetAction(filename)) - - if status != "A": - revision = self.GetBaseRevision(base_filename) - if not revision: - ErrorExit("Couldn't find base revision for file %s" % filename) - is_base_binary = self.IsBaseBinary(base_filename) - base_content = self.GetFileContent(base_filename, - revision, - is_base_binary) - - is_binary = self.IsPendingBinary(filename) - if status != "D" and status != "SKIP": - relpath = self.GetLocalFilename(filename) - if is_binary: - new_content = open(relpath, "rb").read() - - return base_content, new_content, is_binary, status - -# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync. -def SplitPatch(data): - """Splits a patch into separate pieces for each file. - - Args: - data: A string containing the output of svn diff. - - Returns: - A list of 2-tuple (filename, text) where text is the svn diff output - pertaining to filename. - """ - patches = [] - filename = None - diff = [] - for line in data.splitlines(True): - new_filename = None - if line.startswith('Index:'): - unused, new_filename = line.split(':', 1) - new_filename = new_filename.strip() - elif line.startswith('Property changes on:'): - unused, temp_filename = line.split(':', 1) - # When a file is modified, paths use '/' between directories, however - # when a property is modified '\' is used on Windows. Make them the same - # otherwise the file shows up twice. 
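- # For example (hypothetical path):
- #   "dir\\sub\\file.txt".replace('\\', '/') -> "dir/sub/file.txt"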
-# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
-def SplitPatch(data):
-  """Splits a patch into separate pieces for each file.
-
-  Args:
-    data: A string containing the output of svn diff.
-
-  Returns:
-    A list of 2-tuples (filename, text) where text is the svn diff output
-    pertaining to filename.
-  """
-  patches = []
-  filename = None
-  diff = []
-  for line in data.splitlines(True):
-    new_filename = None
-    if line.startswith('Index:'):
-      unused, new_filename = line.split(':', 1)
-      new_filename = new_filename.strip()
-    elif line.startswith('Property changes on:'):
-      unused, temp_filename = line.split(':', 1)
-      # When a file is modified, paths use '/' between directories; however,
-      # when a property is modified, '\' is used on Windows. Make them the
-      # same, otherwise the file shows up twice.
-      temp_filename = temp_filename.strip().replace('\\', '/')
-      if temp_filename != filename:
-        # File has property changes but no modifications: create a new diff.
-        new_filename = temp_filename
-    if new_filename:
-      if filename and diff:
-        patches.append((filename, ''.join(diff)))
-      filename = new_filename
-      diff = [line]
-      continue
-    if diff is not None:
-      diff.append(line)
-  if filename and diff:
-    patches.append((filename, ''.join(diff)))
-  return patches
-
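Assuming SplitPatch is defined as above, here is a sketch of how it carves an svn-style diff into per-file pieces (the sample diff text is made up):

sample = (
    "Index: foo.cc\n"
    "===\n"
    "--- foo.cc\t(revision 1)\n"
    "+++ foo.cc\t(working copy)\n"
    "@@ -1 +1 @@\n"
    "-old\n"
    "+new\n"
    "Index: bar.cc\n"
    "===\n"
    "@@ -0,0 +1 @@\n"
    "+hello\n")

for filename, text in SplitPatch(sample):
  print(filename, len(text.splitlines()), "lines")
# -> foo.cc 7 lines
#    bar.cc 4 lines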
-
-def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
-  """Uploads a separate patch for each file in the diff output.
-
-  Returns a list of [patch_key, filename] for each file.
-  """
-  def UploadFile(filename, data):
-    form_fields = [("filename", filename)]
-    if not options.download_base:
-      form_fields.append(("content_upload", "1"))
-    files = [("data", "data.diff", data)]
-    ctype, body = EncodeMultipartFormData(form_fields, files)
-    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
-
-    try:
-      response_body = rpc_server.Send(url, body, content_type=ctype)
-    except urllib2.HTTPError as e:
-      response_body = ("Failed to upload patch for %s. Got %d status code." %
-                       (filename, e.code))
-
-    lines = response_body.splitlines()
-    if not lines or lines[0] != "OK":
-      StatusUpdate(" --> %s" % response_body)
-      sys.exit(1)
-    return ("Uploaded patch for " + filename, [lines[1], filename])
-
-  threads = []
-  thread_pool = ThreadPool(options.num_upload_threads)
-
-  patches = SplitPatch(data)
-  rv = []
-  for patch in patches:
-    if len(patch[1]) > MAX_UPLOAD_SIZE:
-      print("Not uploading the patch for %s because the file is too large." %
-            (patch[0],))
-      continue
-
-    filename = patch[0]
-    data = patch[1]
-
-    t = thread_pool.apply_async(UploadFile, args=(filename, data))
-    threads.append(t)
-
-  for t in threads:
-    result = t.get(timeout=UPLOAD_TIMEOUT)
-    print(result[0])
-    rv.append(result[1])
-
-  return rv
-
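The fan-out above is plain multiprocessing.pool.ThreadPool usage; a stripped-down sketch of the same pattern (upload_one is a hypothetical stand-in for UploadFile):

from multiprocessing.pool import ThreadPool

def upload_one(filename, data):
  # Stand-in for UploadFile(); a real implementation would POST to
  # /<issue>/upload_patch/<patchset> and check the response.
  return "Uploaded patch for " + filename, len(data)

pool = ThreadPool(4)
async_results = [pool.apply_async(upload_one, args=(name, body))
                 for name, body in [("a.cc", "x"), ("b.cc", "yz")]]
for r in async_results:
  message, size = r.get(timeout=60)  # fail loudly on a hung upload
  print(message, size)
pool.close()
pool.join()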
- """ - if os.name == 'nt': - subversion_config = os.environ.get("APPDATA") + "\\Subversion\\config" - else: - subversion_config = os.path.expanduser("~/.subversion/config") - if not os.path.exists(subversion_config): - return {} - config = configparser.ConfigParser() - config.read(subversion_config) - if (config.has_section("miscellany") and - config.has_option("miscellany", "enable-auto-props") and - config.getboolean("miscellany", "enable-auto-props") and - config.has_section("auto-props")): - props = {} - for file_pattern in config.options("auto-props"): - props[file_pattern] = ParseSubversionPropertyValues( - config.get("auto-props", file_pattern)) - return props - else: - return {} - -def ParseSubversionPropertyValues(props): - """Parse the given property value which comes from [auto-props] section and - returns a list whose element is a (svn_prop_key, svn_prop_value) pair. - - See the following doctest for example. - - >>> ParseSubversionPropertyValues('svn:eol-style=LF') - [('svn:eol-style', 'LF')] - >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg') - [('svn:mime-type', 'image/jpeg')] - >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable') - [('svn:eol-style', 'LF'), ('svn:executable', '*')] - """ - key_value_pairs = [] - for prop in props.split(";"): - key_value = prop.split("=") - assert len(key_value) <= 2 - if len(key_value) == 1: - # If value is not given, use '*' as a Subversion's convention. - key_value_pairs.append((key_value[0], "*")) - else: - key_value_pairs.append((key_value[0], key_value[1])) - return key_value_pairs - - -def GetSubversionPropertyChanges(filename): - """Return a Subversion's 'Property changes on ...' string, which is used in - the patch file. - - Args: - filename: filename whose property might be set by [auto-props] config. - - Returns: - A string like 'Property changes on |filename| ...' if given |filename| - matches any entries in [auto-props] section. None, otherwise. - """ - global svn_auto_props_map - if svn_auto_props_map is None: - svn_auto_props_map = LoadSubversionAutoProperties() - - all_props = [] - for file_pattern, props in svn_auto_props_map.items(): - if fnmatch.fnmatch(filename, file_pattern): - all_props.extend(props) - if all_props: - return FormatSubversionPropertyChanges(filename, all_props) - return None - - -def FormatSubversionPropertyChanges(filename, props): - """Returns Subversion's 'Property changes on ...' strings using given filename - and properties. - - Args: - filename: filename - props: A list whose element is a (svn_prop_key, svn_prop_value) pair. - - Returns: - A string which can be used in the patch file for Subversion. - - See the following doctest for example. - - >>> print FormatSubversionPropertyChanges('foo.cc', [('svn:eol-style', 'LF')]) - Property changes on: foo.cc - ___________________________________________________________________ - Added: svn:eol-style - + LF - - """ - prop_changes_lines = [ - "Property changes on: %s" % filename, - "___________________________________________________________________"] - for key, value in props: - prop_changes_lines.append("Added: " + key) - prop_changes_lines.append(" + " + value) - return "\n".join(prop_changes_lines) + "\n" - - -def RealMain(argv, data=None): - """The real main function. - - Args: - argv: Command line arguments. - data: Diff contents. If None (default) the diff is generated by - the VersionControlSystem implementation returned by GuessVCS(). - - Returns: - A 2-tuple (issue id, patchset id). 
-
-def LoadSubversionAutoProperties():
-  """Returns the contents of the [auto-props] section of Subversion's config
-  file as a dictionary.
-
-  Returns:
-    A dictionary whose key-value pairs correspond to the [auto-props]
-    section's key-value pairs.
-    An empty dictionary is returned if:
-     - the config file doesn't exist, or
-     - 'enable-auto-props' is not set to a true-like value in [miscellany].
-  """
-  if os.name == 'nt':
-    subversion_config = os.environ.get("APPDATA") + "\\Subversion\\config"
-  else:
-    subversion_config = os.path.expanduser("~/.subversion/config")
-  if not os.path.exists(subversion_config):
-    return {}
-  config = configparser.ConfigParser()
-  config.read(subversion_config)
-  if (config.has_section("miscellany") and
-      config.has_option("miscellany", "enable-auto-props") and
-      config.getboolean("miscellany", "enable-auto-props") and
-      config.has_section("auto-props")):
-    props = {}
-    for file_pattern in config.options("auto-props"):
-      props[file_pattern] = ParseSubversionPropertyValues(
-          config.get("auto-props", file_pattern))
-    return props
-  else:
-    return {}
-
-
-def ParseSubversionPropertyValues(props):
-  """Parses the given property value, which comes from the [auto-props]
-  section, and returns a list of (svn_prop_key, svn_prop_value) pairs.
-
-  See the following doctest for examples.
-
-  >>> ParseSubversionPropertyValues('svn:eol-style=LF')
-  [('svn:eol-style', 'LF')]
-  >>> ParseSubversionPropertyValues('svn:mime-type=image/jpeg')
-  [('svn:mime-type', 'image/jpeg')]
-  >>> ParseSubversionPropertyValues('svn:eol-style=LF;svn:executable')
-  [('svn:eol-style', 'LF'), ('svn:executable', '*')]
-  """
-  key_value_pairs = []
-  for prop in props.split(";"):
-    key_value = prop.split("=")
-    assert len(key_value) <= 2
-    if len(key_value) == 1:
-      # If no value is given, use '*', per Subversion's convention.
-      key_value_pairs.append((key_value[0], "*"))
-    else:
-      key_value_pairs.append((key_value[0], key_value[1]))
-  return key_value_pairs
-
-
-def GetSubversionPropertyChanges(filename):
-  """Returns a Subversion 'Property changes on ...' string, which is used in
-  the patch file.
-
-  Args:
-    filename: filename whose property might be set by the [auto-props] config.
-
-  Returns:
-    A string like 'Property changes on |filename| ...' if the given |filename|
-    matches any entries in the [auto-props] section. None, otherwise.
-  """
-  global svn_auto_props_map
-  if svn_auto_props_map is None:
-    svn_auto_props_map = LoadSubversionAutoProperties()
-
-  all_props = []
-  for file_pattern, props in svn_auto_props_map.items():
-    if fnmatch.fnmatch(filename, file_pattern):
-      all_props.extend(props)
-  if all_props:
-    return FormatSubversionPropertyChanges(filename, all_props)
-  return None
-
-
-def FormatSubversionPropertyChanges(filename, props):
-  """Returns a Subversion 'Property changes on ...' string built from the
-  given filename and properties.
-
-  Args:
-    filename: filename.
-    props: A list of (svn_prop_key, svn_prop_value) pairs.
-
-  Returns:
-    A string which can be used in the patch file for Subversion.
-
-  See the following doctest for an example.
-
-  >>> print FormatSubversionPropertyChanges('foo.cc', [('svn:eol-style', 'LF')])
-  Property changes on: foo.cc
-  ___________________________________________________________________
-  Added: svn:eol-style
-     + LF
-
-  """
-  prop_changes_lines = [
-      "Property changes on: %s" % filename,
-      "___________________________________________________________________"]
-  for key, value in props:
-    prop_changes_lines.append("Added: " + key)
-    prop_changes_lines.append("   + " + value)
-  return "\n".join(prop_changes_lines) + "\n"
-
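A self-contained sketch of the [auto-props] handling above, using an in-memory config instead of ~/.subversion/config (the config contents are invented for the example):

import configparser
import fnmatch

cfg = configparser.ConfigParser()
cfg.read_string("""
[miscellany]
enable-auto-props = yes
[auto-props]
*.cc = svn:eol-style=LF
*.png = svn:mime-type=image/png
""")

def props_for(filename):
  # enable-auto-props gates the whole section; the section keys are
  # file patterns matched with fnmatch, as in GetSubversionPropertyChanges.
  if not cfg.getboolean("miscellany", "enable-auto-props"):
    return []
  return [cfg.get("auto-props", pattern)
          for pattern in cfg.options("auto-props")
          if fnmatch.fnmatch(filename, pattern)]

print(props_for("foo.cc"))  # -> ['svn:eol-style=LF']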
-
-def RealMain(argv, data=None):
-  """The real main function.
-
-  Args:
-    argv: Command line arguments.
-    data: Diff contents. If None (default) the diff is generated by
-      the VersionControlSystem implementation returned by GuessVCS().
-
-  Returns:
-    A 2-tuple (issue id, patchset id).
-    The patchset id is None if the base files are not uploaded by this
-    script (applies only to SVN checkouts).
-  """
-  options, args = parser.parse_args(argv[1:])
-  if options.help:
-    if options.verbose < 2:
-      # hide Perforce options
-      parser.epilog = (
-          "Use '--help -v' to show additional Perforce options. "
-          "For more help, see "
-          "http://code.google.com/p/rietveld/wiki/CodeReviewHelp"
-      )
-      parser.option_groups.remove(parser.get_option_group('--p4_port'))
-    parser.print_help()
-    sys.exit(0)
-
-  global verbosity
-  verbosity = options.verbose
-  if verbosity >= 3:
-    LOGGER.setLevel(logging.DEBUG)
-  elif verbosity >= 2:
-    LOGGER.setLevel(logging.INFO)
-
-  vcs = GuessVCS(options)
-
-  if options.download_base:
-    options.download_base = True
-    LOGGER.info("Enabled upload of base file")
-  if not options.assume_yes:
-    vcs.CheckForUnknownFiles()
-  if data is None:
-    data = vcs.GenerateDiff(args)
-  data = vcs.PostProcessDiff(data)
-  if options.print_diffs:
-    print("Rietveld diff start:*****")
-    print(data)
-    print("Rietveld diff end:*****")
-  files = vcs.GetBaseFiles(data)
-  if verbosity >= 1:
-    print("Upload server:", options.server, "(change with -s/--server)")
-
-  auth_config = auth.extract_auth_config_from_options(options)
-  rpc_server = GetRpcServer(options.server, auth_config, options.email)
-  form_fields = []
-
-  repo_guid = vcs.GetGUID()
-  if repo_guid:
-    form_fields.append(("repo_guid", repo_guid))
-  if options.issue:
-    form_fields.append(("issue", str(options.issue)))
-  if options.email:
-    form_fields.append(("user", options.email))
-  if options.reviewers:
-    for reviewer in options.reviewers.split(','):
-      CheckReviewer(reviewer)
-    form_fields.append(("reviewers", options.reviewers))
-  if options.cc:
-    for cc in options.cc.split(','):
-      CheckReviewer(cc)
-    form_fields.append(("cc", options.cc))
-  if options.project:
-    form_fields.append(("project", options.project))
-  if options.target_ref:
-    form_fields.append(("target_ref", options.target_ref))
-  if options.cq_dry_run:
-    form_fields.append(("cq_dry_run", "1"))
-    form_fields.append(("commit", "1"))
-  if options.depends_on_patchset:
-    form_fields.append(("depends_on_patchset", options.depends_on_patchset))
-
-  # Process --message, --title and --file.
-  message = options.message or ""
-  explicit_title = options.title is not None
-  title = options.title or ""
-  if options.file:
-    if options.message:
-      ErrorExit("Can't specify both the message and the message file options")
-    file = open(options.file, 'r')
-    message = file.read()
-    file.close()
-    title = title or message.split('\n', 1)[0].strip()
-  if not title and not explicit_title:
-    if options.issue:
-      prompt = "Title describing this patch set"
-    else:
-      prompt = "New issue subject"
-    title_default = vcs.GetMostRecentCommitSummary()
-    if title_default:
-      prompt += " [%s]" % title_default
-    title = raw_input(prompt + ": ").strip() or title_default
-  if not title and not options.issue:
-    ErrorExit("A non-empty title is required for a new issue")
-  # For existing issues, it's fine to give a patchset an empty name. Rietveld
-  # doesn't accept that, so use a single space instead.
-  title = title or " "
-  if len(title) > 100:
-    title = title[:99] + '…'
-  if title and not options.issue:
-    message = message or title
-
-  form_fields.append(("subject", title))
-  # If it's a new issue, send the message as the description. Otherwise a new
-  # message is created below on upload_complete.
-  if message and not options.issue:
-    form_fields.append(("description", message))
-
-  # Send a hash of all the base files so the server can determine if a copy
-  # already exists in an earlier patchset.
-  base_hashes = ""
-  for file, info in files.iteritems():
-    if info[0] is not None:
-      checksum = md5(info[0]).hexdigest()
-      if base_hashes:
-        base_hashes += "|"
-      base_hashes += checksum + ":" + file
-  form_fields.append(("base_hashes", base_hashes))
-  if options.private:
-    if options.issue:
-      print("Warning: Private flag ignored when updating an existing issue.")
-    else:
-      form_fields.append(("private", "1"))
-  if options.send_patch:
-    options.send_mail = True
-  if not options.download_base:
-    form_fields.append(("content_upload", "1"))
-  if len(data) > MAX_UPLOAD_SIZE:
-    print("Patch is large, so uploading file patches separately.")
-    uploaded_diff_file = []
-    form_fields.append(("separate_patches", "1"))
-  else:
-    uploaded_diff_file = [("data", "data.diff", data)]
-  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
-  response_body = rpc_server.Send("/upload", body, content_type=ctype)
-  issue, patchset = None, None
-  if not options.download_base or not uploaded_diff_file:
-    lines = response_body.splitlines()
-    if len(lines) >= 2:
-      # lines[0] is "Issue (created|updated): ".
-      issue = lines[0][lines[0].rfind("/")+1:]
-      # lines[1] is just the patchset number.
-      patchset = lines[1].strip()
-      msg = '%s (patchset: %s)' % (lines[0], patchset)
-      patches = [x.split(" ", 1) for x in lines[2:]]
-    else:
-      msg = response_body
-  else:
-    msg = response_body
-  StatusUpdate(msg)
-  if not response_body.startswith("Issue created.") and \
-     not response_body.startswith("Issue updated."):
-    sys.exit(0)
-  assert issue
-
-  if not uploaded_diff_file:
-    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
-    if not options.download_base:
-      patches = result
-
-  if not options.download_base:
-    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
-
-  payload = {}  # payload for the final request
-  if options.send_mail:
-    payload["send_mail"] = "yes"
-  if options.send_patch:
-    payload["attach_patch"] = "yes"
-  if options.issue and message:
-    payload["message"] = message
-  payload = urllib.urlencode(payload)
-  rpc_server.Send("/" + issue + "/upload_complete/" + (patchset or ""),
-                  payload=payload)
-  return issue, patchset
-
-
-def main():
-  try:
-    logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
-                                "%(lineno)s %(message)s "))
-    os.environ['LC_ALL'] = 'C'
-    RealMain(sys.argv)
-  except KeyboardInterrupt:
-    print()
-    StatusUpdate("Interrupted.")
-    sys.exit(1)
-  except auth.AuthenticationError as e:
-    print(e, file=sys.stderr)
-    sys.exit(1)
-
-
-if __name__ == "__main__":
-  main()
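Finally, the base_hashes field that RealMain assembles is just pipe-separated md5:filename entries, letting the server skip re-uploading base files it already has. A sketch under the same data layout GetBaseFile returns (the sample files dict is invented):

from hashlib import md5

def build_base_hashes(files):
  # files maps filename -> (base_content, new_content, is_binary, status),
  # mirroring what GetBaseFiles()/GetBaseFile() produce.
  entries = []
  for filename, info in files.items():
    if info[0] is not None:  # no base content for brand-new files
      entries.append(md5(info[0]).hexdigest() + ":" + filename)
  return "|".join(entries)

print(build_base_hashes({"a.cc": (b"old body", b"new body", False, "M")}))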