Delete gcl, drover, and trychange

These tools are relatively standalone, and only ever worked for SVN.
Removing these is a good start to removing other SVN support code.

R=maruel@chromium.org
BUG=475321

Review-Url: https://codereview.chromium.org/2269413002
agable (committed by Commit bot)
parent 12fa6ff69b
commit 92bec4f56f

codereview.settings

@@ -1,4 +1,4 @@
-# This file is used by gcl to get repository specific information.
+# This file is used by git cl to get repository specific information.
 CODE_REVIEW_SERVER: codereview.chromium.org
 CC_LIST: chromium-reviews@chromium.org
 VIEW_VC: https://chromium.googlesource.com/chromium/tools/depot_tools/+/
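
For context, codereview.settings is a flat "KEY: value" file, as the hunk above shows. A minimal illustrative sketch of reading such a file follows; parse_codereview_settings is a hypothetical helper for illustration, not the actual depot_tools parser.

import collections

def parse_codereview_settings(path='codereview.settings'):
  # Each non-comment line is "KEY: value"; a later duplicate key wins.
  settings = collections.OrderedDict()
  with open(path) as f:
    for line in f:
      line = line.strip()
      if not line or line.startswith('#'):
        continue
      key, _, value = line.partition(':')
      settings[key.strip()] = value.strip()
  return settings

# e.g. parse_codereview_settings()['CODE_REVIEW_SERVER'] -> 'codereview.chromium.org'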

drover

@@ -1,25 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script will try to sync the bootstrap directories and then defer control.
base_dir=$(dirname "$0")
# Use the batch file as an entry point if on cygwin.
if [ "${OSTYPE}" = "cygwin" -a "${TERM}" != "xterm" ]; then
${base_dir}/drover.bat "$@"
exit
fi
# We're on POSIX (not cygwin). We can now safely look for svn checkout.
if [ "X$DEPOT_TOOLS_UPDATE" != "X0" -a -e "${base_dir}/.svn" ]
then
# Update the bootstrap directory to stay up-to-date with the latest
# depot_tools.
svn -q up "${base_dir}"
fi
PYTHONDONTWRITEBYTECODE=1 exec python "${base_dir}/drover.py" "$@"

drover.bat

@@ -1,9 +0,0 @@
@echo off
:: Copyright (c) 2009 The Chromium Authors. All rights reserved.
:: Use of this source code is governed by a BSD-style license that can be
:: found in the LICENSE file.
setlocal
set PATH=%~dp0svn;%PATH%
set PYTHONDONTWRITEBYTECODE=1
call python "%~dp0drover.py" %*

drover.py

@@ -1,648 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import optparse
import os
import re
import sys
import urlparse
import gclient_utils
import subprocess2
USAGE = """
WARNING: Please use this tool in an empty directory
(or at least one that you don't mind clobbering.)
REQUIRES: SVN 1.5+
NOTE: NO NEED TO CHECKOUT ANYTHING IN ADVANCE OF USING THIS TOOL.
Valid parameters:
[Merge from trunk to branch]
--merge <revision> --branch <branch_num>
Example: %(app)s --merge 12345 --branch 187
[Merge from trunk to local copy]
--merge <revision> --local
Example: %(app)s --merge 12345 --local
[Merge from branch to branch]
--merge <revision> --sbranch <branch_num> --branch <branch_num>
Example: %(app)s --merge 12345 --sbranch 248 --branch 249
[Revert from trunk]
--revert <revision>
Example: %(app)s --revert 12345
[Revert from branch]
--revert <revision> --branch <branch_num>
Example: %(app)s --revert 12345 --branch 187
"""
export_map_ = None
files_info_ = None
delete_map_ = None
file_pattern_ = r"[ ]+([MADUC])[ ]+/((?:trunk|branches/.*?)/src(.*)/(.*))"
depot_tools_dir_ = os.path.dirname(os.path.abspath(__file__))
def runGcl(subcommand):
gcl_path = os.path.join(depot_tools_dir_, "gcl")
if not os.path.exists(gcl_path):
print "WARNING: gcl not found beside drover.py. Using system gcl instead..."
gcl_path = 'gcl'
command = "%s %s" % (gcl_path, subcommand)
return os.system(command)
def gclUpload(revision, author):
command = ("upload " + str(revision) +
" --send_mail --no_presubmit --reviewers=" + author)
return runGcl(command)
def getSVNInfo(url, revision):
info = {}
svn_info = subprocess2.capture(
['svn', 'info', '--non-interactive', '%s@%s' % (url, revision)],
stderr=subprocess2.VOID).splitlines()
for line in svn_info:
match = re.search(r"(.*?):(.*)", line)
if match:
info[match.group(1).strip()] = match.group(2).strip()
return info
def isSVNDirty():
svn_status = subprocess2.check_output(['svn', 'status']).splitlines()
for line in svn_status:
match = re.search(r"^[^X?]", line)
if match:
return True
return False
def getAuthor(url, revision):
info = getSVNInfo(url, revision)
if (info.has_key("Last Changed Author")):
return info["Last Changed Author"]
return None
def isSVNFile(url, revision):
info = getSVNInfo(url, revision)
if (info.has_key("Node Kind")):
if (info["Node Kind"] == "file"):
return True
return False
def isSVNDirectory(url, revision):
info = getSVNInfo(url, revision)
if (info.has_key("Node Kind")):
if (info["Node Kind"] == "directory"):
return True
return False
def inCheckoutRoot(path):
info = getSVNInfo(path, "HEAD")
if (not info.has_key("Repository Root")):
return False
repo_root = info["Repository Root"]
info = getSVNInfo(os.path.dirname(os.path.abspath(path)), "HEAD")
if (info.get("Repository Root", None) != repo_root):
return True
return False
def getRevisionLog(url, revision):
"""Takes an svn url and gets the associated revision."""
svn_log = subprocess2.check_output(
['svn', 'log', url, '-r', str(revision)],
universal_newlines=True).splitlines(True)
# Don't include the header lines and the trailing "---..." line.
return ''.join(svn_log[3:-1])
def getSVNVersionInfo():
"""Extract version information from SVN"""
svn_info = subprocess2.check_output(['svn', '--version']).splitlines()
info = {}
for line in svn_info:
match = re.search(r"svn, version ((\d+)\.(\d+)\.(\d+))", line)
if match:
info['version'] = match.group(1)
info['major'] = int(match.group(2))
info['minor'] = int(match.group(3))
info['patch'] = int(match.group(4))
return info
return None
def isMinimumSVNVersion(major, minor, patch=0):
"""Test for minimum SVN version"""
return _isMinimumSVNVersion(getSVNVersionInfo(), major, minor, patch)
def _isMinimumSVNVersion(version, major, minor, patch=0):
"""Test for minimum SVN version, internal method"""
if not version:
return False
if (version['major'] > major):
return True
elif (version['major'] < major):
return False
if (version['minor'] > minor):
return True
elif (version['minor'] < minor):
return False
if (version['patch'] >= patch):
return True
else:
return False
def checkoutRevision(url, revision, branch_url, revert=False, pop=True):
files_info = getFileInfo(url, revision)
paths = getBestMergePaths2(files_info, revision)
export_map = getBestExportPathsMap2(files_info, revision)
command = 'svn checkout -N ' + branch_url
print command
os.system(command)
match = re.search(r"^[a-z]+://.*/(.*)", branch_url)
if match:
os.chdir(match.group(1))
# This line is extremely important due to the way svn behaves in the
# set-depths action. If parents aren't handled before children, the child
# directories get clobbered and the merge step fails.
paths.sort()
# Checkout the directories that already exist
for path in paths:
if (export_map.has_key(path) and not revert):
print "Exclude new directory " + path
continue
subpaths = path.split('/')
#In the normal case, where no url override is specified and it's just
# chromium source, it's necessary to remove the 'trunk' from the filepath,
# since in the checkout we include 'trunk' or 'branch/\d+'.
#
# However, when a url is specified we want to preserve that because it's
# a part of the filepath and necessary for path operations on svn (because
# frankly, we are checking out the correct top level, and not hacking it).
if pop:
subpaths.pop(0)
base = ''
for subpath in subpaths:
base += '/' + subpath
# This logic ensures that you don't empty out any directories
if not os.path.exists("." + base):
command = ('svn update --depth empty ' + "." + base)
print command
os.system(command)
if (revert):
files = getAllFilesInRevision(files_info)
else:
files = getExistingFilesInRevision(files_info)
for f in files:
# Prevent the tool from clobbering the src directory
if (f == ""):
continue
command = ('svn up ".' + f + '"')
print command
os.system(command)
def mergeRevision(url, revision):
paths = getBestMergePaths(url, revision)
export_map = getBestExportPathsMap(url, revision)
for path in paths:
if export_map.has_key(path):
continue
command = ('svn merge -N -r ' + str(revision-1) + ":" + str(revision) + " ")
command += " --ignore-ancestry "
command += " -x --ignore-eol-style "
command += url + path + "@" + str(revision) + " ." + path
print command
os.system(command)
def exportRevision(url, revision):
paths = getBestExportPathsMap(url, revision).keys()
paths.sort()
for path in paths:
command = ('svn export -N ' + url + path + "@" + str(revision) + " ." +
path)
print command
os.system(command)
command = 'svn add .' + path
print command
os.system(command)
def deleteRevision(url, revision):
paths = getBestDeletePathsMap(url, revision).keys()
paths.sort()
paths.reverse()
for path in paths:
command = "svn delete ." + path
print command
os.system(command)
def revertExportRevision(url, revision):
paths = getBestExportPathsMap(url, revision).keys()
paths.sort()
paths.reverse()
for path in paths:
command = "svn delete ." + path
print command
os.system(command)
def revertRevision(url, revision):
command = ('svn merge --ignore-ancestry -c -%d %s .' % (revision, url))
print command
os.system(command)
def getFileInfo(url, revision):
global files_info_
if (files_info_ != None):
return files_info_
svn_log = subprocess2.check_output(
['svn', 'log', url, '-r', str(revision), '-v']).splitlines()
info = []
for line in svn_log:
# A workaround to dump the (from .*) stuff, regex not so friendly in the 2nd
# pass...
match = re.search(r"(.*) \(from.*\)", line)
if match:
line = match.group(1)
match = re.search(file_pattern_, line)
if match:
info.append([match.group(1).strip(), match.group(2).strip(),
match.group(3).strip(),match.group(4).strip()])
files_info_ = info
return info
def getBestMergePaths(url, revision):
"""Takes an svn url and gets the associated revision."""
return getBestMergePaths2(getFileInfo(url, revision), revision)
def getBestMergePaths2(files_info, revision):
"""Takes an svn url and gets the associated revision."""
return list(set([f[2] for f in files_info]))
def getBestExportPathsMap(url, revision):
return getBestExportPathsMap2(getFileInfo(url, revision), revision)
def getBestExportPathsMap2(files_info, revision):
"""Takes an svn url and gets the associated revision."""
global export_map_
if export_map_:
return export_map_
result = {}
for file_info in files_info:
if (file_info[0] == "A"):
if(isSVNDirectory("svn://svn.chromium.org/chrome/" + file_info[1],
revision)):
result[file_info[2] + "/" + file_info[3]] = ""
export_map_ = result
return result
def getBestDeletePathsMap(url, revision):
return getBestDeletePathsMap2(getFileInfo(url, revision), revision)
def getBestDeletePathsMap2(files_info, revision):
"""Takes an svn url and gets the associated revision."""
global delete_map_
if delete_map_:
return delete_map_
result = {}
for file_info in files_info:
if (file_info[0] == "D"):
if(isSVNDirectory("svn://svn.chromium.org/chrome/" + file_info[1],
revision)):
result[file_info[2] + "/" + file_info[3]] = ""
delete_map_ = result
return result
def getExistingFilesInRevision(files_info):
"""Checks for existing files in the revision.
Anything that's A will require special treatment (either a merge or an
export + add)
"""
return ['%s/%s' % (f[2], f[3]) for f in files_info if f[0] != 'A']
def getAllFilesInRevision(files_info):
"""Checks for existing files in the revision.
Anything that's A will require special treatment (either a merge or an
export + add)
"""
return ['%s/%s' % (f[2], f[3]) for f in files_info]
def getSVNAuthInfo(folder=None):
"""Fetches SVN authorization information in the subversion auth folder and
returns it as a dictionary of dictionaries."""
if not folder:
if sys.platform == 'win32':
folder = '%%APPDATA%\\Subversion\\auth'
else:
folder = '~/.subversion/auth'
folder = os.path.expandvars(os.path.expanduser(folder))
svn_simple_folder = os.path.join(folder, 'svn.simple')
results = {}
try:
for auth_file in os.listdir(svn_simple_folder):
# Read the SVN auth file, convert it into a dictionary, and store it.
results[auth_file] = dict(re.findall(r'K [0-9]+\n(.*)\nV [0-9]+\n(.*)\n',
open(os.path.join(svn_simple_folder, auth_file)).read()))
except Exception as _:
pass
return results
def getCurrentSVNUsers(url):
"""Tries to fetch the current SVN in the current checkout by scanning the
SVN authorization folder for a match with the current SVN URL."""
netloc = urlparse.urlparse(url)[1]
auth_infos = getSVNAuthInfo()
results = []
for _, auth_info in auth_infos.iteritems():
if ('svn:realmstring' in auth_info
and netloc in auth_info['svn:realmstring']):
username = auth_info['username']
results.append(username)
if 'google.com' in username:
results.append(username.replace('google.com', 'chromium.org'))
return results
def prompt(question):
while True:
print question + " [y|n]:",
answer = sys.stdin.readline()
if answer.lower().startswith('n'):
return False
elif answer.lower().startswith('y'):
return True
def text_prompt(question, default):
print question + " [" + default + "]:"
answer = sys.stdin.readline()
if answer.strip() == "":
return default
return answer
def drover(options, args):
revision = options.revert or options.merge
# Initialize some variables used below. They can be overwritten by
# the drover.properties file.
BASE_URL = "svn://svn.chromium.org/chrome"
REVERT_ALT_URLS = ['svn://svn.chromium.org/blink',
'svn://svn.chromium.org/chrome-internal',
'svn://svn.chromium.org/native_client']
TRUNK_URL = BASE_URL + "/trunk/src"
BRANCH_URL = BASE_URL + "/branches/$branch/src"
SKIP_CHECK_WORKING = True
PROMPT_FOR_AUTHOR = False
NO_ALT_URLS = options.no_alt_urls
DEFAULT_WORKING = "drover_" + str(revision)
if options.branch:
DEFAULT_WORKING += ("_" + options.branch)
if not isMinimumSVNVersion(1, 5):
print "You need to use at least SVN version 1.5.x"
return 1
# Override the default properties if there is a drover.properties file.
global file_pattern_
if os.path.exists("drover.properties"):
print 'Using options from %s' % os.path.join(
os.getcwd(), 'drover.properties')
FILE_PATTERN = file_pattern_
f = open("drover.properties")
exec(f)
f.close()
if FILE_PATTERN:
file_pattern_ = FILE_PATTERN
NO_ALT_URLS = True
if options.revert and options.branch:
print 'Note: --branch is usually not needed for reverts.'
url = BRANCH_URL.replace("$branch", options.branch)
elif options.merge and options.sbranch:
url = BRANCH_URL.replace("$branch", options.sbranch)
elif options.revert:
url = options.url or BASE_URL
file_pattern_ = r"[ ]+([MADUC])[ ]+((/.*)/(.*))"
else:
url = TRUNK_URL
working = options.workdir or DEFAULT_WORKING
if options.local:
working = os.getcwd()
if not inCheckoutRoot(working):
print "'%s' appears not to be the root of a working copy" % working
return 1
if (isSVNDirty() and not
prompt("Working copy contains uncommitted files. Continue?")):
return 1
if options.revert and not NO_ALT_URLS and not options.url:
for cur_url in [url] + REVERT_ALT_URLS:
try:
commit_date_str = getSVNInfo(
cur_url, options.revert).get('Last Changed Date', 'x').split()[0]
commit_date = datetime.datetime.strptime(commit_date_str, '%Y-%m-%d')
if (datetime.datetime.now() - commit_date).days < 180:
if cur_url != url:
print 'Guessing svn repo: %s.' % cur_url,
print 'Use --no-alt-urls to disable heuristic.'
url = cur_url
break
except ValueError:
pass
command = 'svn log ' + url + " -r "+str(revision) + " -v"
os.system(command)
if not (options.revertbot or prompt("Is this the correct revision?")):
return 0
if (os.path.exists(working)) and not options.local:
if not (options.revertbot or SKIP_CHECK_WORKING or
prompt("Working directory: '%s' already exists, clobber?" % working)):
return 0
gclient_utils.rmtree(working)
if not options.local:
os.makedirs(working)
os.chdir(working)
if options.merge:
action = "Merge"
if not options.local:
branch_url = BRANCH_URL.replace("$branch", options.branch)
# Checkout everything but stuff that got added into a new dir
checkoutRevision(url, revision, branch_url)
# Merge everything that changed
mergeRevision(url, revision)
# "Export" files that were added from the source and add them to branch
exportRevision(url, revision)
# Delete directories that were deleted (file deletes are handled in the
# merge).
deleteRevision(url, revision)
elif options.revert:
action = "Revert"
pop_em = not options.url
checkoutRevision(url, revision, url, True, pop_em)
revertRevision(url, revision)
revertExportRevision(url, revision)
# Check the base url so we actually find the author who made the change
if options.auditor:
author = options.auditor
else:
author = getAuthor(url, revision)
if not author:
author = getAuthor(TRUNK_URL, revision)
# Check that the author of the CL is different than the user making
# the revert. If they're the same, then we'll want to prompt the user
# for a different reviewer to TBR.
current_users = getCurrentSVNUsers(BASE_URL)
is_self_revert = options.revert and author in current_users
filename = str(revision)+".txt"
out = open(filename,"w")
drover_title = '%s %s' % (action, revision)
revision_log = getRevisionLog(url, revision).splitlines()
if revision_log:
commit_title = revision_log[0]
# Limit title to 68 chars so git log --oneline is <80 chars.
max_commit_title = 68 - (len(drover_title) + 3)
if len(commit_title) > max_commit_title:
commit_title = commit_title[:max_commit_title-3] + '...'
drover_title += ' "%s"' % commit_title
out.write(drover_title + '\n\n')
for line in revision_log:
out.write('> %s\n' % line)
if author:
out.write("\nTBR=" + author)
out.close()
change_cmd = 'change ' + str(revision) + " " + filename
if options.revertbot:
if sys.platform == 'win32':
os.environ['SVN_EDITOR'] = 'cmd.exe /c exit'
else:
os.environ['SVN_EDITOR'] = 'true'
runGcl(change_cmd)
os.unlink(filename)
if options.local:
return 0
print author
print revision
print ("gcl upload " + str(revision) +
" --send_mail --no_presubmit --reviewers=" + author)
if options.revertbot or prompt("Would you like to upload?"):
if PROMPT_FOR_AUTHOR or is_self_revert:
author = text_prompt("Enter new author or press enter to accept default",
author)
if options.revertbot and options.revertbot_reviewers:
author += ","
author += options.revertbot_reviewers
gclUpload(revision, author)
else:
print "Deleting the changelist."
print "gcl delete " + str(revision)
runGcl("delete " + str(revision))
return 0
# We commit if the reverbot is set to commit automatically, or if this is
# not the revertbot and the user agrees.
if options.revertbot_commit or (not options.revertbot and
prompt("Would you like to commit?")):
print "gcl commit " + str(revision) + " --no_presubmit --force"
return runGcl("commit " + str(revision) + " --no_presubmit --force")
else:
return 0
def main():
option_parser = optparse.OptionParser(usage=USAGE % {"app": sys.argv[0]})
option_parser.add_option('-m', '--merge', type="int",
help='Revision to merge from trunk to branch')
option_parser.add_option('-b', '--branch',
help='Branch to revert or merge from')
option_parser.add_option('-l', '--local', action='store_true',
help='Local working copy to merge to')
option_parser.add_option('-s', '--sbranch',
help='Source branch for merge')
option_parser.add_option('-r', '--revert', type="int",
help='Revision to revert')
option_parser.add_option('-w', '--workdir',
help='subdir to use for the revert')
option_parser.add_option('-u', '--url',
help='svn url to use for the revert')
option_parser.add_option('-a', '--auditor',
help='overrides the author for reviewer')
option_parser.add_option('--revertbot', action='store_true',
default=False)
option_parser.add_option('--no-alt-urls', action='store_true',
help='Disable heuristics used to determine svn url')
option_parser.add_option('--revertbot-commit', action='store_true',
default=False)
option_parser.add_option('--revertbot-reviewers')
options, args = option_parser.parse_args()
if not options.merge and not options.revert:
option_parser.error("You need at least --merge or --revert")
return 1
if options.merge and not (options.branch or options.local):
option_parser.error("--merge requires --branch or --local")
return 1
if options.local and (options.revert or options.branch):
option_parser.error("--local cannot be used with --revert or --branch")
return 1
return drover(options, args)
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:
sys.stderr.write('interrupted\n')
sys.exit(1)
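
For context on the tool being deleted, drover's revert path (checkoutRevision followed by revertRevision above) boils down to a short sequence of svn invocations. The sketch below is illustrative only: sketch_revert is a hypothetical name, and the intermediate sparse "svn update" calls are summarized in comments rather than reproduced.

import os
import subprocess

def sketch_revert(url, revision):
  # Sparse, non-recursive checkout of the branch root ('svn checkout -N').
  subprocess.check_call(['svn', 'checkout', '-N', url])
  os.chdir(url.rsplit('/', 1)[-1])
  # drover then deepened the tree with 'svn update --depth empty' for each
  # parent directory the revision touched, and ran 'svn up' on each file.
  # Finally the change itself was reverse-merged into the working copy:
  subprocess.check_call(
      ['svn', 'merge', '--ignore-ancestry', '-c', '-%d' % revision, url, '.'])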

gcl (8 lines)

@@ -1,8 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
base_dir=$(dirname "$0")
PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/gcl.py" "$@"

gcl.bat

@@ -1,6 +0,0 @@
@echo off
setlocal
:: This is required with cygwin only.
PATH=%~dp0;%PATH%
set PYTHONDONTWRITEBYTECODE=1
call python "%~dp0gcl.py" %*

gcl.py (1523 lines)

File diff suppressed because it is too large.

gclient_utils.py

@@ -1056,7 +1056,7 @@ class ExecutionQueue(object):
      work_queue.ready_cond.release()


-def GetEditor(git, git_editor=None):
+def GetEditor(git_editor=None):
   """Returns the most plausible editor to use.

   In order of preference:
@@ -1068,14 +1068,8 @@ def GetEditor(git, git_editor=None):
   In the case of git-cl, this matches git's behaviour, except that it does not
   include dumb terminal detection.
-
-  In the case of gcl, this matches svn's behaviour, except that it does not
-  accept a command-line flag or check the editor-cmd configuration variable.
   """
-  if git:
-    editor = os.environ.get('GIT_EDITOR') or git_editor
-  else:
-    editor = os.environ.get('SVN_EDITOR')
+  editor = os.environ.get('GIT_EDITOR') or git_editor
   if not editor:
     editor = os.environ.get('VISUAL')
   if not editor:
@@ -1105,7 +1099,7 @@ def RunEditor(content, git, git_editor=None):
   fileobj.close()

   try:
-    editor = GetEditor(git, git_editor=git_editor)
+    editor = GetEditor(git_editor=git_editor)
     if not editor:
       return None
     cmd = '%s %s' % (editor, filename)
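
After this change GetEditor no longer consults SVN_EDITOR; per the hunk above, the lookup is GIT_EDITOR, then the git-supplied editor (core.editor via the git_editor argument), then VISUAL. A standalone sketch of that order follows; get_editor is an illustrative name, and the EDITOR and vi/notepad fallbacks are assumptions based on the docstring's preference list rather than lines visible in this diff.

import os
import sys

def get_editor(git_editor=None):
  # Lookup order shown in the hunk: GIT_EDITOR, then git's core.editor value.
  editor = os.environ.get('GIT_EDITOR') or git_editor
  if not editor:
    editor = os.environ.get('VISUAL')
  # Assumed fallbacks (not visible in the hunk above):
  if not editor:
    editor = os.environ.get('EDITOR')
  if not editor:
    editor = 'notepad' if sys.platform.startswith('win') else 'vi'
  return editor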

git_cl.py

@@ -1025,7 +1025,7 @@ class Changelist(object):
   def GetCCList(self):
     """Return the users cc'd on this CL.

-    Return is a string suitable for passing to gcl with the --cc flag.
+    Return is a string suitable for passing to git cl with the --cc flag.
     """
     if self.cc is None:
       base_cc = settings.GetDefaultCCList()

presubmit_support.py

@@ -1009,32 +1009,6 @@ class SvnChange(Change):
   scm = 'svn'
   _changelists = None

-  def _GetChangeLists(self):
-    """Get all change lists."""
-    if self._changelists == None:
-      previous_cwd = os.getcwd()
-      os.chdir(self.RepositoryRoot())
-      # Need to import here to avoid circular dependency.
-      import gcl
-      self._changelists = gcl.GetModifiedFiles()
-      os.chdir(previous_cwd)
-    return self._changelists
-
-  def GetAllModifiedFiles(self):
-    """Get all modified files."""
-    changelists = self._GetChangeLists()
-    all_modified_files = []
-    for cl in changelists.values():
-      all_modified_files.extend(
-          [os.path.join(self.RepositoryRoot(), f[1]) for f in cl])
-    return all_modified_files
-
-  def GetModifiedFiles(self):
-    """Get modified files in the current CL."""
-    changelists = self._GetChangeLists()
-    return [os.path.join(self.RepositoryRoot(), f[1])
-            for f in changelists[self.Name()]]
-
   def AllFiles(self, root=None):
     """List all files under source control in the repo."""
     root = root or self.RepositoryRoot()
@@ -1413,7 +1387,7 @@ class PresubmitExecuter(object):
     """
     Args:
       change: The Change object.
-      committing: True if 'gcl commit' is running, False if 'gcl upload' is.
+      committing: True if 'git cl land' is running, False if 'git cl upload' is.
       rietveld_obj: rietveld.Rietveld client object.
       gerrit_obj: provides basic Gerrit codereview functionality.
       dry_run: if true, some Checks will be skipped.
@@ -1500,7 +1474,7 @@ def DoPresubmitChecks(change,
   Args:
     change: The Change object.
-    committing: True if 'gcl commit' is running, False if 'gcl upload' is.
+    committing: True if 'git cl land' is running, False if 'git cl upload' is.
     verbose: Prints debug info.
     output_stream: A stream to write output from presubmit tests to.
     input_stream: A stream to read input from the user.

trychange_unittest.py

@@ -1,161 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for trychange.py."""
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support.super_mox import SuperMoxTestBase
import subprocess2
import trychange
class TryChangeTestsBase(SuperMoxTestBase):
"""Setups and tear downs the mocks but doesn't test anything as-is."""
def setUp(self):
SuperMoxTestBase.setUp(self)
self.mox.StubOutWithMock(subprocess2, 'communicate')
self.mox.StubOutWithMock(trychange, 'RunGit')
self.mox.StubOutWithMock(trychange.scm.GIT, 'Capture')
self.mox.StubOutWithMock(trychange.scm.GIT, 'GenerateDiff')
self.mox.StubOutWithMock(trychange.scm.GIT, 'GetCheckoutRoot')
self.mox.StubOutWithMock(trychange.scm.GIT, 'GetEmail')
self.mox.StubOutWithMock(trychange.scm.GIT, 'GetPatchName')
self.mox.StubOutWithMock(trychange.scm.GIT, 'GetUpstreamBranch')
self.mox.StubOutWithMock(trychange.scm.SVN, 'GenerateDiff')
self.mox.StubOutWithMock(trychange.scm.SVN, 'GetCheckoutRoot')
self.mox.StubOutWithMock(trychange.scm.SVN, 'GetEmail')
self.fake_root = self.Dir()
self.expected_files = ['foo.txt', 'bar.txt']
self.options = trychange.optparse.Values()
self.options.files = self.expected_files
self.options.diff = None
self.options.name = None
self.options.email = None
self.options.exclude = []
class TryChangeUnittest(TryChangeTestsBase):
"""General trychange.py tests."""
def testMembersChanged(self):
members = [
'DieWithError', 'EPILOG', 'Escape', 'GIT', 'GIT_PATCH_DIR_BASENAME',
'GetMungedDiff', 'GuessVCS', 'GIT_BRANCH_FILE',
'HELP_STRING', 'Error', 'InvalidScript', 'NoTryServerAccess',
'OptionParser', 'PrintSuccess',
'RunCommand', 'RunGit', 'SCM', 'SVN', 'TryChange', 'USAGE', 'contextlib',
'datetime', 'errno', 'fix_encoding', 'gcl', 'gclient_utils',
'gerrit_util', 'gen_parser',
'getpass', 'itertools', 'json', 'logging', 'optparse', 'os', 'posixpath',
're', 'scm', 'shutil', 'subprocess2', 'sys', 'tempfile', 'urllib',
'urllib2', 'urlparse']
# If this test fails, you should add the relevant test.
self.compareMembers(trychange, members)
class TryChangeSimpleTest(unittest.TestCase):
# Doesn't require supermox to run.
def test_flags(self):
cmd = [
'--bot', 'bot1,bot2',
'--testfilter', 'test1',
'--testfilter', 'test2',
'--user', 'joe',
'--email', 'joe@example.com',
]
options, args = trychange.gen_parser(None).parse_args(cmd)
self.assertEquals([], args)
# pylint: disable=W0212
bot_spec = trychange._ParseBotList(options.bot, options.testfilter)
if options.testfilter:
bot_spec = trychange._ApplyTestFilter(options.testfilter, bot_spec)
values = trychange._ParseSendChangeOptions(bot_spec, options)
self.assertEquals(
[
('user', 'joe'),
('name', None),
('email', 'joe@example.com'),
('bot', 'bot1:test1,test2'),
('bot', 'bot2:test1,test2'),
],
values)
def test_flags_bad_combination(self):
cmd = [
'--bot', 'bot1:test1',
'--testfilter', 'test2',
]
options, args = trychange.gen_parser(None).parse_args(cmd)
self.assertEquals([], args)
try:
# pylint: disable=W0212
trychange._ParseBotList(options.bot, options.testfilter)
self.fail()
except ValueError:
pass
class SVNUnittest(TryChangeTestsBase):
"""trychange.SVN tests."""
def testMembersChanged(self):
members = [
'AutomagicalSettings', 'CaptureStatus', 'GetCodeReviewSetting',
'ReadRootFile', 'GenerateDiff', 'GetFileNames', 'files', 'file_tuples',
]
# If this test fails, you should add the relevant test.
self.compareMembers(trychange.SVN, members)
def testBasic(self):
# pylint: disable=E1103
trychange.os.path.abspath(self.fake_root).AndReturn(self.fake_root)
trychange.scm.SVN.GetCheckoutRoot(self.fake_root).AndReturn(self.fake_root)
trychange.scm.SVN.GenerateDiff(['foo.txt', 'bar.txt'],
self.fake_root,
full_move=True,
revision=None).AndReturn('A diff')
trychange.scm.SVN.GetEmail(self.fake_root).AndReturn('georges@example.com')
self.mox.ReplayAll()
svn = trychange.SVN(self.options, self.fake_root, self.options.files)
self.assertEqual(svn.GetFileNames(), self.expected_files)
self.assertEqual(svn.checkout_root, self.fake_root)
self.assertEqual(svn.GenerateDiff(), 'A diff')
class GITUnittest(TryChangeTestsBase):
"""trychange.GIT tests."""
def testMembersChanged(self):
members = [
'AutomagicalSettings', 'CaptureStatus', 'GetCodeReviewSetting',
'ReadRootFile', 'GenerateDiff', 'GetFileNames', 'files', 'file_tuples',
]
# If this test fails, you should add the relevant test.
self.compareMembers(trychange.GIT, members)
def testBasic(self):
# pylint: disable=E1103
trychange.os.path.abspath(self.fake_root).AndReturn(self.fake_root)
trychange.scm.GIT.GetCheckoutRoot(self.fake_root).AndReturn(self.fake_root)
trychange.scm.GIT.GetUpstreamBranch(self.fake_root).AndReturn('somewhere')
trychange.RunGit(['diff-index', 'HEAD'])
trychange.scm.GIT.GenerateDiff(self.fake_root,
full_move=True,
files=['foo.txt', 'bar.txt'],
branch='somewhere').AndReturn('A diff')
trychange.scm.GIT.GetPatchName(self.fake_root).AndReturn('bleh-1233')
trychange.scm.GIT.GetEmail(self.fake_root).AndReturn('georges@example.com')
self.mox.ReplayAll()
git = trychange.GIT(self.options, self.fake_root, self.options.files)
self.assertEqual(git.GetFileNames(), self.expected_files)
self.assertEqual(git.checkout_root, self.fake_root)
self.assertEqual(git.GenerateDiff(), 'A diff')
if __name__ == '__main__':
unittest.main()

File diff suppressed because it is too large.