Make gclient_scm.py use cache_dir

Instead of having custom logic for dealing with cache directories, use
git_cache.py to populate caches.

Also fixes a bug in git_cache.py where it was looking for lockfiles in cwd rather than the cache dir.

Other changes:
* _Run now returns output.
* Always print to stdout in CheckCallAndFilterOutput, even when the byte received is a carriage return.  Git emits progress reports terminated by carriage returns rather than newlines; if we stripped the CRs, all of the progress output would end up concatenated on a single line.
* Removed the members-changed tests; it's not very useful to know that a new import was added.

BUG=339171

Review URL: https://codereview.chromium.org/180243006

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@254248 0039d316-1c4b-4281-b951-d872f2087c98
experimental/szager/collated-output
hinoka@google.com 11 years ago
parent a2aaa63f80
commit 267f33e6ad

@ -1829,7 +1829,9 @@ class OptionParser(optparse.OptionParser):
jobs = max(8, gclient_utils.NumLocalCpus())
# cmp: 2013/06/19
# Temporary workaround to lower bot-load on SVN server.
if os.environ.get('CHROME_HEADLESS') == '1':
# Bypassed if a bot_update flag is detected.
if (os.environ.get('CHROME_HEADLESS') == '1' and
not os.path.exists('update.flag')):
jobs = 1
self.add_option(

@ -4,14 +4,12 @@
"""Gclient-specific SCM-specific operations."""
import collections
import logging
import os
import posixpath
import re
import sys
import tempfile
import threading
import traceback
import urlparse
@ -148,9 +146,6 @@ class GitWrapper(SCMWrapper):
remote = 'origin'
cache_dir = None
# If a given cache is used in a solution more than once, prevent multiple
# threads from updating it simultaneously.
cache_locks = collections.defaultdict(threading.Lock)
def __init__(self, url=None, root_dir=None, relpath=None):
"""Removes 'git+' fake prefix from git URL."""
@ -352,10 +347,6 @@ class GitWrapper(SCMWrapper):
self._FetchAndReset(revision, file_list, options)
return_early = True
# Need to do this in the normal path as well as in the post-remote-switch
# path.
self._PossiblySwitchCache(url, options)
if return_early:
return self._Capture(['rev-parse', '--verify', 'HEAD'])
@ -681,54 +672,6 @@ class GitWrapper(SCMWrapper):
base_url = self.url
return base_url[:base_url.rfind('/')] + url
@staticmethod
def _NormalizeGitURL(url):
'''Takes a git url, strips the scheme, and ensures it ends with '.git'.'''
idx = url.find('://')
if idx != -1:
url = url[idx+3:]
if not url.endswith('.git'):
url += '.git'
return url
def _PossiblySwitchCache(self, url, options):
  """Handles switching a repo from with-cache to direct, or vice versa.

  When we go from direct to with-cache, the remote url changes from the
  'real' url to the local file url (in cache_dir). Therefore, this function
  assumes that |url| points to the correctly-switched-over local file url, if
  we're in cache_mode.

  When we go from with-cache to direct, assume that the normal url-switching
  code already flipped the remote over, and we just need to repack and break
  the dependency to the cache.
  """
  # The 'alternates' file tells git to borrow objects from another local
  # object store instead of keeping its own copies.
  altfile = os.path.join(
      self.checkout_path, '.git', 'objects', 'info', 'alternates')
  if self.cache_dir:
    # Switching TO cache mode: point this checkout's object store at the
    # cache (|url| is assumed to be the local cache path here).
    if not os.path.exists(altfile):
      try:
        with open(altfile, 'w') as f:
          f.write(os.path.join(url, 'objects'))
        # pylint: disable=C0301
        # This dance is necessary according to emperical evidence, also at:
        # http://lists-archives.com/git/713652-retrospectively-add-alternates-to-a-repository.html
        self._Run(['repack', '-ad'], options)
        self._Run(['repack', '-adl'], options)
      except Exception:
        # If something goes wrong, try to remove the altfile so we'll go down
        # this path again next time.
        try:
          os.remove(altfile)
        except OSError as e:
          print >> sys.stderr, "FAILED: os.remove('%s') -> %s" % (altfile, e)
        # Re-raise the original failure after best-effort cleanup.
        raise
  else:
    # Switching to direct mode: repack so the checkout owns a full copy of
    # its objects, then sever the link to the cache.
    if os.path.exists(altfile):
      self._Run(['repack', '-a'], options)
      os.remove(altfile)
def _CreateOrUpdateCache(self, url, options):
"""Make a new git mirror or update existing mirror for |url|, and return the
mirror URI to clone from.
@ -737,60 +680,12 @@ class GitWrapper(SCMWrapper):
"""
if not self.cache_dir:
return url
# Replace - with -- to avoid ambiguity. / with - to flatten folder structure
folder = os.path.join(
self.cache_dir,
self._NormalizeGitURL(url).replace('-', '--').replace('/', '-'))
altfile = os.path.join(folder, 'objects', 'info', 'alternates')
# If we're bringing an old cache up to date or cloning a new cache, and the
# existing repo is currently a direct clone, use its objects to help out
# the fetch here.
checkout_objects = os.path.join(self.checkout_path, '.git', 'objects')
checkout_altfile = os.path.join(checkout_objects, 'info', 'alternates')
use_reference = (
os.path.exists(checkout_objects) and
not os.path.exists(checkout_altfile))
v = ['-v'] if options.verbose else []
filter_fn = lambda l: '[up to date]' not in l
with self.cache_locks[folder]:
gclient_utils.safe_makedirs(self.cache_dir)
if not os.path.exists(os.path.join(folder, 'config')):
gclient_utils.rmtree(folder)
cmd = ['clone'] + v + ['-c', 'core.deltaBaseCacheLimit=2g',
'--progress', '--bare']
if use_reference:
cmd += ['--reference', os.path.abspath(self.checkout_path)]
self._Run(cmd + [url, folder],
options, filter_fn=filter_fn, cwd=self.cache_dir, retry=True)
else:
# For now, assert that host/path/to/repo.git is identical. We may want
# to relax this restriction in the future to allow for smarter cache
# repo update schemes (such as pulling the same repo, but from a
# different host).
existing_url = self._Capture(['config', 'remote.%s.url' % self.remote],
cwd=folder)
assert self._NormalizeGitURL(existing_url) == self._NormalizeGitURL(url)
if use_reference:
with open(altfile, 'w') as f:
f.write(os.path.abspath(checkout_objects))
# Would normally use `git remote update`, but it doesn't support
# --progress, so use fetch instead.
self._Run(['fetch'] + v + ['--multiple', '--progress', '--all'],
options, filter_fn=filter_fn, cwd=folder, retry=True)
# If the clone has an object dependency on the existing repo, break it
# with repack and remove the linkage.
if os.path.exists(altfile):
self._Run(['repack', '-a'], options, cwd=folder)
os.remove(altfile)
return folder
self._Run(['cache', 'populate'] + v +
['--shallow', '--cache-dir', self.cache_dir, url],
options, cwd=self._root_dir, retry=True)
return self._Run(['cache', 'exists', '--cache-dir', self.cache_dir, url],
options).strip()
def _Clone(self, revision, url, options):
"""Clone a git repository from the given URL.
@ -1044,7 +939,7 @@ class GitWrapper(SCMWrapper):
stdout = kwargs.get('stdout', sys.stdout)
stdout.write('\n________ running \'git %s\' in \'%s\'\n' % (
' '.join(args), kwargs['cwd']))
gclient_utils.CheckCallAndFilter(['git'] + args, **kwargs)
return gclient_utils.CheckCallAndFilter(['git'] + args, **kwargs)
class SVNWrapper(SCMWrapper):

@ -5,6 +5,7 @@
"""Generic utils."""
import codecs
import cStringIO
import logging
import os
import pipes
@ -428,6 +429,7 @@ def CheckCallAndFilter(args, stdout=None, filter_fn=None,
"""
assert print_stdout or filter_fn
stdout = stdout or sys.stdout
output = cStringIO.StringIO()
filter_fn = filter_fn or (lambda x: None)
sleep_interval = RETRY_INITIAL_SLEEP
@ -453,9 +455,10 @@ def CheckCallAndFilter(args, stdout=None, filter_fn=None,
filter_fn(None)
in_line = ''
while in_byte:
output.write(in_byte)
if print_stdout:
stdout.write(in_byte)
if in_byte != '\r':
if print_stdout:
stdout.write(in_byte)
if in_byte != '\n':
in_line += in_byte
else:
@ -480,7 +483,7 @@ def CheckCallAndFilter(args, stdout=None, filter_fn=None,
raise
if rv == 0:
return 0
return output.getvalue()
if not retry:
break
print ("WARNING: subprocess '%s' in %s failed; will retry after a short "

@ -240,8 +240,9 @@ def CMDunlock(parser, args):
url = args[0]
repo_dirs = [os.path.join(options.cache_dir, UrlToCacheDir(url))]
else:
repo_dirs = [path for path in os.listdir(options.cache_dir)
if os.path.isdir(path)]
repo_dirs = [os.path.join(options.cache_dir, path)
for path in os.listdir(options.cache_dir)
if os.path.isdir(os.path.join(options.cache_dir, path))]
lockfiles = [repo_dir + '.lock' for repo_dir in repo_dirs
if os.path.exists(repo_dir + '.lock')]
@ -255,6 +256,9 @@ def CMDunlock(parser, args):
for repo_dir in repo_dirs:
lf = Lockfile(repo_dir)
if lf.break_lock():
config_lock = os.path.join(repo_dir, 'config.lock')
if os.path.exists(config_lock):
os.remove(config_lock)
unlocked.append(repo_dir)
else:
untouched.append(repo_dir)

@ -96,32 +96,6 @@ class SVNWrapperTestCase(BaseTestCase):
BaseTestCase.setUp(self)
self.url = self.SvnUrl()
def testDir(self):
  """Checks that the SVN wrapper exposes exactly the expected members."""
  # If you add a member, be sure to add the relevant test!
  expected = [
      'BinaryExists', 'FullUrlForRelativeUrl', 'GetCheckoutRoot',
      'GetRevisionDate', 'GetUsableRev', 'Svnversion', 'RunCommand',
      'cleanup', 'diff', 'name', 'pack', 'relpath', 'revert', 'revinfo',
      'runhooks', 'status', 'update', 'updatesingle', 'url',
  ]
  self.compareMembers(self._scm_wrapper('svn://a'), expected)
def testUnsupportedSCM(self):
args = ['gopher://foo', self.root_dir, self.relpath]
exception_msg = 'No SCM found for url gopher://foo'
@ -831,33 +805,6 @@ from :3
class ManagedGitWrapperTestCase(BaseGitWrapperTestCase):
def testDir(self):
  """Checks that the git wrapper exposes exactly the expected members."""
  # If you add a member, be sure to add the relevant test!
  expected = [
      'BinaryExists', 'FullUrlForRelativeUrl', 'GetCheckoutRoot',
      'GetRevisionDate', 'GetUsableRev', 'RunCommand', 'cache_dir',
      'cache_locks', 'cleanup', 'diff', 'name', 'pack',
      'UpdateSubmoduleConfig', 'relpath', 'remote', 'revert', 'revinfo',
      'runhooks', 'status', 'update', 'url',
  ]
  self.compareMembers(gclient_scm.CreateSCM(url=self.url), expected)
def testRevertMissing(self):
if not self.enabled:

@ -24,30 +24,6 @@ class GclientUtilBase(SuperMoxTestBase):
self.mox.StubOutWithMock(subprocess2, 'communicate')
class GclientUtilsUnittest(GclientUtilBase):
"""General gclient_utils.py tests."""
def testMembersChanged(self):
  """Checks that gclient_utils exposes exactly the expected members."""
  # If this test fails, you should add the relevant test.
  expected = [
      'Annotated', 'AutoFlush', 'CheckCallAndFilter', 'CommandToStr',
      'CheckCallAndFilterAndHeader', 'Error', 'ExecutionQueue', 'FileRead',
      'FileWrite', 'FindFileUpwards', 'FindGclientRoot',
      'GetGClientRootAndEntries', 'GetEditor', 'GetExeSuffix',
      'GetMacWinOrLinux', 'GitFilter', 'IsDateRevision', 'MakeDateRevision',
      'MakeFileAutoFlush', 'MakeFileAnnotated', 'PathDifference',
      'ParseCodereviewSettingsContent', 'NumLocalCpus', 'PrintableObject',
      'RETRY_INITIAL_SLEEP', 'RETRY_MAX', 'RunEditor', 'GCLIENT_CHILDREN',
      'GCLIENT_CHILDREN_LOCK', 'GClientChildren', 'SplitUrlRevision',
      'SyntaxErrorToError', 'UpgradeToHttps', 'Wrapper', 'WorkItem',
      'codecs', 'lockedmethod', 'logging', 'os', 'pipes', 'Queue', 're',
      'rmtree', 'safe_makedirs', 'safe_rename', 'stat', 'subprocess',
      'subprocess2', 'sys', 'tempfile', 'threading', 'time', 'urlparse',
  ]
  self.compareMembers(gclient_utils, expected)
class CheckCallAndFilterTestCase(GclientUtilBase):
class ProcessIdMock(object):
def __init__(self, test_string):

Loading…
Cancel
Save