# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from recipe_engine import recipe_api


class RevisionResolver(object):
  """Resolves the revision based on build properties."""

  def resolve(self, properties):  # pragma: no cover
    raise NotImplementedError()


class RevisionFallbackChain(RevisionResolver):
  """Specify that a given project's sync revision follows the fallback chain."""

  def __init__(self, default=None):
    self._default = default

  def resolve(self, properties):
    """Resolve the revision via the revision fallback chain.

    If the given revision was set using the revision_fallback_chain() function,
    this function will follow the chain, looking at relevant build properties
    until it finds one set or reaches the end of the chain and returns the
    default. If the given revision was not set using revision_fallback_chain(),
    this function just returns it as-is.
    """
    return (properties.get('parent_got_revision') or
            properties.get('orig_revision') or
            properties.get('revision') or
            self._default)
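
  # Illustrative sketch (not part of the original module): how the fallback
  # chain resolves, assuming a build-properties dict like the one below.
  #
  #   props = {'parent_got_revision': 'abc123', 'revision': 'refs/heads/main'}
  #   RevisionFallbackChain(default='HEAD').resolve(props)  # -> 'abc123'
  #   RevisionFallbackChain(default='HEAD').resolve({})     # -> 'HEAD'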


def jsonish_to_python(spec, is_top=False):
  """Turn a json spec into a python parsable object.

  This exists because Gclient specs, while resembling json, are actually
  ingested using a python "eval()". Therefore a bit of plumbing is required
  to turn our newly constructed Gclient spec into a gclient-readable spec.
  """
  ret = ''
  if is_top:  # We're the 'top' level, so treat this dict as a suite.
    ret = '\n'.join(
        '%s = %s' % (k, jsonish_to_python(spec[k])) for k in sorted(spec)
    )
  else:
    if isinstance(spec, dict):
      ret += '{'
      ret += ', '.join(
          "%s: %s" % (repr(str(k)), jsonish_to_python(spec[k]))
          for k in sorted(spec)
      )
      ret += '}'
    elif isinstance(spec, list):
      ret += '['
      ret += ', '.join(jsonish_to_python(x) for x in spec)
      ret += ']'
    elif isinstance(spec, basestring):
      ret = repr(str(spec))
    else:
      ret = repr(spec)
  return ret
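
# Illustrative sketch (not part of the original module): what jsonish_to_python
# emits for a small top-level spec. Keys are sorted and strings are repr()'d.
#
#   jsonish_to_python({'solutions': [{'name': 'src', 'deps_file': 'DEPS'}]},
#                     is_top=True)
#   # -> "solutions = [{'deps_file': 'DEPS', 'name': 'src'}]"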


class GclientApi(recipe_api.RecipeApi):
  # Singleton object to indicate to checkout() that we should run a revert if
  # we detect that we're on the tryserver.
  RevertOnTryserver = object()

  def __init__(self, **kwargs):
    super(GclientApi, self).__init__(**kwargs)
    self.USE_MIRROR = None
    self._spec_alias = None

  def __call__(self, name, cmd, infra_step=True, **kwargs):
    """Wrapper for easy calling of gclient steps."""
    assert isinstance(cmd, (list, tuple))
    prefix = 'gclient '
    if self.spec_alias:
      prefix = ('[spec: %s] ' % self.spec_alias) + prefix

    with self.m.context(
        env_suffixes={'PATH': [self.repo_resource()]}):
      return self.m.python(prefix + name,
                           self.repo_resource('gclient.py'),
                           cmd,
                           infra_step=infra_step,
                           **kwargs)
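
  # Illustrative sketch (not part of the original module): a call such as
  #
  #   self('sync', ['sync', '--verbose'])
  #
  # runs "gclient.py sync --verbose" from this repo's resources as an infra
  # step named "gclient sync" (or "[spec: <alias>] gclient sync" when a
  # spec_alias is set).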

  @property
  def use_mirror(self):
    """Indicates if gclient will use mirrors in its configuration."""
    if self.USE_MIRROR is None:
      self.USE_MIRROR = self.m.properties.get('use_mirror', True)
    return self.USE_MIRROR

  @use_mirror.setter
  def use_mirror(self, val):  # pragma: no cover
    self.USE_MIRROR = val

  @property
  def spec_alias(self):
    """Optional name for the current spec for step naming."""
    return self._spec_alias

  @spec_alias.setter
  def spec_alias(self, name):
    self._spec_alias = name

  @spec_alias.deleter
  def spec_alias(self):
    self._spec_alias = None

  def get_config_defaults(self):
    return {
        'USE_MIRROR': self.use_mirror,
        'CACHE_DIR': self.m.infra_paths.default_git_cache_dir,
    }

  @staticmethod
  def config_to_pythonish(cfg):
    return jsonish_to_python(cfg.as_jsonish(), True)

  # TODO(machenbach): Remove this method when the old mapping is deprecated.
  @staticmethod
  def got_revision_reverse_mapping(cfg):
    """Returns the merged got_revision_reverse_mapping.

    Returns (dict): A mapping from property name -> project name. It merges the
    values of the deprecated got_revision_mapping and the new
    got_revision_reverse_mapping.
    """
    rev_map = cfg.got_revision_mapping.as_jsonish()
    reverse_rev_map = cfg.got_revision_reverse_mapping.as_jsonish()
    combined_length = len(rev_map) + len(reverse_rev_map)
    reverse_rev_map.update({v: k for k, v in rev_map.items()})

    # Make sure we never have duplicate values in the old map.
    assert combined_length == len(reverse_rev_map)
    return reverse_rev_map
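
  # Illustrative sketch (not part of the original module): merging the two
  # mappings. With the deprecated got_revision_mapping set to
  # {'src': 'got_revision'} (path -> property) and got_revision_reverse_mapping
  # set to {'got_v8_revision': 'src/v8'} (property -> path), the merged result
  # would be {'got_revision': 'src', 'got_v8_revision': 'src/v8'}.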

  def resolve_revision(self, revision):
    if hasattr(revision, 'resolve'):
      return revision.resolve(self.m.properties)
    return revision

  def sync(self, cfg, extra_sync_flags=None, **kwargs):
    revisions = []
    self.set_patch_repo_revision(gclient_config=cfg)
    for i, s in enumerate(cfg.solutions):
      if i == 0 and s.revision is None:
        s.revision = RevisionFallbackChain()

      if s.revision is not None and s.revision != '':
        fixed_revision = self.resolve_revision(s.revision)
        if fixed_revision:
          revisions.extend(['--revision', '%s@%s' % (s.name, fixed_revision)])

    for name, revision in sorted(cfg.revisions.items()):
      fixed_revision = self.resolve_revision(revision)
      if fixed_revision:
        revisions.extend(['--revision', '%s@%s' % (name, fixed_revision)])

    test_data_paths = set(self.got_revision_reverse_mapping(cfg).values() +
                          [s.name for s in cfg.solutions])
    step_test_data = lambda: (
        self.test_api.output_json(test_data_paths))
    try:
      # clean() isn't used because the gclient sync flags passed in checkout()
      # do much the same thing, and they're more correct than doing a separate
      # 'gclient revert' because it makes sure the other args are correct when
      # a repo was deleted and needs to be re-cloned (notably
      # --with_branch_heads), whereas 'revert' uses default args for clone
      # operations.
      #
      # TODO(mmoss): To be like current official builders, this step could
      # just delete the whole <slave_name>/build/ directory and start each
      # build from scratch. That might be the least bad solution, at least
      # until we have a reliable gclient method to produce a pristine working
      # dir for git-based builds (e.g. maybe some combination of 'git
      # reset/clean -fx' and removing the 'out' directory).
      j = '-j2' if self.m.platform.is_win else '-j8'
      args = ['sync', '--verbose', '--nohooks', j, '--reset', '--force',
              '--upstream', '--no-nag-max', '--with_branch_heads',
              '--with_tags']
      args.extend(extra_sync_flags or [])
      if cfg.delete_unversioned_trees:
        args.append('--delete_unversioned_trees')
      self('sync', args + revisions +
           ['--output-json', self.m.json.output()],
           step_test_data=step_test_data,
           **kwargs)
    finally:
      result = self.m.step.active_result
      if result.json.output is not None:
        solutions = result.json.output['solutions']
        for propname, path in sorted(
            self.got_revision_reverse_mapping(cfg).items()):
          # gclient json paths always end with a slash
          info = solutions.get(path + '/') or solutions.get(path)
          if info:
            result.presentation.properties[propname] = info['revision']

    return result
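
  # Illustrative sketch (not part of the original module): for a config whose
  # first solution is 'src' with no explicit revision, and a build property
  # revision='deadbeef', sync() would pass "--revision src@deadbeef" to
  # "gclient sync" and then surface the got_revision* properties parsed from
  # the step's --output-json file.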

  def inject_parent_got_revision(self, gclient_config=None, override=False):
    """Match gclient config to build revisions obtained from build_properties.

    Args:
      gclient_config (gclient config object) - The config to manipulate. A
        value of None manipulates the module's built-in config (self.c).
      override (bool) - If True, will forcibly set revision and custom_vars
        even if the config already contains values for them.
    """
    cfg = gclient_config or self.c

    for prop, custom_var in cfg.parent_got_revision_mapping.items():
      val = str(self.m.properties.get(prop, ''))
      # TODO(infra): Fix coverage.
      if val:  # pragma: no cover
        # Special case for 'src', inject into solutions[0]
        if custom_var is None:
          # This is not covered because we are deprecating this feature and
          # it is no longer used by the public recipes.
          if cfg.solutions[0].revision is None or override:  # pragma: no cover
            cfg.solutions[0].revision = val
        else:
          if custom_var not in cfg.solutions[0].custom_vars or override:
            cfg.solutions[0].custom_vars[custom_var] = val
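
  # Illustrative sketch (not part of the original module): with
  # parent_got_revision_mapping = {'parent_got_v8_revision': 'v8_revision'} and
  # a build property parent_got_v8_revision='abc123', this would set
  # cfg.solutions[0].custom_vars['v8_revision'] = 'abc123' (unless that
  # custom_var is already set and override is False).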

  def checkout(self, gclient_config=None, revert=RevertOnTryserver,
               inject_parent_got_revision=True, extra_sync_flags=None,
               **kwargs):
    """Performs a full gclient checkout and returns the sync step's result."""
    cfg = gclient_config or self.c
    assert cfg.complete()

    if revert is self.RevertOnTryserver:
      revert = self.m.tryserver.is_tryserver

    if inject_parent_got_revision:
      self.inject_parent_got_revision(cfg, override=True)

    self('setup', ['config', '--spec', self.config_to_pythonish(cfg)], **kwargs)

    sync_step = None
    try:
      sync_step = self.sync(cfg, extra_sync_flags=extra_sync_flags, **kwargs)

      cfg_cmds = [
          ('user.name', 'local_bot'),
          ('user.email', 'local_bot@example.com'),
      ]
      for var, val in cfg_cmds:
        name = 'recurse (git config %s)' % var
        self(name, ['recurse', 'git', 'config', var, val], **kwargs)
    finally:
      cwd = self.m.context.cwd or self.m.path['start_dir']
      if 'checkout' not in self.m.path:
        self.m.path['checkout'] = cwd.join(
            *cfg.solutions[0].name.split(self.m.path.sep))

    return sync_step
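
  # Illustrative recipe usage (not part of the original module), assuming a
  # named config such as 'depot_tools' is defined in this module's config.py:
  #
  #   api.gclient.set_config('depot_tools')
  #   api.gclient.checkout()
  #   api.gclient.runhooks()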

  def runhooks(self, args=None, name='runhooks', **kwargs):
    args = args or []
    assert isinstance(args, (list, tuple))
    with self.m.context(cwd=(self.m.context.cwd or self.m.path['checkout'])):
      return self(name, ['runhooks'] + list(args), infra_step=False, **kwargs)

  def break_locks(self):
    """Removes all index.lock files.

    If a previous run of git crashed, the bot was reset, etc., we might end up
    with leftover index.lock files.
    """
    self.m.python.inline(
      'cleanup index.lock',
      """
        import os, sys

        build_path = sys.argv[1]
        if os.path.exists(build_path):
          for (path, dir, files) in os.walk(build_path):
            for cur_file in files:
              if cur_file.endswith('index.lock'):
                path_to_file = os.path.join(path, cur_file)
                print('deleting %s' % path_to_file)
                os.remove(path_to_file)
      """,
      args=[self.m.path['start_dir']],
      infra_step=True,
    )

  def get_gerrit_patch_root(self, gclient_config=None):
    """Returns local path to the repo where the gerrit patch will be applied.

    If there is no patch, returns None.
    If a patch is specified but its repo is not found among the configured
    solutions or repo_path_map, returns the name of the first solution. This is
    done solely for backward compatibility with existing tests.
    Please do not rely on this logic in new code.
    Instead, properly map a repository to a local path using repo_path_map.
    TODO(nodir): remove this. Update all recipe tests to specify a git_repo
    matching the recipe.
    """
    cfg = gclient_config or self.c
    repo_url = self.m.tryserver.gerrit_change_repo_url
    if not repo_url:
      return None
    root = self.get_repo_path(repo_url, gclient_config=cfg)

    # This is wrong, but that's what a ton of recipe tests expect today.
    root = root or cfg.solutions[0].name

    return root

  def _canonicalize_repo_url(self, repo_url):
    """Attempts to make repo_url canonical. Supports Gitiles URLs."""
    return self.m.gitiles.canonicalize_repo_url(repo_url)

  def get_repo_path(self, repo_url, gclient_config=None):
    """Returns the local path to the repo checkout given its URL.

    Consults cfg.repo_path_map and falls back to the URLs in the configured
    solutions.

    Returns None if not found.
    """
    rel_path = self._get_repo_path(repo_url, gclient_config=gclient_config)
    if rel_path:
      return self.m.path.join(*rel_path.split('/'))
    return None

  def _get_repo_path(self, repo_url, gclient_config=None):
    repo_url = self._canonicalize_repo_url(repo_url)
    cfg = gclient_config or self.c
    rel_path, _ = cfg.repo_path_map.get(repo_url, ('', ''))
    if rel_path:
      return rel_path

    # repo_path_map keys may be non-canonical.
    for key, (rel_path, _) in cfg.repo_path_map.items():
      if self._canonicalize_repo_url(key) == repo_url:
        return rel_path

    for s in cfg.solutions:
      if self._canonicalize_repo_url(s.url) == repo_url:
        return s.name

    return None
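
  # Illustrative sketch (not part of the original module): with
  # cfg.repo_path_map = {'https://example.googlesource.com/foo':
  #                      ('src/foo', 'HEAD')},
  # get_repo_path('https://example.googlesource.com/foo') returns the
  # platform-specific join of 'src' and 'foo' (e.g. src/foo, or src\\foo on
  # Windows).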

  def set_patch_repo_revision(self, gclient_config=None):
    """Updates the config revision corresponding to the patched project.

    Useful for bot_update only, as it is the only consumer of gclient's config
    revision map. This doesn't overwrite the revision if it was already set.
    """
    cfg = gclient_config or self.c
    repo_url = self.m.tryserver.gerrit_change_repo_url
    path, revision = cfg.repo_path_map.get(repo_url, (None, None))
    if path and revision and path not in cfg.revisions:
      cfg.revisions[path] = revision