Improve logging and ease debugging.

While trying to fix all the multithreading issues...

R=dpranke@chromium.org
BUG=
TEST=


Review URL: http://codereview.chromium.org/8135007

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@104026 0039d316-1c4b-4281-b951-d872f2087c98
Branch: experimental/szager/collated-output
Author: maruel@chromium.org
Parent: 43ea9d727b
Commit: 1333cb3edb

@@ -314,12 +314,12 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
Manages From() keyword accordingly. Do not touch self.parsed_url nor
self.url because it may called with other urls due to From()."""
assert self.parsed_url == None or not self.should_process, self.parsed_url
overriden_url = self.get_custom_deps(self.name, url)
if overriden_url != url:
logging.info('%s, %s was overriden to %s' % (self.name, url,
overriden_url))
return overriden_url
elif isinstance(url, self.FromImpl):
parsed_url = self.get_custom_deps(self.name, url)
if parsed_url != url:
logging.info('LateOverride(%s, %s) -> %s' % (self.name, url, parsed_url))
return parsed_url
if isinstance(url, self.FromImpl):
ref = [
dep for dep in self.root.subtree(True) if url.module_name == dep.name
]
@@ -345,9 +345,11 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
# Call LateOverride() again.
parsed_url = found_dep.LateOverride(found_dep.url)
logging.info('%s, %s to %s' % (self.name, url, parsed_url))
logging.info(
'LateOverride(%s, %s) -> %s (From)' % (self.name, url, parsed_url))
return parsed_url
elif isinstance(url, basestring):
if isinstance(url, basestring):
parsed_url = urlparse.urlparse(url)
if not parsed_url[0]:
# A relative url. Fetch the real base.
@@ -363,16 +365,18 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
parsed_url = scm.FullUrlForRelativeUrl(url)
else:
parsed_url = url
logging.info('%s, %s -> %s' % (self.name, url, parsed_url))
return parsed_url
elif isinstance(url, self.FileImpl):
parsed_url = url
logging.info('%s, %s -> %s (File)' % (self.name, url, parsed_url))
logging.info('LateOverride(%s, %s) -> %s' % (self.name, url, parsed_url))
return parsed_url
elif url is None:
return None
if isinstance(url, self.FileImpl):
logging.info('LateOverride(%s, %s) -> %s (File)' % (self.name, url, url))
return url
if url is None:
logging.info('LateOverride(%s, %s) -> %s' % (self.name, url, url))
return url
else:
raise gclient_utils.Error('Unkown url type')
raise gclient_utils.Error('Unknown url type')
def ParseDepsFile(self):
"""Parses the DEPS file for this dependency."""
@@ -391,11 +395,12 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
}
filepath = os.path.join(self.root.root_dir, self.name, self.deps_file)
if not os.path.isfile(filepath):
logging.info('%s: No %s file found at %s' % (self.name, self.deps_file,
filepath))
logging.info(
'ParseDepsFile(%s): No %s file found at %s' % (
self.name, self.deps_file, filepath))
else:
deps_content = gclient_utils.FileRead(filepath)
logging.debug(deps_content)
logging.debug('ParseDepsFile(%s) read:\n%s' % (self.name, deps_content))
# Eval the content.
try:
exec(deps_content, global_scope, local_scope)
@@ -829,7 +834,7 @@ solutions = [
pprint.pformat(entry.parsed_url))
result += '}\n'
file_path = os.path.join(self.root_dir, self._options.entries_filename)
logging.info(result)
logging.debug(result)
gclient_utils.FileWrite(file_path, result)
def _ReadEntries(self):
@@ -1388,11 +1393,8 @@ def Parser():
old_parser = parser.parse_args
def Parse(args):
(options, args) = old_parser(args)
level = None
if options.verbose == 2:
level = logging.INFO
elif options.verbose > 2:
level = logging.DEBUG
level = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG][
min(options.verbose, 3)]
logging.basicConfig(level=level,
format='%(module)s(%(lineno)d) %(funcName)s:%(message)s')
options.entries_filename = options.config_filename + '_entries'
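
The if/elif ladder above is replaced by indexing a list of levels with the clamped -v count, so zero -v flags map to ERROR and -v, -vv, -vvv map to WARNING, INFO and DEBUG. A standalone sketch of the same clamping idiom, counting '-v' occurrences directly from sys.argv instead of going through the gclient option parser:

import logging
import sys

# Index a level table with the number of -v flags, capped at the last entry.
levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
verbose = sys.argv.count('-v')
logging.basicConfig(
    level=levels[min(verbose, len(levels) - 1)],
    format='%(module)s(%(lineno)d) %(funcName)s:%(message)s')
logging.warning('shown only when at least one -v flag is passed')
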

@@ -247,7 +247,7 @@ def MakeFileAutoFlush(fileobj, delay=10):
return new_fileobj
def MakeFileAnnotated(fileobj):
def MakeFileAnnotated(fileobj, include_zero=False):
"""Creates a file object clone to automatically prepends every line in worker
threads with a NN> prefix."""
if hasattr(fileobj, 'output_buffers'):
@@ -265,9 +265,11 @@ def MakeFileAnnotated(fileobj):
def annotated_write(out):
index = getattr(threading.currentThread(), 'index', None)
if index is None:
# Undexed threads aren't buffered.
new_fileobj.old_annotated_write(out)
return
if not include_zero:
# Unindexed threads aren't buffered.
new_fileobj.old_annotated_write(out)
return
index = 0
new_fileobj.lock.acquire()
try:
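
include_zero controls what happens to output from threads that have no 'index' attribute (typically the main thread): by default it is written straight through, but with include_zero=True it is buffered and prefixed as index 0 like any worker. A hedged usage sketch, using only the two wrappers shown in this change:

import sys
import gclient_utils

# Flush periodically and prefix each line with the emitting thread's index;
# include_zero=True makes main-thread output show up as "0>" as well.
sys.stdout = gclient_utils.MakeFileAutoFlush(sys.stdout)
sys.stdout = gclient_utils.MakeFileAnnotated(sys.stdout, include_zero=True)
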
@@ -557,7 +559,7 @@ class ExecutionQueue(object):
self._flush_terminated_threads()
if (not self.queued and not self.running or
self.jobs == len(self.running)):
# No more worker threads or can't queue anything.
logging.debug('No more worker threads or can\'t queue anything.')
break
# Check for new tasks to start.
@@ -644,7 +646,7 @@ class ExecutionQueue(object):
"""One thread to execute one WorkItem."""
def __init__(self, item, index, args, kwargs):
threading.Thread.__init__(self, name=item.name or 'Worker')
logging.info(item.name)
logging.info('_Worker(%s) reqs:%s' % (item.name, item.requirements))
self.item = item
self.index = index
self.args = args
@@ -652,7 +654,7 @@ class ExecutionQueue(object):
def run(self):
"""Runs in its own thread."""
logging.debug('running(%s)' % self.item.name)
logging.debug('_Worker.run(%s)' % self.item.name)
work_queue = self.kwargs['work_queue']
try:
self.item.run(*self.args, **self.kwargs)
@@ -661,7 +663,7 @@ class ExecutionQueue(object):
logging.info('Caught exception in thread %s' % self.item.name)
logging.info(str(sys.exc_info()))
work_queue.exceptions.put(sys.exc_info())
logging.info('Task %s done' % self.item.name)
logging.info('_Worker.run(%s) done' % self.item.name)
work_queue.ready_cond.acquire()
try:

@@ -19,6 +19,7 @@ BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(BASE_DIR))
import gclient
import gclient_utils
from tests import trial_dir
@@ -162,6 +163,7 @@ class GclientTest(trial_dir.TestCase):
'svn://example.com/foo/dir1/dir2/dir3/dir4/dir1/another',
],
actual)
self._check_requirements(
obj.dependencies[0],
{
@@ -192,6 +194,7 @@ class GclientTest(trial_dir.TestCase):
self.assertEquals({}, expected)
def _get_processed(self):
"""Retrieves the item in the order they were processed."""
items = []
try:
while True:
@@ -239,9 +242,13 @@ class GclientTest(trial_dir.TestCase):
if __name__ == '__main__':
sys.stdout = gclient_utils.MakeFileAutoFlush(sys.stdout)
sys.stdout = gclient_utils.MakeFileAnnotated(sys.stdout, include_zero=True)
sys.stderr = gclient_utils.MakeFileAutoFlush(sys.stderr)
sys.stderr = gclient_utils.MakeFileAnnotated(sys.stderr, include_zero=True)
logging.basicConfig(
level=[logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG][
min(sys.argv.count('-v'), 3)],
format='%(asctime).19s %(levelname)s %(filename)s:'
'%(lineno)s %(message)s')
format='%(relativeCreated)4d %(levelname)5s %(module)13s('
'%(lineno)d) %(message)s')
unittest.main()
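
The test harness also switches to a log format aimed at timing multithreaded runs: %(relativeCreated)d is milliseconds since the logging module was loaded, and the fixed-width level and module fields keep the columns aligned across interleaved threads. A small sketch, assuming nothing beyond the stdlib logging module:

import logging

logging.basicConfig(
    level=logging.DEBUG,
    format='%(relativeCreated)4d %(levelname)5s %(module)13s('
           '%(lineno)d) %(message)s')
# Emits lines like "<ms since start> LEVEL <module>(<lineno>) <message>".
logging.debug('worker started')
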
