diff --git a/detect_host_arch.py b/detect_host_arch.py index 3b0cc0bf7..69c50f870 100755 --- a/detect_host_arch.py +++ b/detect_host_arch.py @@ -5,6 +5,8 @@ """Outputs host CPU architecture in format recognized by gyp.""" +from __future__ import print_function + import platform import re import sys @@ -51,4 +53,4 @@ def DoMain(_): return HostArch() if __name__ == '__main__': - print DoMain([]) + print(DoMain([])) diff --git a/download_from_google_storage.py b/download_from_google_storage.py index 657bc6353..cfaac6ccb 100755 --- a/download_from_google_storage.py +++ b/download_from_google_storage.py @@ -5,11 +5,17 @@ """Download files from Google Storage based on SHA1 sums.""" +from __future__ import print_function import hashlib import optparse import os -import Queue + +try: + import Queue as queue +except ImportError: # For Py3 compatibility + import queue + import re import shutil import stat @@ -110,13 +116,13 @@ class Gsutil(object): timeout=self.timeout) # Parse output. - status_code_match = re.search('status=([0-9]+)', err) + status_code_match = re.search(b'status=([0-9]+)', err) if status_code_match: return (int(status_code_match.group(1)), out, err) - if ('You are attempting to access protected data with ' - 'no configured credentials.' in err): + if (b'You are attempting to access protected data with ' + b'no configured credentials.' in err): return (403, out, err) - if 'matched no objects' in err: + if b'matched no objects' in err: return (404, out, err) return (code, out, err) @@ -164,15 +170,15 @@ def enumerate_input(input_filename, directory, recursive, ignore_errors, output, if not os.path.exists(input_filename): if not ignore_errors: raise FileNotFoundError('%s not found.' % input_filename) - print >> sys.stderr, '%s not found.' % input_filename + print('%s not found.' 
% input_filename, file=sys.stderr) with open(input_filename, 'rb') as f: - sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip()) + sha1_match = re.match(b'^([A-Za-z0-9]{40})$', f.read(1024).rstrip()) if sha1_match: yield (sha1_match.groups(1)[0], output) return if not ignore_errors: raise InvalidFileError('No sha1 sum found in %s.' % input_filename) - print >> sys.stderr, 'No sha1 sum found in %s.' % input_filename + print('No sha1 sum found in %s.' % input_filename, file=sys.stderr) return if not directory: @@ -198,20 +204,20 @@ def enumerate_input(input_filename, directory, recursive, ignore_errors, output, 'the path of %s' % full_path) if not ignore_errors: raise InvalidFileError(err) - print >> sys.stderr, err + print(err, file=sys.stderr) continue current_platform = PLATFORM_MAPPING[sys.platform] if current_platform != target_platform: continue with open(full_path, 'rb') as f: - sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip()) + sha1_match = re.match(b'^([A-Za-z0-9]{40})$', f.read(1024).rstrip()) if sha1_match: yield (sha1_match.groups(1)[0], full_path.replace('.sha1', '')) else: if not ignore_errors: raise InvalidFileError('No sha1 sum found in %s.' % filename) - print >> sys.stderr, 'No sha1 sum found in %s.' % filename + print('No sha1 sum found in %s.' % filename, file=sys.stderr) def _validate_tar_file(tar, prefix): @@ -246,7 +252,7 @@ def _downloader_worker_thread(thread_num, q, force, base_url, if get_sha1(output_filename) == input_sha1_sum: continue # Check if file exists. - file_url = '%s/%s' % (base_url, input_sha1_sum) + file_url = '%s/%s' % (base_url, input_sha1_sum.decode()) (code, _, err) = gsutil.check_call('ls', file_url) if code != 0: if code == 404: @@ -256,10 +262,10 @@ def _downloader_worker_thread(thread_num, q, force, base_url, file_url, output_filename))) else: # Other error, probably auth related (bad ~/.boto, etc). - out_q.put('%d> Failed to fetch file %s for %s, skipping. 
[Err: %s]' % ( - thread_num, file_url, output_filename, err)) - ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' % ( - file_url, output_filename, err))) + out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' % + (thread_num, file_url, output_filename, err.decode())) + ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' % + (file_url, output_filename, err.decode()))) continue # Fetch the file. out_q.put('%d> Downloading %s...' % (thread_num, output_filename)) @@ -272,8 +278,8 @@ def _downloader_worker_thread(thread_num, q, force, base_url, thread_num, output_filename)) code, _, err = gsutil.check_call('cp', file_url, output_filename) if code != 0: - out_q.put('%d> %s' % (thread_num, err)) - ret_codes.put((code, err)) + out_q.put('%d> %s' % (thread_num, err.decode())) + ret_codes.put((code, err.decode())) continue remote_sha1 = get_sha1(output_filename) @@ -328,8 +334,8 @@ def _downloader_worker_thread(thread_num, q, force, base_url, # "x-goog-meta-executable". code, out, _ = gsutil.check_call('stat', file_url) if code != 0: - out_q.put('%d> %s' % (thread_num, err)) - ret_codes.put((code, err)) + out_q.put('%d> %s' % (thread_num, err.decode())) + ret_codes.put((code, err.decode())) elif re.search(r'executable:\s*1', out): st = os.stat(output_filename) os.chmod(output_filename, st.st_mode | stat.S_IEXEC) @@ -348,7 +354,7 @@ class PrinterThread(threading.Thread): if line is None: break self.did_print_anything = True - print line + print(line) def _data_exists(input_sha1_sum, output_filename, extract): @@ -405,9 +411,9 @@ def download_from_google_storage( # Start up all the worker threads. 
all_threads = [] download_start = time.time() - stdout_queue = Queue.Queue() - work_queue = Queue.Queue() - ret_codes = Queue.Queue() + stdout_queue = queue.Queue() + work_queue = queue.Queue() + ret_codes = queue.Queue() ret_codes.put((0, None)) for thread_num in range(num_threads): t = threading.Thread( @@ -438,12 +444,12 @@ def download_from_google_storage( for ret_code, message in ret_codes.queue: max_ret_code = max(ret_code, max_ret_code) if message: - print >> sys.stderr, message + print(message, file=sys.stderr) # Only print summary if any work was done. if printer_thread.did_print_anything: - print 'Downloading %d files took %1f second(s)' % ( - len(input_data), time.time() - download_start) + print('Downloading %d files took %1f second(s)' % + (len(input_data), time.time() - download_start)) return max_ret_code @@ -530,14 +536,16 @@ def main(args): if (set(('http_proxy', 'https_proxy')).intersection( env.lower() for env in os.environ) and 'NO_AUTH_BOTO_CONFIG' not in os.environ): - print >> sys.stderr, ('NOTICE: You have PROXY values set in your ' - 'environment, but gsutil in depot_tools does not ' - '(yet) obey them.') - print >> sys.stderr, ('Also, --no_auth prevents the normal BOTO_CONFIG ' - 'environment variable from being used.') - print >> sys.stderr, ('To use a proxy in this situation, please supply ' - 'those settings in a .boto file pointed to by ' - 'the NO_AUTH_BOTO_CONFIG environment var.') + print('NOTICE: You have PROXY values set in your environment, but gsutil ' + 'in depot_tools does not (yet) obey them.', + file=sys.stderr) + print('Also, --no_auth prevents the normal BOTO_CONFIG environment ' + 'variable from being used.', + file=sys.stderr) + print('To use a proxy in this situation, please supply those settings ' + 'in a .boto file pointed to by the NO_AUTH_BOTO_CONFIG environment ' + 'variable.', + file=sys.stderr) options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull) # Make sure gsutil exists where we expect it to. 
@@ -550,10 +558,10 @@ def main(args): # Passing in -g/--config will run our copy of GSUtil, then quit. if options.config: - print '===Note from depot_tools===' - print 'If you do not have a project ID, enter "0" when asked for one.' - print '===End note from depot_tools===' - print + print('===Note from depot_tools===') + print('If you do not have a project ID, enter "0" when asked for one.') + print('===End note from depot_tools===') + print() gsutil.check_call('version') return gsutil.call('config') diff --git a/fix_encoding.py b/fix_encoding.py index 34f6cb8ee..faa7c7c69 100644 --- a/fix_encoding.py +++ b/fix_encoding.py @@ -95,8 +95,8 @@ def fix_win_sys_argv(encoding): argc = c_int(0) argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) argv = [ - argv_unicode[i].encode(encoding, 'replace') - for i in xrange(0, argc.value)] + argv_unicode[i].encode(encoding, 'replace') for i in range(0, argc.value) + ] if not hasattr(sys, 'frozen'): # If this is an executable produced by py2exe or bbfreeze, then it @@ -107,7 +107,7 @@ def fix_win_sys_argv(encoding): # Also skip option arguments to the Python interpreter. while len(argv) > 0: arg = argv[0] - if not arg.startswith(u'-') or arg == u'-': + if not arg.startswith(b'-') or arg == b'-': break argv = argv[1:] if arg == u'-m': diff --git a/gclient.py b/gclient.py index 867c6ca8f..034314a7e 100755 --- a/gclient.py +++ b/gclient.py @@ -96,7 +96,11 @@ import pprint import re import sys import time -import urlparse + +try: + import urlparse +except ImportError: # For Py3 compatibility + import urllib.parse as urlparse import detect_host_arch import fix_encoding @@ -128,14 +132,14 @@ def ToGNString(value, allow_dicts = True): allow_dicts indicates if this function will allow converting dictionaries to GN scopes. 
This is only possible at the top level, you can't nest a GN scope in a list, so this should be set to False for recursive calls.""" - if isinstance(value, basestring): + if isinstance(value, str): if value.find('\n') >= 0: raise GNException("Trying to print a string with a newline in it.") return '"' + \ value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \ '"' - if isinstance(value, unicode): + if sys.version_info.major == 2 and isinstance(value, unicode): return ToGNString(value.encode('utf-8')) if isinstance(value, bool): @@ -286,7 +290,7 @@ class DependencySettings(object): self._custom_hooks = custom_hooks or [] # Post process the url to remove trailing slashes. - if isinstance(self.url, basestring): + if isinstance(self.url, str): # urls are sometime incorrectly written as proto://host/path/@rev. Replace # it to proto://host/path@rev. self.set_url(self.url.replace('/@', '@')) @@ -428,7 +432,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): self._OverrideUrl() # This is inherited from WorkItem. We want the URL to be a resource. - if self.url and isinstance(self.url, basestring): + if self.url and isinstance(self.url, str): # The url is usually given to gclient either as https://blah@123 # or just https://blah. The @123 portion is irrelevant. self.resources.append(self.url.split('@')[0]) @@ -448,7 +452,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): self.url, parsed_url) self.set_url(parsed_url) - elif isinstance(self.url, basestring): + elif isinstance(self.url, str): parsed_url = urlparse.urlparse(self.url) if (not parsed_url[0] and not re.match(r'^\w+\@[\w\.-]+\:[\w\/]+', parsed_url[2])): @@ -572,7 +576,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): # If a line is in custom_deps, but not in the solution, we want to append # this line to the solution. 
- for dep_name, dep_info in self.custom_deps.iteritems(): + for dep_name, dep_info in self.custom_deps.items(): if dep_name not in deps: deps[dep_name] = {'url': dep_info, 'dep_type': 'git'} @@ -601,7 +605,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): def _deps_to_objects(self, deps, use_relative_paths): """Convert a deps dict to a dict of Dependency objects.""" deps_to_add = [] - for name, dep_value in deps.iteritems(): + for name, dep_value in deps.items(): should_process = self.should_process if dep_value is None: continue @@ -709,7 +713,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): self._vars = local_scope.get('vars', {}) if self.parent: - for key, value in self.parent.get_vars().iteritems(): + for key, value in self.parent.get_vars().items(): if key in self._vars: self._vars[key] = value # Since we heavily post-process things, freeze ones which should @@ -737,7 +741,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): if 'recursedeps' in local_scope: for ent in local_scope['recursedeps']: - if isinstance(ent, basestring): + if isinstance(ent, str): self.recursedeps[ent] = self.deps_file else: # (depname, depsfilename) self.recursedeps[ent[0]] = ent[1] @@ -746,7 +750,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): if rel_prefix: logging.warning('Updating recursedeps by prepending %s.', rel_prefix) rel_deps = {} - for depname, options in self.recursedeps.iteritems(): + for depname, options in self.recursedeps.items(): rel_deps[ os.path.normpath(os.path.join(rel_prefix, depname))] = options self.recursedeps = rel_deps @@ -1004,7 +1008,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): variables = self.get_vars() for arg in self._gn_args: value = variables[arg] - if isinstance(value, basestring): + if isinstance(value, str): value = gclient_eval.EvaluateCondition(value, variables) lines.append('%s = %s' % (arg, ToGNString(value))) with open(os.path.join(self.root.root_dir, 
self._gn_args_file), 'w') as f: @@ -1353,7 +1357,8 @@ solutions = %(solution_list)s mirror.exists()) else: mirror_string = 'not used' - raise gclient_utils.Error(''' + raise gclient_utils.Error( + ''' Your .gclient file seems to be broken. The requested URL is different from what is actually checked out in %(checkout_path)s. @@ -1581,7 +1586,7 @@ it or fix the checkout. full_entries = [os.path.join(self.root_dir, e.replace('/', os.path.sep)) for e in entries] - for entry, prev_url in self._ReadEntries().iteritems(): + for entry, prev_url in self._ReadEntries().items(): if not prev_url: # entry must have been overridden via .gclient custom_deps continue diff --git a/gclient_eval.py b/gclient_eval.py index 3f561cc8e..6ea2fc78e 100644 --- a/gclient_eval.py +++ b/gclient_eval.py @@ -3,15 +3,21 @@ # found in the LICENSE file. import ast -import cStringIO import collections import logging +import sys import tokenize import gclient_utils from third_party import schema +if sys.version_info.major == 2: + # We use cStringIO.StringIO because it is equivalent to Py3's io.StringIO. + from cStringIO import StringIO +else: + from io import StringIO + class _NodeDict(collections.MutableMapping): """Dict-like type that also stores information on AST nodes and tokens.""" @@ -20,7 +26,7 @@ class _NodeDict(collections.MutableMapping): self.tokens = tokens def __str__(self): - return str({k: v[0] for k, v in self.data.iteritems()}) + return str({k: v[0] for k, v in self.data.items()}) def __repr__(self): return self.__str__() @@ -72,151 +78,169 @@ def _NodeDictSchema(dict_schema): # See https://github.com/keleshev/schema for docs how to configure schema. _GCLIENT_DEPS_SCHEMA = _NodeDictSchema({ - schema.Optional(basestring): schema.Or( - None, - basestring, - _NodeDictSchema({ - # Repo and revision to check out under the path - # (same as if no dict was used). - 'url': schema.Or(None, basestring), - - # Optional condition string. 
The dep will only be processed - # if the condition evaluates to True. - schema.Optional('condition'): basestring, - - schema.Optional('dep_type', default='git'): basestring, - }), - # CIPD package. - _NodeDictSchema({ - 'packages': [ - _NodeDictSchema({ - 'package': basestring, - - 'version': basestring, - }) - ], - - schema.Optional('condition'): basestring, - - schema.Optional('dep_type', default='cipd'): basestring, - }), - ), + schema.Optional(str): + schema.Or( + None, + str, + _NodeDictSchema({ + # Repo and revision to check out under the path + # (same as if no dict was used). + 'url': + schema.Or(None, str), + + # Optional condition string. The dep will only be processed + # if the condition evaluates to True. + schema.Optional('condition'): + str, + schema.Optional('dep_type', default='git'): + str, + }), + # CIPD package. + _NodeDictSchema({ + 'packages': [ + _NodeDictSchema({ + 'package': str, + 'version': str, + }) + ], + schema.Optional('condition'): + str, + schema.Optional('dep_type', default='cipd'): + str, + }), + ), }) -_GCLIENT_HOOKS_SCHEMA = [_NodeDictSchema({ - # Hook action: list of command-line arguments to invoke. - 'action': [basestring], - - # Name of the hook. Doesn't affect operation. - schema.Optional('name'): basestring, - - # Hook pattern (regex). Originally intended to limit some hooks to run - # only when files matching the pattern have changed. In practice, with git, - # gclient runs all the hooks regardless of this field. - schema.Optional('pattern'): basestring, - - # Working directory where to execute the hook. - schema.Optional('cwd'): basestring, - - # Optional condition string. The hook will only be run - # if the condition evaluates to True. - schema.Optional('condition'): basestring, -})] - -_GCLIENT_SCHEMA = schema.Schema(_NodeDictSchema({ - # List of host names from which dependencies are allowed (whitelist). - # NOTE: when not present, all hosts are allowed. - # NOTE: scoped to current DEPS file, not recursive. 
- schema.Optional('allowed_hosts'): [schema.Optional(basestring)], - - # Mapping from paths to repo and revision to check out under that path. - # Applying this mapping to the on-disk checkout is the main purpose - # of gclient, and also why the config file is called DEPS. - # - # The following functions are allowed: - # - # Var(): allows variable substitution (either from 'vars' dict below, - # or command-line override) - schema.Optional('deps'): _GCLIENT_DEPS_SCHEMA, - - # Similar to 'deps' (see above) - also keyed by OS (e.g. 'linux'). - # Also see 'target_os'. - schema.Optional('deps_os'): _NodeDictSchema({ - schema.Optional(basestring): _GCLIENT_DEPS_SCHEMA, - }), - - # Dependency to get gclient_gn_args* settings from. This allows these values - # to be set in a recursedeps file, rather than requiring that they exist in - # the top-level solution. - schema.Optional('gclient_gn_args_from'): basestring, - - # Path to GN args file to write selected variables. - schema.Optional('gclient_gn_args_file'): basestring, - - # Subset of variables to write to the GN args file (see above). - schema.Optional('gclient_gn_args'): [schema.Optional(basestring)], - - # Hooks executed after gclient sync (unless suppressed), or explicitly - # on gclient hooks. See _GCLIENT_HOOKS_SCHEMA for details. - # Also see 'pre_deps_hooks'. - schema.Optional('hooks'): _GCLIENT_HOOKS_SCHEMA, - - # Similar to 'hooks', also keyed by OS. - schema.Optional('hooks_os'): _NodeDictSchema({ - schema.Optional(basestring): _GCLIENT_HOOKS_SCHEMA - }), - - # Rules which #includes are allowed in the directory. - # Also see 'skip_child_includes' and 'specific_include_rules'. - schema.Optional('include_rules'): [schema.Optional(basestring)], - - # Hooks executed before processing DEPS. See 'hooks' for more details. - schema.Optional('pre_deps_hooks'): _GCLIENT_HOOKS_SCHEMA, - - # Recursion limit for nested DEPS. - schema.Optional('recursion'): int, - - # Whitelists deps for which recursion should be enabled. 
- schema.Optional('recursedeps'): [ - schema.Optional(schema.Or( - basestring, - (basestring, basestring), - [basestring, basestring] - )), - ], - - # Blacklists directories for checking 'include_rules'. - schema.Optional('skip_child_includes'): [schema.Optional(basestring)], - - # Mapping from paths to include rules specific for that path. - # See 'include_rules' for more details. - schema.Optional('specific_include_rules'): _NodeDictSchema({ - schema.Optional(basestring): [basestring] - }), - - # List of additional OS names to consider when selecting dependencies - # from deps_os. - schema.Optional('target_os'): [schema.Optional(basestring)], - - # For recursed-upon sub-dependencies, check out their own dependencies - # relative to the parent's path, rather than relative to the .gclient file. - schema.Optional('use_relative_paths'): bool, - - # For recursed-upon sub-dependencies, run their hooks relative to the - # parent's path instead of relative to the .gclient file. - schema.Optional('use_relative_hooks'): bool, - - # Variables that can be referenced using Var() - see 'deps'. - schema.Optional('vars'): _NodeDictSchema({ - schema.Optional(basestring): schema.Or(basestring, bool), - }), -})) +_GCLIENT_HOOKS_SCHEMA = [ + _NodeDictSchema({ + # Hook action: list of command-line arguments to invoke. + 'action': [str], + + # Name of the hook. Doesn't affect operation. + schema.Optional('name'): + str, + + # Hook pattern (regex). Originally intended to limit some hooks to run + # only when files matching the pattern have changed. In practice, with + # git, gclient runs all the hooks regardless of this field. + schema.Optional('pattern'): + str, + + # Working directory where to execute the hook. + schema.Optional('cwd'): + str, + + # Optional condition string. The hook will only be run + # if the condition evaluates to True. 
+ schema.Optional('condition'): + str, + }) +] + +_GCLIENT_SCHEMA = schema.Schema( + _NodeDictSchema({ + # List of host names from which dependencies are allowed (whitelist). + # NOTE: when not present, all hosts are allowed. + # NOTE: scoped to current DEPS file, not recursive. + schema.Optional('allowed_hosts'): [schema.Optional(str)], + + # Mapping from paths to repo and revision to check out under that path. + # Applying this mapping to the on-disk checkout is the main purpose + # of gclient, and also why the config file is called DEPS. + # + # The following functions are allowed: + # + # Var(): allows variable substitution (either from 'vars' dict below, + # or command-line override) + schema.Optional('deps'): + _GCLIENT_DEPS_SCHEMA, + + # Similar to 'deps' (see above) - also keyed by OS (e.g. 'linux'). + # Also see 'target_os'. + schema.Optional('deps_os'): + _NodeDictSchema({ + schema.Optional(str): _GCLIENT_DEPS_SCHEMA, + }), + + # Dependency to get gclient_gn_args* settings from. This allows these + # values to be set in a recursedeps file, rather than requiring that + # they exist in the top-level solution. + schema.Optional('gclient_gn_args_from'): + str, + + # Path to GN args file to write selected variables. + schema.Optional('gclient_gn_args_file'): + str, + + # Subset of variables to write to the GN args file (see above). + schema.Optional('gclient_gn_args'): [schema.Optional(str)], + + # Hooks executed after gclient sync (unless suppressed), or explicitly + # on gclient hooks. See _GCLIENT_HOOKS_SCHEMA for details. + # Also see 'pre_deps_hooks'. + schema.Optional('hooks'): + _GCLIENT_HOOKS_SCHEMA, + + # Similar to 'hooks', also keyed by OS. + schema.Optional('hooks_os'): + _NodeDictSchema({ + schema.Optional(str): _GCLIENT_HOOKS_SCHEMA + }), + + # Rules which #includes are allowed in the directory. + # Also see 'skip_child_includes' and 'specific_include_rules'. 
+ schema.Optional('include_rules'): [schema.Optional(str)], + + # Hooks executed before processing DEPS. See 'hooks' for more details. + schema.Optional('pre_deps_hooks'): + _GCLIENT_HOOKS_SCHEMA, + + # Recursion limit for nested DEPS. + schema.Optional('recursion'): + int, + + # Whitelists deps for which recursion should be enabled. + schema.Optional('recursedeps'): [ + schema.Optional(schema.Or(str, (str, str), [str, str])), + ], + + # Blacklists directories for checking 'include_rules'. + schema.Optional('skip_child_includes'): [schema.Optional(str)], + + # Mapping from paths to include rules specific for that path. + # See 'include_rules' for more details. + schema.Optional('specific_include_rules'): + _NodeDictSchema({ + schema.Optional(str): [str] + }), + + # List of additional OS names to consider when selecting dependencies + # from deps_os. + schema.Optional('target_os'): [schema.Optional(str)], + + # For recursed-upon sub-dependencies, check out their own dependencies + # relative to the parent's path, rather than relative to the .gclient + # file. + schema.Optional('use_relative_paths'): + bool, + + # For recursed-upon sub-dependencies, run their hooks relative to the + # parent's path instead of relative to the .gclient file. + schema.Optional('use_relative_hooks'): + bool, + + # Variables that can be referenced using Var() - see 'deps'. + schema.Optional('vars'): + _NodeDictSchema({ + schema.Optional(str): schema.Or(str, bool), + }), + })) def _gclient_eval(node_or_string, filename='', vars_dict=None): """Safely evaluates a single expression. 
Returns the result.""" _allowed_names = {'None': None, 'True': True, 'False': False} - if isinstance(node_or_string, basestring): + if isinstance(node_or_string, str): node_or_string = ast.parse(node_or_string, filename=filename, mode='eval') if isinstance(node_or_string, ast.Expression): node_or_string = node_or_string.body @@ -246,17 +270,21 @@ def _gclient_eval(node_or_string, filename='', vars_dict=None): 'invalid name %r (file %r, line %s)' % ( node.id, filename, getattr(node, 'lineno', ''))) return _allowed_names[node.id] + elif not sys.version_info[:2] < (3, 4) and isinstance( + node, ast.NameConstant): # Since Python 3.4 + return node.value elif isinstance(node, ast.Call): if not isinstance(node.func, ast.Name) or node.func.id != 'Var': raise ValueError( 'Var is the only allowed function (file %r, line %s)' % ( filename, getattr(node, 'lineno', ''))) - if node.keywords or node.starargs or node.kwargs or len(node.args) != 1: + if node.keywords or getattr(node, 'starargs', None) or getattr( + node, 'kwargs', None) or len(node.args) != 1: raise ValueError( 'Var takes exactly one argument (file %r, line %s)' % ( filename, getattr(node, 'lineno', ''))) arg = _convert(node.args[0]) - if not isinstance(arg, basestring): + if not isinstance(arg, str): raise ValueError( 'Var\'s argument must be a variable name (file %r, line %s)' % ( filename, getattr(node, 'lineno', ''))) @@ -321,11 +349,19 @@ def Exec(content, filename='', vars_override=None, builtin_vars=None): _validate_statement(statement, statements) statements[statement.targets[0].id] = statement.value + # The tokenized representation needs to end with a newline token, otherwise + # untokenization will trigger an assert later on. + # In Python 2.7 on Windows we need to ensure the input ends with a newline + # for a newline token to be generated. + # In other cases a newline token is always generated during tokenization so + # this has no effect. + # TODO: Remove this workaround after migrating to Python 3. 
+ content += '\n' tokens = { - token[2]: list(token) - for token in tokenize.generate_tokens( - cStringIO.StringIO(content).readline) + token[2]: list(token) for token in tokenize.generate_tokens( + StringIO(content).readline) } + local_scope = _NodeDict({}, tokens) # Process vars first, so we can expand variables in the rest of the DEPS file. @@ -342,12 +378,9 @@ def Exec(content, filename='', vars_override=None, builtin_vars=None): vars_dict.update(builtin_vars) if vars_override: - vars_dict.update({ - k: v - for k, v in vars_override.iteritems() - if k in vars_dict}) + vars_dict.update({k: v for k, v in vars_override.items() if k in vars_dict}) - for name, node in statements.iteritems(): + for name, node in statements.items(): value = _gclient_eval(node, filename, vars_dict) local_scope.SetNode(name, value, node) @@ -371,23 +404,16 @@ def ExecLegacy(content, filename='', vars_override=None, if builtin_vars: vars_dict.update(builtin_vars) if vars_override: - vars_dict.update({ - k: v - for k, v in vars_override.iteritems() - if k in vars_dict - }) + vars_dict.update({k: v for k, v in vars_override.items() if k in vars_dict}) if not vars_dict: return local_scope def _DeepFormat(node): - if isinstance(node, basestring): + if isinstance(node, str): return node.format(**vars_dict) elif isinstance(node, dict): - return { - k.format(**vars_dict): _DeepFormat(v) - for k, v in node.iteritems() - } + return {k.format(**vars_dict): _DeepFormat(v) for k, v in node.items()} elif isinstance(node, list): return [_DeepFormat(elem) for elem in node] elif isinstance(node, tuple): @@ -536,7 +562,7 @@ def EvaluateCondition(condition, variables, referenced_variables=None): # Allow using "native" types, without wrapping everything in strings. # Note that schema constraints still apply to variables. - if not isinstance(value, basestring): + if not isinstance(value, str): return value # Recursively evaluate the variable reference. 
@@ -614,7 +640,11 @@ def EvaluateCondition(condition, variables, referenced_variables=None): def RenderDEPSFile(gclient_dict): contents = sorted(gclient_dict.tokens.values(), key=lambda token: token[2]) - return tokenize.untokenize(contents) + # The last token is a newline, which we ensure in Exec() for compatibility. + # However tests pass in inputs not ending with a newline and expect the same + # back, so for backwards compatibility need to remove that newline character. + # TODO: Fix tests to expect the newline + return tokenize.untokenize(contents)[:-1] def _UpdateAstString(tokens, node, value): @@ -679,8 +709,7 @@ def AddVar(gclient_dict, var_name, value): vars_node.values.insert(0, value_node) # Update the tokens. - var_tokens = list(tokenize.generate_tokens( - cStringIO.StringIO(var_content).readline)) + var_tokens = list(tokenize.generate_tokens(StringIO(var_content).readline)) var_tokens = { token[2]: list(token) # Ignore the tokens corresponding to braces and new lines. diff --git a/gclient_paths.py b/gclient_paths.py index b860855fd..36e96f77b 100644 --- a/gclient_paths.py +++ b/gclient_paths.py @@ -7,6 +7,9 @@ # code, and even more importantly don't add more toplevel import statements, # particularly for modules that are not builtin (see sys.builtin_modules_names, # os isn't built in, but it's essential to this file). + +from __future__ import print_function + import os import sys @@ -30,16 +33,17 @@ def FindGclientRoot(from_dir, filename='.gclient'): # might have failed. In that case, we cannot verify that the .gclient # is the one we want to use. In order to not to cause too much trouble, # just issue a warning and return the path anyway. - print >> sys.stderr, ("%s missing, %s file in parent directory %s might " - "not be the file you want to use." % - (entries_filename, filename, path)) + print( + "%s missing, %s file in parent directory %s might not be the file " + "you want to use." 
% (entries_filename, filename, path), + file=sys.stderr) return path scope = {} try: import io with io.open(entries_filename, encoding='utf-8') as f: exec(f.read(), scope) - except SyntaxError, e: + except SyntaxError as e: SyntaxErrorToError(filename, e) all_directories = scope['entries'].keys() path_to_check = real_from_dir[len(path)+1:] @@ -113,7 +117,7 @@ def GetBuildtoolsPlatformBinaryPath(): elif sys.platform == 'darwin': subdir = 'mac' elif sys.platform.startswith('linux'): - subdir = 'linux64' + subdir = 'linux64' else: raise Error('Unknown platform: ' + sys.platform) return os.path.join(buildtools_path, subdir) @@ -130,7 +134,8 @@ def GetGClientPrimarySolutionName(gclient_root_dir_path): """Returns the name of the primary solution in the .gclient file specified.""" gclient_config_file = os.path.join(gclient_root_dir_path, '.gclient') env = {} - execfile(gclient_config_file, env) + exec(compile(open(gclient_config_file).read(), gclient_config_file, 'exec'), + env) solutions = env.get('solutions', []) if solutions: return solutions[0].get('name') diff --git a/gclient_scm.py b/gclient_scm.py index 69faa6cbc..00263b00d 100644 --- a/gclient_scm.py +++ b/gclient_scm.py @@ -18,7 +18,11 @@ import sys import tempfile import threading import traceback -import urlparse + +try: + import urlparse +except ImportError: # For Py3 compatibility + import urllib.parse as urlparse import download_from_google_storage import gclient_utils @@ -311,7 +315,8 @@ class GitWrapper(SCMWrapper): if file_list is not None: files = self._Capture( ['-c', 'core.quotePath=false', 'ls-files']).splitlines() - file_list.extend([os.path.join(self.checkout_path, f) for f in files]) + file_list.extend( + [os.path.join(self.checkout_path, f.decode()) for f in files]) def _DisableHooks(self): hook_dir = os.path.join(self.checkout_path, '.git', 'hooks') @@ -590,10 +595,10 @@ class GitWrapper(SCMWrapper): # Skip url auto-correction if remote.origin.gclient-auto-fix-url is set. 
# This allows devs to use experimental repos which have a different url # but whose branch(s) are the same as official repos. - if (current_url.rstrip('/') != url.rstrip('/') and - url != 'git://foo' and + if (current_url.rstrip(b'/') != url.rstrip('/') and url != 'git://foo' and subprocess2.capture( - ['git', 'config', 'remote.%s.gclient-auto-fix-url' % self.remote], + ['git', 'config', + 'remote.%s.gclient-auto-fix-url' % self.remote], cwd=self.checkout_path).strip() != 'False'): self.Print('_____ switching %s to a new upstream' % self.relpath) if not (options.force or options.reset): @@ -1117,7 +1122,7 @@ class GitWrapper(SCMWrapper): try: rebase_output = scm.GIT.Capture(rebase_cmd, cwd=self.checkout_path) - except subprocess2.CalledProcessError, e: + except subprocess2.CalledProcessError as e: if (re.match(r'cannot rebase: you have unstaged changes', e.stderr) or re.match(r'cannot rebase: your index contains uncommitted changes', e.stderr)): @@ -1456,9 +1461,9 @@ class CipdRoot(object): try: ensure_file = None with tempfile.NamedTemporaryFile( - suffix='.ensure', delete=False) as ensure_file: + suffix='.ensure', delete=False, mode='w') as ensure_file: ensure_file.write('$ParanoidMode CheckPresence\n\n') - for subdir, packages in sorted(self._packages_by_subdir.iteritems()): + for subdir, packages in sorted(self._packages_by_subdir.items()): ensure_file.write('@Subdir %s\n' % subdir) for package in sorted(packages, key=lambda p: p.name): ensure_file.write('%s %s\n' % (package.name, package.version)) diff --git a/gclient_utils.py b/gclient_utils.py index 48c023fdd..ae9d7217a 100644 --- a/gclient_utils.py +++ b/gclient_utils.py @@ -4,17 +4,25 @@ """Generic utils.""" +from __future__ import print_function + import codecs import collections import contextlib -import cStringIO import datetime +import functools +import io import logging import operator import os import pipes import platform -import Queue + +try: + import Queue as queue +except ImportError: # For Py3 
compatibility + import queue + import re import stat import subprocess @@ -22,10 +30,19 @@ import sys import tempfile import threading import time -import urlparse + +try: + import urlparse +except ImportError: # For Py3 compatibility + import urllib.parse as urlparse import subprocess2 +if sys.version_info.major == 2: + from cStringIO import StringIO +else: + from io import StringIO + RETRY_MAX = 3 RETRY_INITIAL_SLEEP = 0.5 @@ -42,6 +59,18 @@ THREADED_INDEX_PACK_BLACKLIST = [ 'https://chromium.googlesource.com/chromium/reference_builds/chrome_win.git' ] +"""To support rethrowing exceptions with tracebacks on both Py2 and 3.""" +if sys.version_info.major == 2: + # We have to use exec to avoid a SyntaxError in Python 3. + exec("def reraise(typ, value, tb=None):\n raise typ, value, tb\n") +else: + def reraise(typ, value, tb=None): + if value is None: + value = typ() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + class Error(Exception): """gclient exception class.""" @@ -61,9 +90,9 @@ def Elapsed(until=None): def PrintWarnings(): """Prints any accumulated warnings.""" if _WARNINGS: - print >> sys.stderr, '\n\nWarnings:' + print('\n\nWarnings:', file=sys.stderr) for warning in _WARNINGS: - print >> sys.stderr, warning + print(warning, file=sys.stderr) def AddWarning(msg): @@ -142,7 +171,8 @@ def FileRead(filename, mode='rU'): s = f.read() try: return s.decode('utf-8') - except UnicodeDecodeError: + # AttributeError is for Py3 compatibility + except (UnicodeDecodeError, AttributeError): return s @@ -230,7 +260,7 @@ def rmtree(path): if exitcode == 0: return else: - print >> sys.stderr, 'rd exited with code %d' % exitcode + print('rd exited with code %d' % exitcode, file=sys.stderr) time.sleep(3) raise Exception('Failed to remove path %s' % path) @@ -268,7 +298,7 @@ def safe_makedirs(tree): count += 1 try: os.makedirs(tree) - except OSError, e: + except OSError as e: # 17 POSIX, 183 Windows if e.errno not in (17, 183): raise @@ 
-491,9 +521,9 @@ class GClientChildren(object): with GCLIENT_CHILDREN_LOCK: if GCLIENT_CHILDREN: - print >> sys.stderr, 'Could not kill the following subprocesses:' + print('Could not kill the following subprocesses:', file=sys.stderr) for zombie in GCLIENT_CHILDREN: - print >> sys.stderr, ' ', zombie.pid + print(' ', zombie.pid, file=sys.stderr) def CheckCallAndFilter(args, stdout=None, filter_fn=None, @@ -514,12 +544,12 @@ def CheckCallAndFilter(args, stdout=None, filter_fn=None, """ assert print_stdout or filter_fn stdout = stdout or sys.stdout - output = cStringIO.StringIO() + output = io.BytesIO() filter_fn = filter_fn or (lambda x: None) sleep_interval = RETRY_INITIAL_SLEEP run_cwd = kwargs.get('cwd', os.getcwd()) - for _ in xrange(RETRY_MAX + 1): + for _ in range(RETRY_MAX + 1): kid = subprocess2.Popen( args, bufsize=0, stdout=subprocess2.PIPE, stderr=subprocess2.STDOUT, **kwargs) @@ -539,16 +569,16 @@ def CheckCallAndFilter(args, stdout=None, filter_fn=None, if in_byte: if call_filter_on_first_line: filter_fn(None) - in_line = '' + in_line = b'' while in_byte: output.write(in_byte) if print_stdout: - stdout.write(in_byte) + stdout.write(in_byte.decode()) if in_byte not in ['\r', '\n']: in_line += in_byte else: filter_fn(in_line) - in_line = '' + in_line = b'' in_byte = kid.stdout.read(1) # Flush the rest of buffered output. This is only an issue with # stdout/stderr not ending with a \n. @@ -561,15 +591,15 @@ def CheckCallAndFilter(args, stdout=None, filter_fn=None, GClientChildren.remove(kid) except KeyboardInterrupt: - print >> sys.stderr, 'Failed while running "%s"' % ' '.join(args) + print('Failed while running "%s"' % ' '.join(args), file=sys.stderr) raise if rv == 0: return output.getvalue() if not retry: break - print ("WARNING: subprocess '%s' in %s failed; will retry after a short " - 'nap...' % (' '.join('"%s"' % x for x in args), run_cwd)) + print("WARNING: subprocess '%s' in %s failed; will retry after a short " + 'nap...' 
% (' '.join('"%s"' % x for x in args), run_cwd)) time.sleep(sleep_interval) sleep_interval *= 2 raise subprocess2.CalledProcessError( @@ -602,13 +632,13 @@ class GitFilter(object): def __call__(self, line): # git uses an escape sequence to clear the line; elide it. - esc = line.find(unichr(033)) + esc = line.find(chr(0o33).encode()) if esc > -1: line = line[:esc] if self.predicate and not self.predicate(line): return now = time.time() - match = self.PERCENT_RE.match(line) + match = self.PERCENT_RE.match(line.decode()) if match: if match.group(1) != self.progress_prefix: self.progress_prefix = match.group(1) @@ -616,7 +646,7 @@ class GitFilter(object): return self.last_time = now self.out_fh.write('[%s] ' % Elapsed()) - print >> self.out_fh, line + print(line, file=self.out_fh) def FindFileUpwards(filename, path=None): @@ -653,7 +683,7 @@ def GetGClientRootAndEntries(path=None): config_file = '.gclient_entries' root = FindFileUpwards(config_file, path) if not root: - print "Can't find %s" % config_file + print("Can't find %s" % config_file) return None config_path = os.path.join(root, config_file) env = {} @@ -669,7 +699,7 @@ def lockedmethod(method): try: self.lock.acquire() except KeyboardInterrupt: - print >> sys.stderr, 'Was deadlocked' + print('Was deadlocked', file=sys.stderr) raise return method(self, *args, **kwargs) finally: @@ -687,7 +717,7 @@ class WorkItem(object): def __init__(self, name): # A unique string representing this work item. self._name = name - self.outbuf = cStringIO.StringIO() + self.outbuf = StringIO() self.start = self.finish = None self.resources = [] # List of resources this work item requires. @@ -724,7 +754,7 @@ class ExecutionQueue(object): # List of items currently running. self.running = [] # Exceptions thrown if any. - self.exceptions = Queue.Queue() + self.exceptions = queue.Queue() # Progress status self.progress = progress if self.progress: @@ -802,7 +832,7 @@ class ExecutionQueue(object): break # Check for new tasks to start. 
- for i in xrange(len(self.queued)): + for i in range(len(self.queued)): # Verify its requirements. if (self.ignore_requirements or not (set(self.queued[i].requirements) - set(self.ran))): @@ -826,28 +856,28 @@ class ExecutionQueue(object): if (now - self.last_join > datetime.timedelta(seconds=60) and self.last_subproc_output > self.last_join): if self.progress: - print >> sys.stdout, '' + print('') sys.stdout.flush() elapsed = Elapsed() - print >> sys.stdout, '[%s] Still working on:' % elapsed + print('[%s] Still working on:' % elapsed) sys.stdout.flush() for task in self.running: - print >> sys.stdout, '[%s] %s' % (elapsed, task.item.name) + print('[%s] %s' % (elapsed, task.item.name)) sys.stdout.flush() except KeyboardInterrupt: # Help debugging by printing some information: - print >> sys.stderr, ( + print( ('\nAllowed parallel jobs: %d\n# queued: %d\nRan: %s\n' - 'Running: %d') % ( - self.jobs, - len(self.queued), - ', '.join(self.ran), - len(self.running))) + 'Running: %d') % (self.jobs, len(self.queued), ', '.join( + self.ran), len(self.running)), + file=sys.stderr) for i in self.queued: - print >> sys.stderr, '%s (not started): %s' % ( - i.name, ', '.join(i.requirements)) + print( + '%s (not started): %s' % (i.name, ', '.join(i.requirements)), + file=sys.stderr) for i in self.running: - print >> sys.stderr, self.format_task_output(i.item, 'interrupted') + print( + self.format_task_output(i.item, 'interrupted'), file=sys.stderr) raise # Something happened: self.enqueue() or a thread terminated. Loop again. finally: @@ -856,12 +886,12 @@ class ExecutionQueue(object): assert not self.running, 'Now guaranteed to be single-threaded' if not self.exceptions.empty(): if self.progress: - print >> sys.stdout, '' + print('') # To get back the stack location correctly, the raise a, b, c form must be # used, passing a tuple as the first argument doesn't work. 
e, task = self.exceptions.get() - print >> sys.stderr, self.format_task_output(task.item, 'ERROR') - raise e[0], e[1], e[2] + print(self.format_task_output(task.item, 'ERROR'), file=sys.stderr) + reraise(e[0], e[1], e[2]) elif self.progress: self.progress.end() @@ -877,7 +907,7 @@ class ExecutionQueue(object): self.last_join = datetime.datetime.now() sys.stdout.flush() if self.verbose: - print >> sys.stdout, self.format_task_output(t.item) + print(self.format_task_output(t.item)) if self.progress: self.progress.update(1, t.item.name) if t.item.name in self.ran: @@ -899,22 +929,24 @@ class ExecutionQueue(object): # exception. try: task_item.start = datetime.datetime.now() - print >> task_item.outbuf, '[%s] Started.' % Elapsed(task_item.start) + print('[%s] Started.' % Elapsed(task_item.start), file=task_item.outbuf) task_item.run(*args, **kwargs) task_item.finish = datetime.datetime.now() - print >> task_item.outbuf, '[%s] Finished.' % Elapsed(task_item.finish) + print( + '[%s] Finished.' % Elapsed(task_item.finish), file=task_item.outbuf) self.ran.append(task_item.name) if self.verbose: if self.progress: - print >> sys.stdout, '' - print >> sys.stdout, self.format_task_output(task_item) + print('') + print(self.format_task_output(task_item)) if self.progress: self.progress.update(1, ', '.join(t.item.name for t in self.running)) except KeyboardInterrupt: - print >> sys.stderr, self.format_task_output(task_item, 'interrupted') + print( + self.format_task_output(task_item, 'interrupted'), file=sys.stderr) raise except Exception: - print >> sys.stderr, self.format_task_output(task_item, 'ERROR') + print(self.format_task_output(task_item, 'ERROR'), file=sys.stderr) raise @@ -935,10 +967,11 @@ class ExecutionQueue(object): work_queue = self.kwargs['work_queue'] try: self.item.start = datetime.datetime.now() - print >> self.item.outbuf, '[%s] Started.' % Elapsed(self.item.start) + print('[%s] Started.' 
% Elapsed(self.item.start), file=self.item.outbuf) self.item.run(*self.args, **self.kwargs) self.item.finish = datetime.datetime.now() - print >> self.item.outbuf, '[%s] Finished.' % Elapsed(self.item.finish) + print( + '[%s] Finished.' % Elapsed(self.item.finish), file=self.item.outbuf) except KeyboardInterrupt: logging.info('Caught KeyboardInterrupt in thread %s', self.item.name) logging.info(str(sys.exc_info())) @@ -989,8 +1022,8 @@ def RunEditor(content, git, git_editor=None): file_handle, filename = tempfile.mkstemp(text=True, prefix='cl_description') # Make sure CRLF is handled properly by requiring none. if '\r' in content: - print >> sys.stderr, ( - '!! Please remove \\r from your change description !!') + print( + '!! Please remove \\r from your change description !!', file=sys.stderr) fileobj = os.fdopen(file_handle, 'w') # Still remove \r if present. content = re.sub('\r?\n', '\n', content) @@ -1143,7 +1176,7 @@ def freeze(obj): Will raise TypeError if you pass an object which is not hashable. """ if isinstance(obj, collections.Mapping): - return FrozenDict((freeze(k), freeze(v)) for k, v in obj.iteritems()) + return FrozenDict((freeze(k), freeze(v)) for k, v in obj.items()) elif isinstance(obj, (list, tuple)): return tuple(freeze(i) for i in obj) elif isinstance(obj, set): @@ -1163,8 +1196,8 @@ class FrozenDict(collections.Mapping): # Calculate the hash immediately so that we know all the items are # hashable too. 
- self._hash = reduce(operator.xor, - (hash(i) for i in enumerate(self._d.iteritems())), 0) + self._hash = functools.reduce( + operator.xor, (hash(i) for i in enumerate(self._d.items())), 0) def __eq__(self, other): if not isinstance(other, collections.Mapping): diff --git a/git_cache.py b/git_cache.py index 9e2390562..f17f466da 100755 --- a/git_cache.py +++ b/git_cache.py @@ -6,6 +6,7 @@ """A git command for managing a local cache of git repositories.""" from __future__ import print_function + import contextlib import errno import logging @@ -17,7 +18,12 @@ import threading import time import subprocess import sys -import urlparse + +try: + import urlparse +except ImportError: # For Py3 compatibility + import urllib.parse as urlparse + import zipfile from download_from_google_storage import Gsutil diff --git a/git_cl.py b/git_cl.py index b1383a8bd..59a9af7b2 100755 --- a/git_cl.py +++ b/git_cl.py @@ -4443,7 +4443,7 @@ def GenerateGerritChangeId(message): # entropy. lines.append(message) change_hash = RunCommand(['git', 'hash-object', '-t', 'commit', '--stdin'], - stdin='\n'.join(lines)) + stdin=('\n'.join(lines)).encode()) return 'I%s' % change_hash.strip() diff --git a/gsutil.py b/gsutil.py index 8bfed540e..55e4cb035 100755 --- a/gsutil.py +++ b/gsutil.py @@ -17,7 +17,12 @@ import subprocess import sys import tempfile import time -import urllib2 + +try: + import urllib2 as urllib +except ImportError: # For Py3 compatibility + import urllib.request as urllib + import zipfile @@ -53,7 +58,7 @@ def download_gsutil(version, target_dir): local_md5 = md5_calc.hexdigest() metadata_url = '%s%s' % (API_URL, filename) - metadata = json.load(urllib2.urlopen(metadata_url)) + metadata = json.load(urllib.urlopen(metadata_url)) remote_md5 = base64.b64decode(metadata['md5Hash']) if local_md5 == remote_md5: @@ -62,7 +67,7 @@ def download_gsutil(version, target_dir): # Do the download. 
url = '%s%s' % (GSUTIL_URL, filename) - u = urllib2.urlopen(url) + u = urllib.urlopen(url) with open(target_filename, 'wb') as f: while True: buf = u.read(4096) diff --git a/metrics.py b/metrics.py index bcf033227..00d6e6d5b 100644 --- a/metrics.py +++ b/metrics.py @@ -15,7 +15,11 @@ import tempfile import threading import time import traceback -import urllib2 + +try: + import urllib2 as urllib +except ImportError: # For Py3 compatibility + import urllib.request as urllib import detect_host_arch import gclient_utils @@ -60,9 +64,9 @@ class _Config(object): # check if we can reach the page. An external developer would get access # denied. try: - req = urllib2.urlopen(metrics_utils.APP_URL + '/should-upload') + req = urllib.urlopen(metrics_utils.APP_URL + '/should-upload') self._config['is-googler'] = req.getcode() == 200 - except (urllib2.URLError, urllib2.HTTPError): + except (urllib.URLError, urllib.HTTPError): self._config['is-googler'] = False # Make sure the config variables we need are present, and initialize them to @@ -224,7 +228,7 @@ class MetricsCollector(object): self._upload_metrics_data() if exception: - raise exception[0], exception[1], exception[2] + gclient_utils.reraise(exception[0], exception[1], exception[2]) return result def collect_metrics(self, command_name): diff --git a/metrics_utils.py b/metrics_utils.py index f71ec523c..5df124a74 100644 --- a/metrics_utils.py +++ b/metrics_utils.py @@ -9,7 +9,11 @@ import re import scm import subprocess2 import sys -import urlparse + +try: + import urlparse +except ImportError: # For Py3 compatibility + import urllib.parse as urlparse # Current version of metrics recording. 
@@ -280,7 +284,7 @@ def print_boxed_text(out, min_width, lines): width = max(min_width, max(len(line) for line in lines)) out(SE + EW * (width + 2) + SW + '\n') for line in lines: - out('%s %-*s %s\n' % (NS, width, line, NS)) + out('%s %-*s %s\n' % (NS, width, line, NS)) out(NE + EW * (width + 2) + NW + '\n') def print_notice(countdown): diff --git a/scm.py b/scm.py index d7b70ea66..43a68fd2d 100644 --- a/scm.py +++ b/scm.py @@ -4,8 +4,8 @@ """SCM-specific utility classes.""" -import cStringIO import glob +import io import logging import os import platform @@ -51,7 +51,7 @@ def GenFakeDiff(filename): filename = filename.replace(os.sep, '/') nb_lines = len(file_content) # We need to use / since patch on unix will fail otherwise. - data = cStringIO.StringIO() + data = io.StringIO() data.write("Index: %s\n" % filename) data.write('=' * 67 + '\n') # Note: Should we use /dev/null instead? @@ -369,9 +369,9 @@ class GIT(object): """Asserts git's version is at least min_version.""" if cls.current_version is None: current_version = cls.Capture(['--version'], '.') - matched = re.search(r'version ([0-9\.]+)', current_version) + matched = re.search(r'version ([0-9\.]+)', current_version.decode()) cls.current_version = matched.group(1) - current_version_list = map(only_int, cls.current_version.split('.')) + current_version_list = list(map(only_int, cls.current_version.split('.'))) for min_ver in map(int, min_version.split('.')): ver = current_version_list.pop(0) if ver < min_ver: diff --git a/setup_color.py b/setup_color.py index 5baa88014..45f655673 100644 --- a/setup_color.py +++ b/setup_color.py @@ -3,6 +3,8 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+from __future__ import print_function + import os import sys from third_party import colorama @@ -90,5 +92,5 @@ def init(): if __name__ == '__main__': init() - print 'IS_TTY:', IS_TTY - print 'OUT_TYPE:', OUT_TYPE + print('IS_TTY:', IS_TTY) + print('OUT_TYPE:', OUT_TYPE) diff --git a/subprocess2.py b/subprocess2.py index d899e4729..6ec6d50ae 100644 --- a/subprocess2.py +++ b/subprocess2.py @@ -7,12 +7,17 @@ In theory you shouldn't need anything else in subprocess, or this module failed. """ -import cStringIO import codecs import errno +import io import logging import os -import Queue + +try: + import Queue +except ImportError: # For Py3 compatibility + import queue as Queue + import subprocess import sys import time @@ -20,7 +25,8 @@ import threading # Cache the string-escape codec to ensure subprocess can find it later. # See crbug.com/912292#c2 for context. -codecs.lookup('string-escape') +if sys.version_info.major == 2: + codecs.lookup('string-escape') # Constants forwarded from subprocess. PIPE = subprocess.PIPE @@ -208,7 +214,8 @@ class Popen(subprocess.Popen): # the list. kwargs['shell'] = bool(sys.platform=='win32') - if isinstance(args, basestring): + if isinstance(args, str) or (sys.version_info.major == 2 and + isinstance(args, unicode)): tmp_str = args elif isinstance(args, (list, tuple)): tmp_str = ' '.join(args) @@ -248,7 +255,7 @@ class Popen(subprocess.Popen): try: with self.popen_lock: super(Popen, self).__init__(args, **kwargs) - except OSError, e: + except OSError as e: if e.errno == errno.EAGAIN and sys.platform == 'cygwin': # Convert fork() emulation failure into a CygwinRebaseError(). 
raise CygwinRebaseError( @@ -285,7 +292,7 @@ class Popen(subprocess.Popen): def write_stdin(): try: - stdin_io = cStringIO.StringIO(input) + stdin_io = io.BytesIO(input) while True: data = stdin_io.read(1024) if data: @@ -451,7 +458,8 @@ def communicate(args, timeout=None, nag_timer=None, nag_max=None, **kwargs): """ stdin = kwargs.pop('stdin', None) if stdin is not None: - if isinstance(stdin, basestring): + if isinstance(stdin, str) or (sys.version_info.major == 2 and + isinstance(stdin, unicode)): # When stdin is passed as an argument, use it as the actual input data and # set the Popen() parameter accordingly. kwargs['stdin'] = PIPE diff --git a/tests/metrics_test.py b/tests/metrics_test.py index 2debaec64..b2e9fd605 100644 --- a/tests/metrics_test.py +++ b/tests/metrics_test.py @@ -33,7 +33,7 @@ class MetricsCollectorTest(unittest.TestCase): self.collector = metrics.MetricsCollector() # Keep track of the URL requests, file reads/writes and subprocess spawned. - self.urllib2 = mock.Mock() + self.urllib = mock.Mock() self.print_notice = mock.Mock() self.print_version_change = mock.Mock() self.Popen = mock.Mock() @@ -42,7 +42,7 @@ class MetricsCollectorTest(unittest.TestCase): # So that we don't have to update the tests everytime we change the version. 
mock.patch('metrics.metrics_utils.CURRENT_VERSION', 0).start() - mock.patch('metrics.urllib2', self.urllib2).start() + mock.patch('metrics.urllib', self.urllib).start() mock.patch('metrics.subprocess.Popen', self.Popen).start() mock.patch('metrics.gclient_utils.FileWrite', self.FileWrite).start() mock.patch('metrics.gclient_utils.FileRead', self.FileRead).start() @@ -92,7 +92,7 @@ class MetricsCollectorTest(unittest.TestCase): def test_writes_config_if_not_exists(self): self.FileRead.side_effect = [IOError(2, "No such file or directory")] mock_response = mock.Mock() - self.urllib2.urlopen.side_effect = [mock_response] + self.urllib.urlopen.side_effect = [mock_response] mock_response.getcode.side_effect = [200] self.assertTrue(self.collector.config.is_googler) @@ -106,7 +106,7 @@ class MetricsCollectorTest(unittest.TestCase): def test_writes_config_if_not_exists_non_googler(self): self.FileRead.side_effect = [IOError(2, "No such file or directory")] mock_response = mock.Mock() - self.urllib2.urlopen.side_effect = [mock_response] + self.urllib.urlopen.side_effect = [mock_response] mock_response.getcode.side_effect = [403] self.assertFalse(self.collector.config.is_googler) @@ -120,7 +120,7 @@ class MetricsCollectorTest(unittest.TestCase): def test_disables_metrics_if_cant_write_config(self): self.FileRead.side_effect = [IOError(2, 'No such file or directory')] mock_response = mock.Mock() - self.urllib2.urlopen.side_effect = [mock_response] + self.urllib.urlopen.side_effect = [mock_response] mock_response.getcode.side_effect = [200] self.FileWrite.side_effect = [IOError(13, 'Permission denied.')] diff --git a/tests/scm_unittest.py b/tests/scm_unittest.py index 29b7767e4..16a308774 100755 --- a/tests/scm_unittest.py +++ b/tests/scm_unittest.py @@ -47,7 +47,6 @@ class RootTestCase(BaseSCMTestCase): def testMembersChanged(self): self.mox.ReplayAll() members = [ - 'cStringIO', 'determine_scm', 'ElementTree', 'gclient_utils', @@ -55,6 +54,7 @@ class 
RootTestCase(BaseSCMTestCase): 'GetCasedPath', 'GIT', 'glob', + 'io', 'logging', 'only_int', 'os',