diff --git a/annotated_gclient.py b/annotated_gclient.py index f98500714..af5bcc5bd 100755 --- a/annotated_gclient.py +++ b/annotated_gclient.py @@ -41,7 +41,7 @@ def parse_got_revision(filename, revision_mapping): with open(filename) as f: data = json.load(f) - for path, info in data['solutions'].iteritems(): + for path, info in data['solutions'].items(): # gclient json paths always end with a slash path = path.rstrip('/') if path in revision_mapping: @@ -52,7 +52,7 @@ def parse_got_revision(filename, revision_mapping): def emit_buildprops(got_revisions): - for prop, revision in got_revisions.iteritems(): + for prop, revision in got_revisions.items(): print('@@@SET_BUILD_PROPERTY@%s@%s@@@' % (prop, json.dumps(revision))) diff --git a/autoninja b/autoninja index cee4578d8..5dbcc5bac 100755 --- a/autoninja +++ b/autoninja @@ -5,7 +5,7 @@ # found in the LICENSE file. # Set unique build ID. -AUTONINJA_BUILD_ID="$(python -c "import uuid; print uuid.uuid4()")" +AUTONINJA_BUILD_ID="$(python -c "import uuid; print(uuid.uuid4())")" export AUTONINJA_BUILD_ID if [ "$NINJA_SUMMARIZE_BUILD" == "1" ]; then diff --git a/cpplint.py b/cpplint.py index 04b99cbc4..d6c63df0a 100755 --- a/cpplint.py +++ b/cpplint.py @@ -908,7 +908,7 @@ class _CppLintState(object): def PrintErrorCounts(self): """Print a summary of errors by category, and the total.""" - for category, count in self.errors_by_category.iteritems(): + for category, count in self.errors_by_category.items(): sys.stderr.write('Category \'%s\' errors found: %d\n' % (category, count)) sys.stderr.write('Total errors found: %d\n' % self.error_count) @@ -4481,7 +4481,7 @@ def _GetTextInside(text, start_pattern): # Give opening punctuations to get the matching close-punctuations. matching_punctuation = {'(': ')', '{': '}', '[': ']'} - closing_punctuation = set(matching_punctuation.itervalues()) + closing_punctuation = set(matching_punctuation.values()) # Find the position to start extracting text. match = re.search(start_pattern, text, re.M) diff --git a/gclient.py b/gclient.py index 9bc33fda6..102cd43f1 100755 --- a/gclient.py +++ b/gclient.py @@ -590,7 +590,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): # If a line is in custom_deps, but not in the solution, we want to append # this line to the solution. - for dep_name, dep_info in six.iteritems(self.custom_deps): + for dep_name, dep_info in self.custom_deps.items(): if dep_name not in deps: deps[dep_name] = {'url': dep_info, 'dep_type': 'git'} @@ -601,13 +601,13 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): # recursively included by "src/ios_foo/DEPS" should also require # "checkout_ios=True". if self.condition: - for value in six.itervalues(deps): + for value in deps.values(): gclient_eval.UpdateCondition(value, 'and', self.condition) if rel_prefix: logging.warning('use_relative_paths enabled.') rel_deps = {} - for d, url in six.iteritems(deps): + for d, url in deps.items(): # normpath is required to allow DEPS to use .. in their # dependency local path. 
rel_deps[os.path.normpath(os.path.join(rel_prefix, d))] = url @@ -619,7 +619,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): def _deps_to_objects(self, deps, use_relative_paths): """Convert a deps dict to a dict of Dependency objects.""" deps_to_add = [] - for name, dep_value in six.iteritems(deps): + for name, dep_value in deps.items(): should_process = self.should_process if dep_value is None: continue @@ -727,7 +727,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): self._vars = local_scope.get('vars', {}) if self.parent: - for key, value in six.iteritems(self.parent.get_vars()): + for key, value in self.parent.get_vars().items(): if key in self._vars: self._vars[key] = value # Since we heavily post-process things, freeze ones which should @@ -764,7 +764,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): if rel_prefix: logging.warning('Updating recursedeps by prepending %s.', rel_prefix) rel_deps = {} - for depname, options in six.iteritems(self.recursedeps): + for depname, options in self.recursedeps.items(): rel_deps[ os.path.normpath(os.path.join(rel_prefix, depname))] = options self.recursedeps = rel_deps @@ -1600,7 +1600,7 @@ it or fix the checkout. full_entries = [os.path.join(self.root_dir, e.replace('/', os.path.sep)) for e in entries] - for entry, prev_url in six.iteritems(self._ReadEntries()): + for entry, prev_url in self._ReadEntries().items(): if not prev_url: # entry must have been overridden via .gclient custom_deps continue @@ -1747,7 +1747,7 @@ it or fix the checkout. 'The following --patch-ref flags were not used. Please fix it:\n%s' % ('\n'.join( patch_repo + '@' + patch_ref - for patch_repo, patch_ref in six.iteritems(patch_refs)))) + for patch_repo, patch_ref in patch_refs.items()))) # Once all the dependencies have been processed, it's now safe to write # out the gn_args_file and run the hooks. @@ -1834,7 +1834,7 @@ it or fix the checkout. 'url': rev.split('@')[0] if rev else None, 'rev': rev.split('@')[1] if rev and '@' in rev else None, } - for name, rev in six.iteritems(entries) + for name, rev in entries.items() } if self._options.output_json == '-': print(json.dumps(json_output, indent=2, separators=(',', ': '))) @@ -2122,7 +2122,7 @@ class Flattener(object): self._flatten_dep(solution) if pin_all_deps: - for dep in six.itervalues(self._deps): + for dep in self._deps.values(): self._pin_dep(dep) def add_deps_file(dep): @@ -2140,7 +2140,7 @@ class Flattener(object): return assert dep.url self._deps_files.add((dep.url, deps_file, dep.hierarchy_data())) - for dep in six.itervalues(self._deps): + for dep in self._deps.values(): add_deps_file(dep) gn_args_dep = self._deps.get(self._client.dependencies[0]._gn_args_from, @@ -2183,7 +2183,7 @@ class Flattener(object): # Only include vars explicitly listed in the DEPS files or gclient solution, # not automatic, local overrides (i.e. not all of dep.get_vars()). hierarchy = dep.hierarchy(include_url=False) - for key, value in six.iteritems(dep._vars): + for key, value in dep._vars.items(): # Make sure there are no conflicting variables. It is fine however # to use same variable name, as long as the value is consistent. assert key not in self._vars or self._vars[key][1] == value, ( @@ -2191,7 +2191,7 @@ class Flattener(object): dep.name, key, value, self._vars[key][1])) self._vars[key] = (hierarchy, value) # Override explicit custom variables. 
- for key, value in six.iteritems(dep.custom_vars): + for key, value in dep.custom_vars.items(): # Do custom_vars that don't correspond to DEPS vars ever make sense? DEPS # conditionals shouldn't be using vars that aren't also defined in the # DEPS (presubmit actually disallows this), so any new custom_var must be @@ -2344,7 +2344,7 @@ def _HooksOsToLines(hooks_os): if not hooks_os: return [] s = ['hooks_os = {'] - for hook_os, os_hooks in six.iteritems(hooks_os): + for hook_os, os_hooks in hooks_os.items(): s.append(' "%s": [' % hook_os) for dep, hook in os_hooks: s.extend([ diff --git a/gclient_eval.py b/gclient_eval.py index 66656ae86..002ebb45d 100644 --- a/gclient_eval.py +++ b/gclient_eval.py @@ -52,7 +52,7 @@ class _NodeDict(collections.MutableMapping): def MoveTokens(self, origin, delta): if self.tokens: new_tokens = {} - for pos, token in six.iteritems(self.tokens): + for pos, token in self.tokens.items(): if pos[0] >= origin: pos = (pos[0] + delta, pos[1]) token = token[:2] + (pos,) + token[3:] @@ -509,14 +509,14 @@ def Parse(content, validate_syntax, filename, vars_override=None, if 'deps_os' in result: deps = result.setdefault('deps', {}) - for os_name, os_deps in six.iteritems(result['deps_os']): + for os_name, os_deps in result['deps_os'].items(): os_deps = _StandardizeDeps(os_deps, vars_dict) _MergeDepsOs(deps, os_deps, os_name) del result['deps_os'] if 'hooks_os' in result: hooks = result.setdefault('hooks', []) - for os_name, os_hooks in six.iteritems(result['hooks_os']): + for os_name, os_hooks in result['hooks_os'].items(): for hook in os_hooks: UpdateCondition(hook, 'and', 'checkout_' + os_name) hooks.extend(os_hooks) diff --git a/gclient_utils.py b/gclient_utils.py index dde73def6..5e609ed97 100644 --- a/gclient_utils.py +++ b/gclient_utils.py @@ -1229,7 +1229,7 @@ class FrozenDict(collections.Mapping): return True if len(self) != len(other): return False - for k, v in self.iteritems(): + for k, v in self.items(): if k not in other or other[k] != v: return False return True diff --git a/gerrit_util.py b/gerrit_util.py index 9e694cf37..21ec65bea 100644 --- a/gerrit_util.py +++ b/gerrit_util.py @@ -238,7 +238,7 @@ class CookiesAuthenticator(Authenticator): return gitcookies def _get_auth_for_host(self, host): - for domain, creds in self.gitcookies.iteritems(): + for domain, creds in self.gitcookies.items(): if cookielib.domain_match(host, domain): return (creds[0], None, creds[1]) return self.netrc.authenticators(host) @@ -386,7 +386,7 @@ def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None): headers.setdefault('Content-Type', 'application/json') if LOGGER.isEnabledFor(logging.DEBUG): LOGGER.debug('%s %s://%s%s' % (reqtype, GERRIT_PROTOCOL, host, url)) - for key, val in headers.iteritems(): + for key, val in headers.items(): if key == 'Authorization': val = 'HIDDEN' LOGGER.debug('%s: %s' % (key, val)) @@ -798,7 +798,7 @@ def AddReviewers(host, change, reviewers=None, ccs=None, notify=True, resp = ReadHttpJsonResponse(conn, accept_statuses=accept_statuses) errored = set() - for result in resp.get('reviewers', {}).itervalues(): + for result in resp.get('reviewers', {}).values(): r = result.get('input') state = 'REVIEWER' if r in reviewers else 'CC' if result.get('error'): @@ -845,7 +845,7 @@ def SetReview(host, change, msg=None, labels=None, notify=None, ready=None): conn = CreateHttpConn(host, path, reqtype='POST', body=body) response = ReadHttpJsonResponse(conn) if labels: - for key, val in labels.iteritems(): + for key, val in labels.items(): if 
('labels' not in response or key not in response['labels'] or int(response['labels'][key] != int(val))): raise GerritError(200, 'Unable to set "%s" label on change %s.' % ( diff --git a/git_cl.py b/git_cl.py index e79490b73..39c6c5ee8 100755 --- a/git_cl.py +++ b/git_cl.py @@ -315,7 +315,7 @@ def _git_set_branch_config_value(key, value, branch=None, **kwargs): def _get_properties_from_options(options): prop_list = getattr(options, 'properties', []) properties = dict(x.split('=', 1) for x in prop_list) - for key, val in properties.iteritems(): + for key, val in properties.items(): try: properties[key] = json.loads(val) except ValueError: @@ -415,7 +415,7 @@ def _get_bucket_map(changelist, options, option_parser): output_stream=sys.stdout) if masters is None: return None - return {m: b for m, b in masters.iteritems()} + return {m: b for m, b in masters.items()} if options.bucket: return {options.bucket: {b: [] for b in options.bot}} @@ -452,11 +452,11 @@ def _trigger_try_jobs(auth_config, changelist, buckets, options, patchset): options: Command-line options. """ print('Scheduling jobs on:') - for bucket, builders_and_tests in sorted(buckets.iteritems()): + for bucket, builders_and_tests in sorted(buckets.items()): print('Bucket:', bucket) print('\n'.join( ' %s: %s' % (builder, tests) - for builder, tests in sorted(builders_and_tests.iteritems()))) + for builder, tests in sorted(builders_and_tests.items()))) print('To see results here, run: git cl try-results') print('To see results in browser, run: git cl web') @@ -495,13 +495,13 @@ def _make_try_job_schedule_requests(changelist, buckets, options, patchset): 'value': '1'}) requests = [] - for raw_bucket, builders_and_tests in sorted(buckets.iteritems()): + for raw_bucket, builders_and_tests in sorted(buckets.items()): project, bucket = _parse_bucket(raw_bucket) if not project or not bucket: print('WARNING Could not parse bucket "%s". Skipping.' % raw_bucket) continue - for builder, tests in sorted(builders_and_tests.iteritems()): + for builder, tests in sorted(builders_and_tests.items()): properties = shared_properties.copy() if 'presubmit' in builder.lower(): properties['dry_run'] = 'true' @@ -1874,7 +1874,7 @@ class Changelist(object): # Add the robot comments onto the list of comments, but only # keep those that are from the latest patchset. 
latest_patch_set = self.GetMostRecentPatchset() - for path, robot_comments in robot_file_comments.iteritems(): + for path, robot_comments in robot_file_comments.items(): line_comments = file_comments.setdefault(path, []) line_comments.extend( [c for c in robot_comments if c['patch_set'] == latest_patch_set]) @@ -1883,7 +1883,7 @@ class Changelist(object): # {author+date: {path: {patchset: {line: url+message}}}} comments = collections.defaultdict( lambda: collections.defaultdict(lambda: collections.defaultdict(dict))) - for path, line_comments in file_comments.iteritems(): + for path, line_comments in file_comments.items(): for comment in line_comments: tag = comment.get('tag', '') if tag.startswith('autogenerated') and 'robot_id' not in comment: @@ -2095,7 +2095,7 @@ class Changelist(object): patchset = int(revision_info['_number']) else: patchset = parsed_issue_arg.patchset - for revision_info in detail['revisions'].itervalues(): + for revision_info in detail['revisions'].values(): if int(revision_info['_number']) == parsed_issue_arg.patchset: break else: @@ -2672,7 +2672,7 @@ class Changelist(object): has_patchset = any( int(revision_data['_number']) == patchset - for revision_data in data['revisions'].itervalues()) + for revision_data in data['revisions'].values()) if not has_patchset: raise Exception('Patchset %d is not known in Gerrit change %d' % (patchset, self.GetIssue())) @@ -3246,8 +3246,8 @@ class _GitCookiesChecker(object): self._all_hosts = [ (h, u, s) for h, u, s in itertools.chain( - ((h, u, '.netrc') for h, (u, _, _) in a.netrc.hosts.iteritems()), - ((h, u, '.gitcookies') for h, (u, _) in a.gitcookies.iteritems()) + ((h, u, '.netrc') for h, (u, _, _) in a.netrc.hosts.items()), + ((h, u, '.gitcookies') for h, (u, _) in a.gitcookies.items()) ) if h.endswith(self._GOOGLESOURCE) ] @@ -3328,18 +3328,18 @@ class _GitCookiesChecker(object): def get_partially_configured_hosts(self): return set( (host if i1 else self._canonical_gerrit_googlesource_host(host)) - for host, (i1, i2) in self._get_git_gerrit_identity_pairs().iteritems() + for host, (i1, i2) in self._get_git_gerrit_identity_pairs().items() if None in (i1, i2) and host != '.' + self._GOOGLESOURCE) def get_conflicting_hosts(self): return set( host - for host, (i1, i2) in self._get_git_gerrit_identity_pairs().iteritems() + for host, (i1, i2) in self._get_git_gerrit_identity_pairs().items() if None not in (i1, i2) and i1 != i2) def get_duplicated_hosts(self): counters = collections.Counter(h for h, _, _ in self.get_hosts_with_creds()) - return set(host for host, count in counters.iteritems() if count > 1) + return set(host for host, count in counters.items() if count > 1) _EXPECTED_HOST_IDENTITY_DOMAINS = { 'chromium.googlesource.com': 'chromium.org', @@ -3352,7 +3352,7 @@ class _GitCookiesChecker(object): Note: skips hosts which have conflicting identities for Git and Gerrit. """ hosts = set() - for host, expected in self._EXPECTED_HOST_IDENTITY_DOMAINS.iteritems(): + for host, expected in self._EXPECTED_HOST_IDENTITY_DOMAINS.items(): pair = self._get_git_gerrit_identity_pairs().get(host) if pair and pair[0] == pair[1]: _, domain = self._parse_identity(pair[0]) @@ -4722,7 +4722,7 @@ def CMDtry(parser, args): print('There are no failed jobs in the latest set of jobs ' '(patchset #%d), doing nothing.' % patchset) return 0 - num_builders = sum(map(len, buckets.itervalues())) + num_builders = sum(map(len, buckets.values())) if num_builders > 10: confirm_or_exit('There are %d builders with failed builds.' 
% num_builders, action='continue') @@ -4740,7 +4740,7 @@ def CMDtry(parser, args): print('Scheduling CQ dry run on: %s' % cl.GetIssueURL()) return cl.SetCQState(_CQState.DRY_RUN) - for builders in buckets.itervalues(): + for builders in buckets.values(): if any('triggered' in b for b in builders): print('ERROR You are trying to send a job to a triggered bot. This type ' 'of bot requires an initial job from a parent (usually a builder). ' diff --git a/git_map.py b/git_map.py index 278cfbe7c..f76d0dff1 100755 --- a/git_map.py +++ b/git_map.py @@ -77,7 +77,7 @@ def main(argv): current = current_branch() all_branches = set(branches()) merge_base_map = {b: get_or_create_merge_base(b) for b in all_branches} - merge_base_map = {b: v for b, v in merge_base_map.iteritems() if v} + merge_base_map = {b: v for b, v in merge_base_map.items() if v} if current in all_branches: all_branches.remove(current) all_tags = set(tags()) @@ -86,7 +86,7 @@ def main(argv): if merge_base_map: commit = line[line.find(BRIGHT_RED)+len(BRIGHT_RED):line.find('\t')] base_for_branches = set() - for branch, sha in merge_base_map.iteritems(): + for branch, sha in merge_base_map.items(): if sha.startswith(commit): base_for_branches.add(branch) if base_for_branches: diff --git a/git_map_branches.py b/git_map_branches.py index 60cf317b2..55d574b53 100755 --- a/git_map_branches.py +++ b/git_map_branches.py @@ -147,7 +147,7 @@ class BranchMapper(object): roots = set() # A map of parents to a list of their children. - for branch, branch_info in self.__branches_info.iteritems(): + for branch, branch_info in self.__branches_info.items(): if not branch_info: continue diff --git a/git_rebase_update.py b/git_rebase_update.py index e1d6b2222..a559d072a 100755 --- a/git_rebase_update.py +++ b/git_rebase_update.py @@ -57,12 +57,12 @@ def fetch_remotes(branch_tree): dest_spec = fetchspec.partition(':')[2] remote_name = key.split('.')[1] fetchspec_map[dest_spec] = remote_name - for parent in branch_tree.itervalues(): + for parent in branch_tree.values(): if parent in tag_set: fetch_tags = True else: full_ref = git.run('rev-parse', '--symbolic-full-name', parent) - for dest_spec, remote_name in fetchspec_map.iteritems(): + for dest_spec, remote_name in fetchspec_map.items(): if fnmatch(full_ref, dest_spec): remotes.add(remote_name) break @@ -121,7 +121,7 @@ def remove_empty_branches(branch_tree): reparents[down] = (order, parent, old_parent) # Apply all reparenting recorded, in order. - for branch, value in sorted(reparents.iteritems(), key=lambda x:x[1][0]): + for branch, value in sorted(reparents.items(), key=lambda x:x[1][0]): _, parent, old_parent = value if parent in tag_set: git.set_branch_config(branch, 'remote', '.') @@ -134,7 +134,7 @@ def remove_empty_branches(branch_tree): old_parent)) # Apply all deletions recorded, in order. 
- for branch, _ in sorted(deletions.iteritems(), key=lambda x: x[1]): + for branch, _ in sorted(deletions.items(), key=lambda x: x[1]): print(git.run('branch', '-d', branch)) @@ -272,7 +272,7 @@ def main(args=None): fetch_remotes(branch_tree) merge_base = {} - for branch, parent in branch_tree.iteritems(): + for branch, parent in branch_tree.items(): merge_base[branch] = git.get_or_create_merge_base(branch, parent) logging.debug('branch_tree: %s' % pformat(branch_tree)) diff --git a/git_rename_branch.py b/git_rename_branch.py index c0ac42ec1..8a07535f7 100755 --- a/git_rename_branch.py +++ b/git_rename_branch.py @@ -36,7 +36,7 @@ def main(args): run('branch', '-m', opts.old_name, opts.new_name) # update the downstreams - for branch, merge in branch_config_map('merge').iteritems(): + for branch, merge in branch_config_map('merge').items(): if merge == 'refs/heads/' + opts.old_name: # Only care about local branches if branch_config(branch, 'remote') == '.': diff --git a/man/src/common_demo_functions.sh b/man/src/common_demo_functions.sh index 7501f5a5a..cfb26a083 100755 --- a/man/src/common_demo_functions.sh +++ b/man/src/common_demo_functions.sh @@ -37,7 +37,7 @@ praw() { pcommand() { praw "$(python -c '\ import sys, pipes; \ - print " ".join(map(pipes.quote, sys.argv[1:]))' "$@")" + print(" ".join(map(pipes.quote, sys.argv[1:])))' "$@")" } # run a visible command @@ -71,7 +71,7 @@ add() { if [[ ! $CONTENT ]] then CONTENT=$(python -c 'import random, string; \ - print "".join(random.sample(string.lowercase, 16))') + print("".join(random.sample(string.ascii_lowercase, 16)))') fi echo "$CONTENT" > $1 silent git add $1 diff --git a/my_activity.py b/my_activity.py index 81b901d0a..5620ca10d 100755 --- a/my_activity.py +++ b/my_activity.py @@ -603,7 +603,7 @@ class MyActivity(object): project, issue_id = issue_uid.split(':') missing_issues_by_project[project].append(issue_id) - for project, issue_ids in missing_issues_by_project.iteritems(): + for project, issue_ids in missing_issues_by_project.items(): self.referenced_issues += self.monorail_get_issues(project, issue_ids) def print_issues(self): @@ -676,7 +676,7 @@ class MyActivity(object): if not url: raise Exception('Dumped item %s does not specify url' % item) output[url] = dict( - (k, v) for k,v in item.iteritems() if k not in ignore_keys) + (k, v) for k,v in item.items() if k not in ignore_keys) return output class PythonObjectEncoder(json.JSONEncoder): diff --git a/owners.py b/owners.py index b8578023c..8c503f12c 100644 --- a/owners.py +++ b/owners.py @@ -271,7 +271,7 @@ class Database(object): while True: dir_owner_rules = self._paths_to_owners.get(dirname) if dir_owner_rules: - for owned_path, path_owners in dir_owner_rules.iteritems(): + for owned_path, path_owners in dir_owner_rules.items(): if self._fnmatch(objname, owned_path): obj_owners |= path_owners up_dirname = self.os_path.dirname(dirname) @@ -539,7 +539,7 @@ class Database(object): # Merge the parent information with our information, adjusting # distances as necessary, and replacing the parent directory # names with our names. - for owner, par_dir_and_distances in parent_res.iteritems(): + for owner, par_dir_and_distances in parent_res.items(): if owner in res: # If the same person is in multiple OWNERS files above a given # directory, only count the closest one.
@@ -564,7 +564,7 @@ class Database(object): dir_owners = self._all_possible_owners_for_dir_or_file( current_dir, author, all_possible_owners_for_dir_or_file_cache) - for owner, dir_and_distance in dir_owners.iteritems(): + for owner, dir_and_distance in dir_owners.items(): if owner in all_possible_owners: all_possible_owners[owner].append(dir_and_distance) else: @@ -605,7 +605,7 @@ class Database(object): total_costs_by_owner = Database.total_costs_by_owner(all_possible_owners, dirs) # Return the lowest cost owner. In the case of a tie, pick one randomly. - lowest_cost = min(total_costs_by_owner.itervalues()) + lowest_cost = min(total_costs_by_owner.values()) lowest_cost_owners = filter( lambda owner: total_costs_by_owner[owner] == lowest_cost, total_costs_by_owner) diff --git a/presubmit_canned_checks.py b/presubmit_canned_checks.py index 2b559af7e..22599a00e 100644 --- a/presubmit_canned_checks.py +++ b/presubmit_canned_checks.py @@ -949,7 +949,7 @@ def CheckBuildbotPendingBuilds(input_api, output_api, url, max_pendings, 'looking up buildbot status')] out = [] - for (builder_name, builder) in data.iteritems(): + for (builder_name, builder) in data.items(): if builder_name in ignored: continue if builder.get('state', '') == 'offline': @@ -1326,7 +1326,7 @@ def CheckCIPDPackages(input_api, output_api, platforms, packages): manifest = [] for p in platforms: manifest.append('$VerifiedPlatform %s' % (p,)) - for k, v in packages.iteritems(): + for k, v in packages.items(): manifest.append('%s %s' % (k, v)) return CheckCIPDManifest(input_api, output_api, content='\n'.join(manifest)) @@ -1468,7 +1468,7 @@ def CheckChangedLUCIConfigs(input_api, output_api): # windows file_path = f.LocalPath().replace(_os.sep, '/') logging.debug('Affected file path: %s', file_path) - for dr, cs in dir_to_config_set.iteritems(): + for dr, cs in dir_to_config_set.items(): if dr == '/' or file_path.startswith(dr): cs_to_files[cs].append({ 'path': file_path[len(dr):] if dr != '/' else file_path, @@ -1476,7 +1476,7 @@ def CheckChangedLUCIConfigs(input_api, output_api): '\n'.join(f.NewContents()).encode('utf-8')) }) outputs = [] - for cs, f in cs_to_files.iteritems(): + for cs, f in cs_to_files.items(): try: # TODO(myjang): parallelize res = request( diff --git a/presubmit_support.py b/presubmit_support.py index 672f5e736..bdf9fd640 100755 --- a/presubmit_support.py +++ b/presubmit_support.py @@ -386,7 +386,7 @@ class GerritAccessor(object): # Find revision info for the patchset we want. if patchset is not None: - for rev, rev_info in info['revisions'].iteritems(): + for rev, rev_info in info['revisions'].items(): if str(rev_info['_number']) == str(patchset): break else: @@ -1279,10 +1279,10 @@ class GetPostUploadExecuter(object): def _MergeMasters(masters1, masters2): """Merges two master maps. Merges also the tests of each builder.""" result = {} - for (master, builders) in itertools.chain(masters1.iteritems(), - masters2.iteritems()): + for (master, builders) in itertools.chain(masters1.items(), + masters2.items()): new_builders = result.setdefault(master, {}) - for (builder, tests) in builders.iteritems(): + for (builder, tests) in builders.items(): new_builders.setdefault(builder, set([])).update(tests) return result @@ -1329,7 +1329,7 @@ def DoGetTryMasters(change, presubmit_script, filename, project, change)) # Make sets to lists again for later JSON serialization. 
- for builders in results.itervalues(): + for builders in results.values(): for builder in builders: builders[builder] = list(builders[builder]) @@ -1659,7 +1659,7 @@ def canned_check_filter(method_names): setattr(presubmit_canned_checks, method_name, lambda *_a, **_kw: []) yield finally: - for name, method in filtered.iteritems(): + for name, method in filtered.items(): setattr(presubmit_canned_checks, name, method) diff --git a/recipes/recipe_modules/bot_update/api.py b/recipes/recipe_modules/bot_update/api.py index 715bb24f4..200da9642 100644 --- a/recipes/recipe_modules/bot_update/api.py +++ b/recipes/recipe_modules/bot_update/api.py @@ -183,7 +183,7 @@ class BotUpdateApi(recipe_api.RecipeApi): # Only update with non-empty values. Some recipe might otherwise # overwrite the HEAD default with an empty string. revisions.update( - (k, v) for k, v in cfg.revisions.iteritems() if v) + (k, v) for k, v in cfg.revisions.items() if v) if cfg.solutions and root_solution_revision: revisions[first_sol] = root_solution_revision # Allow for overrides required to bisect into rolls. @@ -275,7 +275,7 @@ class BotUpdateApi(recipe_api.RecipeApi): if update_presentation: # Set properties such as got_revision. for prop_name, prop_value in ( - self.last_returned_properties.iteritems()): + self.last_returned_properties.items()): step_result.presentation.properties[prop_name] = prop_value # Add helpful step description in the step UI. @@ -471,7 +471,7 @@ class BotUpdateApi(recipe_api.RecipeApi): rev_reverse_map = self.m.gclient.got_revision_reverse_mapping(cfg) return sorted( prop - for prop, project in rev_reverse_map.iteritems() + for prop, project in rev_reverse_map.items() if project == project_name ) diff --git a/recipes/recipe_modules/bot_update/resources/bot_update.py b/recipes/recipe_modules/bot_update/resources/bot_update.py index 03a2f60b4..9eaeb5976 100755 --- a/recipes/recipe_modules/bot_update/resources/bot_update.py +++ b/recipes/recipe_modules/bot_update/resources/bot_update.py @@ -5,6 +5,8 @@ # TODO(hinoka): Use logging. +from __future__ import print_function + import cStringIO import codecs from contextlib import contextmanager @@ -147,11 +149,11 @@ def call(*args, **kwargs): # pragma: no cover kwargs['env'] = env if new_env: - print '===Injecting Environment Variables===' + print('===Injecting Environment Variables===') for k, v in sorted(new_env.items()): - print '%s: %s' % (k, v) - print '===Running %s ===' % (' '.join(args),) - print 'In directory: %s' % cwd + print('%s: %s' % (k, v)) + print('===Running %s ===' % (' '.join(args),)) + print('In directory: %s' % cwd) start_time = time.time() proc = subprocess.Popen(args, **kwargs) if stdin_data: @@ -183,14 +185,14 @@ def call(*args, **kwargs): # pragma: no cover elapsed_time = ((time.time() - start_time) / 60.0) outval = out.getvalue() if code: - print '===Failed in %.1f mins of %s ===' % (elapsed_time, ' '.join(args)) - print + print('===Failed in %.1f mins of %s ===' % (elapsed_time, ' '.join(args))) + print() raise SubprocessFailed('%s failed with code %d in %s.' 
% (' '.join(args), code, cwd), code, outval) - print '===Succeeded in %.1f mins of %s ===' % (elapsed_time, ' '.join(args)) - print + print('===Succeeded in %.1f mins of %s ===' % (elapsed_time, ' '.join(args))) + print() return outval @@ -222,36 +224,36 @@ def get_gclient_spec(solutions, target_os, target_os_only, target_cpu, def solutions_printer(solutions): """Prints gclient solution to stdout.""" - print 'Gclient Solutions' - print '=================' + print('Gclient Solutions') + print('=================') for solution in solutions: name = solution.get('name') url = solution.get('url') - print '%s (%s)' % (name, url) + print('%s (%s)' % (name, url)) if solution.get('deps_file'): - print ' Dependencies file is %s' % solution['deps_file'] + print(' Dependencies file is %s' % solution['deps_file']) if 'managed' in solution: - print ' Managed mode is %s' % ('ON' if solution['managed'] else 'OFF') + print(' Managed mode is %s' % ('ON' if solution['managed'] else 'OFF')) custom_vars = solution.get('custom_vars') if custom_vars: - print ' Custom Variables:' - for var_name, var_value in sorted(custom_vars.iteritems()): - print ' %s = %s' % (var_name, var_value) + print(' Custom Variables:') + for var_name, var_value in sorted(custom_vars.items()): + print(' %s = %s' % (var_name, var_value)) custom_deps = solution.get('custom_deps') if 'custom_deps' in solution: - print ' Custom Dependencies:' - for deps_name, deps_value in sorted(custom_deps.iteritems()): + print(' Custom Dependencies:') + for deps_name, deps_value in sorted(custom_deps.items()): if deps_value: - print ' %s -> %s' % (deps_name, deps_value) + print(' %s -> %s' % (deps_name, deps_value)) else: - print ' %s: Ignore' % deps_name - for k, v in solution.iteritems(): + print(' %s: Ignore' % deps_name) + for k, v in solution.items(): # Print out all the keys we don't know about. if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps', 'managed']: continue - print ' %s is %s' % (k, v) - print + print(' %s is %s' % (k, v)) + print() def modify_solutions(input_solutions): @@ -270,8 +272,8 @@ def modify_solutions(input_solutions): # We don't want gclient to be using a safesync URL. Instead it should # using the lkgr/lkcr branch/tags. if 'safesync_url' in solution: - print 'Removing safesync url %s from %s' % (solution['safesync_url'], - parsed_path) + print('Removing safesync url %s from %s' % (solution['safesync_url'], + parsed_path)) del solution['safesync_url'] return solutions @@ -283,11 +285,11 @@ def remove(target, cleanup_dir): os.makedirs(cleanup_dir) dest = path.join(cleanup_dir, '%s_%s' % ( path.basename(target), uuid.uuid4().hex)) - print 'Marking for removal %s => %s' % (target, dest) + print('Marking for removal %s => %s' % (target, dest)) try: os.rename(target, dest) except Exception as e: - print 'Error renaming %s to %s: %s' % (target, dest, str(e)) + print('Error renaming %s to %s: %s' % (target, dest, str(e))) raise @@ -299,9 +301,9 @@ def ensure_no_checkout(dir_names, cleanup_dir): if has_checkout: for filename in os.listdir(build_dir): deletion_target = path.join(build_dir, filename) - print '.git detected in checkout, deleting %s...' % deletion_target, + print('.git detected in checkout, deleting %s...' 
% deletion_target,) remove(deletion_target, cleanup_dir) - print 'done' + print('done') def call_gclient(*args, **kwargs): @@ -363,7 +365,7 @@ def gclient_sync( args += ['--break_repo_locks'] if disable_syntax_validation: args += ['--disable-syntax-validation'] - for name, revision in sorted(revisions.iteritems()): + for name, revision in sorted(revisions.items()): if revision.upper() == 'HEAD': revision = 'origin/master' args.extend(['--revision', '%s@%s' % (name, revision)]) @@ -432,7 +434,7 @@ def create_manifest_old(): 'revision': match.group(3), } else: - print "WARNING: Couldn't match revinfo line:\n%s" % line + print("WARNING: Couldn't match revinfo line:\n%s" % line) return manifest @@ -456,7 +458,7 @@ def create_manifest(gclient_output, patch_root): dirs = {} if patch_root: patch_root = patch_root.strip('/') # Normalize directory names. - for directory, info in gclient_output.get('solutions', {}).iteritems(): + for directory, info in gclient_output.get('solutions', {}).items(): directory = directory.strip('/') # Normalize the directory name. # The format of the url is "https://repo.url/blah.git@abcdefabcdef" or # just "https://repo.url/blah.git" @@ -617,11 +619,11 @@ def _maybe_break_locks(checkout_path, tries=3): for filename in filenames: if filename.endswith('.lock'): to_break = os.path.join(dirpath, filename) - print 'breaking lock: %s' % to_break + print('breaking lock: %s' % to_break) try: os.remove(to_break) except OSError as ex: - print 'FAILED to break lock: %s: %s' % (to_break, ex) + print('FAILED to break lock: %s: %s' % (to_break, ex)) raise for _ in xrange(tries): @@ -688,14 +690,14 @@ def _git_checkout(sln, sln_dir, revisions, refs, git_cache_dir, cleanup_dir): # Only kick in deadline after second attempt to ensure we retry at least # once after initial fetch from not-yet-replicated server. if attempt >= 2 and overrun > 0: - print 'Ran %s seconds past deadline. Aborting.' % (overrun,) + print('Ran %s seconds past deadline. Aborting.' % (overrun,)) # TODO(tandrii): raise exception immediately here, instead of doing # useless step 2 trying to fetch something that we know doesn't exist # in cache **after production data gives us confidence to do so**. break sleep_secs = min(60, 2**attempt) - print 'waiting %s seconds and trying to fetch again...' % sleep_secs + print('waiting %s seconds and trying to fetch again...' % sleep_secs) time.sleep(sleep_secs) # Step 2: populate a checkout from local cache. All operations are local. @@ -707,7 +709,7 @@ def _git_checkout(sln, sln_dir, revisions, refs, git_cache_dir, cleanup_dir): # If repo deletion was aborted midway, it may have left .git in broken # state. if path.exists(sln_dir) and is_broken_repo_dir(sln_dir): - print 'Git repo %s appears to be broken, removing it' % sln_dir + print('Git repo %s appears to be broken, removing it' % sln_dir) remove(sln_dir, cleanup_dir) # Use "tries=1", since we retry manually in this loop. @@ -735,7 +737,7 @@ def _git_checkout(sln, sln_dir, revisions, refs, git_cache_dir, cleanup_dir): return except SubprocessFailed as e: # Exited abnormally, theres probably something wrong. - print 'Something failed: %s.' % str(e) + print('Something failed: %s.' % str(e)) if first_try: first_try = False # Lets wipe the checkout and try again. @@ -783,9 +785,9 @@ def parse_got_revision(gclient_output, got_revision_mapping): solutions_output = { # Make sure path always ends with a single slash. 
'%s/' % path.rstrip('/') : solution_output for path, solution_output - in gclient_output['solutions'].iteritems() + in gclient_output['solutions'].items() } - for property_name, dir_name in got_revision_mapping.iteritems(): + for property_name, dir_name in got_revision_mapping.items(): # Make sure dir_name always ends with a single slash. dir_name = '%s/' % dir_name.rstrip('/') if dir_name not in solutions_output: @@ -824,7 +826,7 @@ def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, # Get a checkout of each solution, without DEPS or hooks. # Calling git directly because there is no way to run Gclient without # invoking DEPS. - print 'Fetching Git checkout' + print('Fetching Git checkout') git_checkouts(solutions, revisions, refs, git_cache_dir, cleanup_dir) @@ -838,7 +840,7 @@ def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only, # We want to pass all non-solution revisions into the gclient sync call. solution_dirs = {sln['name'] for sln in solutions} gc_revisions = { - dirname: rev for dirname, rev in revisions.iteritems() + dirname: rev for dirname, rev in revisions.items() if dirname not in solution_dirs} # Gclient sometimes ignores "unmanaged": "False" in the gclient solution # if --revision is passed (for example, for subrepos). @@ -903,7 +905,7 @@ def parse_revisions(revisions, root): if not normalized_root.endswith('.git'): normalized_root += '.git' elif parsed_root.scheme: - print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme + print('WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme) continue else: # This is probably a local path. @@ -992,10 +994,9 @@ def parse_args(): with open(options.revision_mapping_file, 'r') as f: options.revision_mapping = json.load(f) except Exception as e: - print ( + print( 'WARNING: Caught execption while parsing revision_mapping*: %s' - % (str(e),) - ) + % (str(e),)) # Because we print CACHE_DIR out into a .gclient file, and then later run # eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets @@ -1026,23 +1027,23 @@ def prepare(options, git_slns, active): first_sln = dir_names[0] # Split all the revision specifications into a nice dict. - print 'Revisions: %s' % options.revision + print('Revisions: %s' % options.revision) revisions = parse_revisions(options.revision, first_sln) - print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln]) + print('Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln])) return revisions, step_text def checkout(options, git_slns, specs, revisions, step_text): - print 'Using Python version: %s' % (sys.version,) - print 'Checking git version...' + print('Using Python version: %s' % (sys.version,)) + print('Checking git version...') ver = git('version').strip() - print 'Using %s' % ver + print('Using %s' % ver) try: protocol = git('config', '--get', 'protocol.version') - print 'Using git protocol version %s' % protocol + print('Using git protocol version %s' % protocol) except SubprocessFailed as e: - print 'git protocol version is not specified.' + print('git protocol version is not specified.') first_sln = git_slns[0]['name'] dir_names = [sln.get('name') for sln in git_slns if 'name' in sln] @@ -1076,7 +1077,7 @@ def checkout(options, git_slns, specs, revisions, step_text): disable_syntax_validation=options.disable_syntax_validation) gclient_output = ensure_checkout(**checkout_parameters) except GclientSyncFailed: - print 'We failed gclient sync, lets delete the checkout and retry.' 
+ print('We failed gclient sync, lets delete the checkout and retry.') ensure_no_checkout(dir_names, options.cleanup_dir) gclient_output = ensure_checkout(**checkout_parameters) except PatchFailed as e: @@ -1126,14 +1127,14 @@ def checkout(options, git_slns, specs, revisions, step_text): def print_debug_info(): - print "Debugging info:" + print("Debugging info:") debug_params = { 'CURRENT_DIR': path.abspath(os.getcwd()), 'THIS_DIR': THIS_DIR, 'DEPOT_TOOLS_DIR': DEPOT_TOOLS_DIR, } - for k, v in sorted(debug_params.iteritems()): - print "%s: %r" % (k, v) + for k, v in sorted(debug_params.items()): + print("%s: %r" % (k, v)) def main(): diff --git a/recipes/recipe_modules/bot_update/test_api.py b/recipes/recipe_modules/bot_update/test_api.py index 7851d1746..0202a952b 100644 --- a/recipes/recipe_modules/bot_update/test_api.py +++ b/recipes/recipe_modules/bot_update/test_api.py @@ -20,12 +20,12 @@ class BotUpdateTestApi(recipe_test_api.RecipeTestApi): properties = { property_name: self.gen_revision(project_name) - for property_name, project_name in revision_mapping.iteritems() + for property_name, project_name in revision_mapping.items() } properties.update({ '%s_cp' % property_name: ('refs/heads/master@{#%s}' % self.gen_commit_position(project_name)) - for property_name, project_name in revision_mapping.iteritems() + for property_name, project_name in revision_mapping.items() }) output.update({ diff --git a/recipes/recipe_modules/cipd/api.py b/recipes/recipe_modules/cipd/api.py index eb5161197..3b9f1553e 100644 --- a/recipes/recipe_modules/cipd/api.py +++ b/recipes/recipe_modules/cipd/api.py @@ -36,7 +36,7 @@ def check_list_type(name, var, expect_inner): def check_dict_type(name, var, expect_key, expect_value): check_type(name, var, dict) - for key, value in var.iteritems(): + for key, value in var.items(): check_type('%s: key' % name, key, expect_key) check_type('%s[%s]' % (name, key), value, expect_value) diff --git a/recipes/recipe_modules/gclient/api.py b/recipes/recipe_modules/gclient/api.py index 6a8e31fba..9fb0e1962 100644 --- a/recipes/recipe_modules/gclient/api.py +++ b/recipes/recipe_modules/gclient/api.py @@ -133,7 +133,7 @@ class GclientApi(recipe_api.RecipeApi): rev_map = cfg.got_revision_mapping.as_jsonish() reverse_rev_map = cfg.got_revision_reverse_mapping.as_jsonish() combined_length = len(rev_map) + len(reverse_rev_map) - reverse_rev_map.update({v: k for k, v in rev_map.iteritems()}) + reverse_rev_map.update({v: k for k, v in rev_map.items()}) # Make sure we never have duplicate values in the old map. assert combined_length == len(reverse_rev_map) @@ -194,7 +194,7 @@ class GclientApi(recipe_api.RecipeApi): result = self.m.step.active_result solutions = result.json.output['solutions'] for propname, path in sorted( - self.got_revision_reverse_mapping(cfg).iteritems()): + self.got_revision_reverse_mapping(cfg).items()): # gclient json paths always end with a slash info = solutions.get(path + '/') or solutions.get(path) if info: @@ -213,7 +213,7 @@ class GclientApi(recipe_api.RecipeApi): """ cfg = gclient_config or self.c - for prop, custom_var in cfg.parent_got_revision_mapping.iteritems(): + for prop, custom_var in cfg.parent_got_revision_mapping.items(): val = str(self.m.properties.get(prop, '')) # TODO(infra): Fix coverage. 
if val: # pragma: no cover @@ -282,7 +282,7 @@ class GclientApi(recipe_api.RecipeApi): for cur_file in files: if cur_file.endswith('index.lock'): path_to_file = os.path.join(path, cur_file) - print 'deleting %s' % path_to_file + print('deleting %s' % path_to_file) os.remove(path_to_file) """, args=[self.m.path['start_dir']], @@ -336,7 +336,7 @@ class GclientApi(recipe_api.RecipeApi): return rel_path # repo_path_map keys may be non-canonical. - for key, (rel_path, _) in cfg.repo_path_map.iteritems(): + for key, (rel_path, _) in cfg.repo_path_map.items(): if self._canonicalize_repo_url(key) == repo_url: return rel_path diff --git a/recipes/recipe_modules/gclient/examples/full.expected/basic.json b/recipes/recipe_modules/gclient/examples/full.expected/basic.json index 1de9d09fa..14239ec3d 100644 --- a/recipes/recipe_modules/gclient/examples/full.expected/basic.json +++ b/recipes/recipe_modules/gclient/examples/full.expected/basic.json @@ -197,7 +197,7 @@ "cmd": [ "python", "-u", - "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n", + "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n", "[START_DIR]" ], "infra_step": true, @@ -212,7 +212,7 @@ "@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@", "@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@", "@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@", - "@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@", + "@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@", "@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@", "@@@STEP_LOG_END@python.inline@@@" ] diff --git a/recipes/recipe_modules/gclient/examples/full.expected/buildbot.json b/recipes/recipe_modules/gclient/examples/full.expected/buildbot.json index 7493e628f..a6f7a066e 100644 --- a/recipes/recipe_modules/gclient/examples/full.expected/buildbot.json +++ b/recipes/recipe_modules/gclient/examples/full.expected/buildbot.json @@ -197,7 +197,7 @@ "cmd": [ "python", "-u", - "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n", + "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n", "[START_DIR]" ], "infra_step": true, @@ -212,7 +212,7 @@ "@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@", "@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@", "@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@", - "@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@", + "@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@", 
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@", "@@@STEP_LOG_END@python.inline@@@" ] diff --git a/recipes/recipe_modules/gclient/examples/full.expected/revision.json b/recipes/recipe_modules/gclient/examples/full.expected/revision.json index f4ec3f49d..9c7da1d53 100644 --- a/recipes/recipe_modules/gclient/examples/full.expected/revision.json +++ b/recipes/recipe_modules/gclient/examples/full.expected/revision.json @@ -199,7 +199,7 @@ "cmd": [ "python", "-u", - "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n", + "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n", "[START_DIR]" ], "infra_step": true, @@ -214,7 +214,7 @@ "@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@", "@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@", "@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@", - "@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@", + "@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@", "@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@", "@@@STEP_LOG_END@python.inline@@@" ] diff --git a/recipes/recipe_modules/gclient/examples/full.expected/tryserver.json b/recipes/recipe_modules/gclient/examples/full.expected/tryserver.json index 1de9d09fa..14239ec3d 100644 --- a/recipes/recipe_modules/gclient/examples/full.expected/tryserver.json +++ b/recipes/recipe_modules/gclient/examples/full.expected/tryserver.json @@ -197,7 +197,7 @@ "cmd": [ "python", "-u", - "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n", + "\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n", "[START_DIR]" ], "infra_step": true, @@ -212,7 +212,7 @@ "@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@", "@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@", "@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@", - "@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@", + "@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@", "@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@", "@@@STEP_LOG_END@python.inline@@@" ] diff --git a/recipes/recipe_modules/gerrit/api.py b/recipes/recipe_modules/gerrit/api.py index c331a090c..94a0e0a3f 100644 --- a/recipes/recipe_modules/gerrit/api.py +++ b/recipes/recipe_modules/gerrit/api.py @@ -105,7 +105,7 @@ class GerritApi(recipe_api.RecipeApi): o_params=['ALL_REVISIONS', 'ALL_COMMITS'], limit=1) cl = cls[0] if len(cls) == 1 else {'revisions': {}} - for ri in cl['revisions'].itervalues(): + for ri in 
cl['revisions'].values(): # TODO(tandrii): add support for patchset=='current'. if str(ri['_number']) == str(patchset): return ri diff --git a/recipes/recipe_modules/git/api.py index 0be93f775..d26003fc1 100644 --- a/recipes/recipe_modules/git/api.py +++ b/recipes/recipe_modules/git/api.py @@ -18,7 +18,7 @@ class GitApi(recipe_api.RecipeApi): git_cmd = ['git'] options = kwargs.pop('git_config_options', {}) - for k, v in sorted(options.iteritems()): + for k, v in sorted(options.items()): git_cmd.extend(['-c', '%s=%s' % (k, v)]) can_fail_build = kwargs.pop('can_fail_build', True) try: @@ -59,7 +59,7 @@ """ if previous_result: assert isinstance(previous_result, dict) - assert all(isinstance(v, long) for v in previous_result.itervalues()) + assert all(isinstance(v, long) for v in previous_result.values()) assert 'size' in previous_result assert 'size-pack' in previous_result @@ -78,14 +78,14 @@ result[name] = long(value.strip()) def results_to_text(results): - return [' %s: %s' % (k, v) for k, v in results.iteritems()] + return [' %s: %s' % (k, v) for k, v in results.items()] step_result.presentation.logs['result'] = results_to_text(result) if previous_result: delta = { key: value - previous_result[key] - for key, value in result.iteritems() + for key, value in result.items() if key in previous_result} step_result.presentation.logs['delta'] = ( ['before:'] + results_to_text(previous_result) + diff --git a/recipes/recipe_modules/gitiles/resources/gerrit_client.py b/recipes/recipe_modules/gitiles/resources/gerrit_client.py index c38fc301d..64b66e885 100755 --- a/recipes/recipe_modules/gitiles/resources/gerrit_client.py +++ b/recipes/recipe_modules/gitiles/resources/gerrit_client.py @@ -167,12 +167,12 @@ def main(arguments): em = tf._extract_member def _extract_member(tarinfo, targetpath): if not os.path.abspath(targetpath).startswith(args.extract_to): - print 'Skipping %s' % (tarinfo.name,) + print('Skipping %s' % (tarinfo.name,)) ret['skipped']['filecount'] += 1 ret['skipped']['bytes'] += tarinfo.size ret['skipped']['names'].append(tarinfo.name) return - print 'Extracting %s' % (tarinfo.name,) + print('Extracting %s' % (tarinfo.name,)) ret['extracted']['filecount'] += 1 ret['extracted']['bytes'] += tarinfo.size return em(tarinfo, targetpath) diff --git a/recipes/recipe_modules/gsutil/api.py b/recipes/recipe_modules/gsutil/api.py index bcae5deb2..c222290a8 100644 --- a/recipes/recipe_modules/gsutil/api.py +++ b/recipes/recipe_modules/gsutil/api.py @@ -160,7 +160,7 @@ class GSUtilApi(recipe_api.RecipeApi): def _generate_metadata_args(self, metadata): result = [] if metadata: - for k, v in sorted(metadata.iteritems(), key=lambda (k, _): k): + for k, v in sorted(metadata.items(), key=lambda kv: kv[0]): field = self._get_metadata_field(k) param = (field) if v is None else ('%s:%s' % (field, v)) result += ['-h', param] diff --git a/recipes/recipe_modules/tryserver/api.py b/recipes/recipe_modules/tryserver/api.py index 16bffc200..2da8ada17 100644 --- a/recipes/recipe_modules/tryserver/api.py +++ b/recipes/recipe_modules/tryserver/api.py @@ -89,7 +89,7 @@ class TryserverApi(recipe_api.RecipeApi): self._gerrit_change_target_ref = ( 'refs/heads/' + self._gerrit_change_target_ref) - for rev in res['revisions'].itervalues(): + for rev in res['revisions'].values(): if int(rev['_number']) == self.gerrit_change.patchset: self._gerrit_change_fetch_ref = rev['ref'] break diff --git a/roll_dep.py
b/roll_dep.py index 63fc3ff96..5e4a08e34 100755 --- a/roll_dep.py +++ b/roll_dep.py @@ -171,7 +171,7 @@ def finalize(commit_msg, current_dir, rolls): # Pull the dependency to the right revision. This is surprising to users # otherwise. - for _head, roll_to, full_dir in sorted(rolls.itervalues()): + for _head, roll_to, full_dir in sorted(rolls.values()): check_call(['git', 'checkout', '--quiet', roll_to], cwd=full_dir) @@ -249,7 +249,7 @@ def main(): logs = [] setdep_args = [] - for dependency, (head, roll_to, full_dir) in sorted(rolls.iteritems()): + for dependency, (head, roll_to, full_dir) in sorted(rolls.items()): log = generate_commit_message( full_dir, dependency, head, roll_to, args.no_log, args.log_limit) logs.append(log) diff --git a/split_cl.py b/split_cl.py index d54a09a9a..f4fc74cc3 100644 --- a/split_cl.py +++ b/split_cl.py @@ -229,7 +229,7 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run, return 0 for cl_index, (directory, files) in \ - enumerate(files_split_by_owners.iteritems(), 1): + enumerate(files_split_by_owners.items(), 1): # Use '/' as a path separator in the branch name and the CL description # and comment. directory = directory.replace(os.path.sep, '/') diff --git a/testing_support/auto_stub.py b/testing_support/auto_stub.py index b266692e9..a47a8b244 100644 --- a/testing_support/auto_stub.py +++ b/testing_support/auto_stub.py @@ -26,8 +26,8 @@ class AutoStubMixIn(object): def tearDown(self): """Restore all the mocked members.""" if self._saved: - for obj, items in self._saved.iteritems(): - for member, previous_value in items.iteritems(): + for obj, items in self._saved.items(): + for member, previous_value in items.items(): setattr(obj, member, previous_value) @@ -57,7 +57,7 @@ class SimpleMock(object): """Registers the name of the caller function.""" caller_name = kwargs.pop('caller_name', None) or inspect.stack()[1][3] str_args = ', '.join(repr(arg) for arg in args) - str_kwargs = ', '.join('%s=%r' % (k, v) for k, v in kwargs.iteritems()) + str_kwargs = ', '.join('%s=%r' % (k, v) for k, v in kwargs.items()) self.calls.append('%s(%s)' % ( caller_name, ', '.join(filter(None, [str_args, str_kwargs])))) diff --git a/testing_support/fake_repos.py b/testing_support/fake_repos.py index e6b43a6c3..a4c665f6d 100755 --- a/testing_support/fake_repos.py +++ b/testing_support/fake_repos.py @@ -467,7 +467,7 @@ deps = { pre_deps_hooks = [ { 'action': ['python', '-c', - 'print "pre-deps hook"; open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'], + 'print("pre-deps hook"); open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'], } ] """ % { @@ -489,7 +489,7 @@ deps = { pre_deps_hooks = [ { 'action': ['python', '-c', - 'print "pre-deps hook"; open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'], + 'print("pre-deps hook"); open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'], }, { 'action': ['python', '-c', 'import sys; sys.exit(1)'], diff --git a/tests/presubmit_unittest.py b/tests/presubmit_unittest.py index af7acd3cf..98da9975f 100755 --- a/tests/presubmit_unittest.py +++ b/tests/presubmit_unittest.py @@ -1922,7 +1922,7 @@ the current line as well! "#!/bin/python\n" "# Copyright (c) 2037 Nobody.\n" "# All Rights Reserved.\n" - "print 'foo'\n" + "print('foo')\n" ) license_text = ( r".*? Copyright \(c\) 2037 Nobody." "\n" @@ -1935,7 +1935,7 @@ the current line as well! 
"#!/bin/python\n" "# Copyright (c) 2037 Nobody.\n" "# All Rights Reserved.\n" - "print 'foo'\n" + "print('foo')\n" ) license_text = ( r".*? Copyright \(c\) 0007 Nobody." "\n" @@ -1949,7 +1949,7 @@ the current line as well! "#!/bin/python\n" "# Copyright (c) 2037 Nobody.\n" "# All Rights Reserved.\n" - "print 'foo'\n" + "print('foo')\n" ) license_text = ( r".*? Copyright \(c\) 0007 Nobody." "\n" diff --git a/watchlists.py b/watchlists.py index 096eb10e7..f8d6a131f 100755 --- a/watchlists.py +++ b/watchlists.py @@ -94,7 +94,7 @@ class Watchlists(object): # Compile the regular expressions ahead of time to avoid creating them # on-the-fly multiple times per file. self._path_regexps = {} - for name, rule in defns.iteritems(): + for name, rule in defns.items(): filepath = rule.get('filepath') if not filepath: continue @@ -117,7 +117,7 @@ class Watchlists(object): watchers = set() # A set, to avoid duplicates for path in paths: path = path.replace(os.sep, '/') - for name, rule in self._path_regexps.iteritems(): + for name, rule in self._path_regexps.items(): if name not in self._watchlists: continue if rule.search(path): diff --git a/win_toolchain/package_from_installed.py b/win_toolchain/package_from_installed.py index ec37c850b..fd495c126 100644 --- a/win_toolchain/package_from_installed.py +++ b/win_toolchain/package_from_installed.py @@ -343,24 +343,24 @@ def GenerateSetEnvCmd(target_dir): with open(set_env_prefix + '.cmd', 'w') as f: f.write('@echo off\n' ':: Generated by win_toolchain\\package_from_installed.py.\n') - for var, dirs in env.iteritems(): + for var, dirs in env.items(): f.write('set %s=%s\n' % (var, BatDirs(dirs))) f.write('if "%1"=="/x64" goto x64\n') f.write('if "%1"=="/arm64" goto arm64\n') - for var, dirs in env_x86.iteritems(): + for var, dirs in env_x86.items(): f.write('set %s=%s%s\n' % ( var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else '')) f.write('goto :EOF\n') f.write(':x64\n') - for var, dirs in env_x64.iteritems(): + for var, dirs in env_x64.items(): f.write('set %s=%s%s\n' % ( var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else '')) f.write('goto :EOF\n') f.write(':arm64\n') - for var, dirs in env_arm64.iteritems(): + for var, dirs in env_arm64.items(): f.write('set %s=%s%s\n' % ( var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else '')) f.write('goto :EOF\n')