fix: port ab4aca868d from upstream #11

Merged (1 commit, Mar 15, 2020)
2 changes: 1 addition & 1 deletion pylib/gyp/__init__.py
@@ -532,7 +532,7 @@ def gyp_main(args):
generator.GenerateOutput(flat_list, targets, data, params)

if options.configs:
- valid_configs = targets[flat_list[0]]['configurations'].keys()
+ valid_configs = targets[flat_list[0]]['configurations']
for conf in options.configs:
if conf not in valid_configs:
raise GypError('Invalid config specified via --build: %s' % conf)
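Reviewer note, not part of the diff: dropping .keys() works because the value is only used for a membership test, which Python 3 supports directly on the dict (keys()[0]-style indexing is what breaks, since keys() is now a view). A minimal sketch with made-up configuration names:

    # Hypothetical configurations dict, for illustration only.
    configurations = {'Debug': {}, 'Release': {}}
    valid_configs = configurations          # no .keys() needed
    assert 'Debug' in valid_configs         # membership works on the dict itself
    assert 'Profile' not in valid_configs   # unknown configs are still rejected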
2 changes: 1 addition & 1 deletion pylib/gyp/common.py
@@ -355,7 +355,7 @@ def __init__(self):
prefix=os.path.split(filename)[1] + '.gyp.',
dir=base_temp_dir)
try:
- self.tmp_file = os.fdopen(tmp_fd, 'wb')
+ self.tmp_file = os.fdopen(tmp_fd, 'w')
except Exception:
# Don't leave turds behind.
os.unlink(self.tmp_path)
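Aside on the 'wb' to 'w' change (my reading of the intent): Python 3 separates text and bytes streams, and this writer emits str, so a binary-mode handle would raise TypeError. Rough sketch, assuming a throwaway temp file:

    import os
    import tempfile

    tmp_fd, tmp_path = tempfile.mkstemp()
    tmp_file = os.fdopen(tmp_fd, 'w')    # text mode accepts str on Python 3
    tmp_file.write("{'targets': []}\n")  # would be TypeError under 'wb'
    tmp_file.close()
    os.unlink(tmp_path)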
1 change: 1 addition & 0 deletions pylib/gyp/common_test.py
@@ -63,6 +63,7 @@ def test_platform_default(self):
self.assertFlavor('solaris', 'sunos' , {})
self.assertFlavor('linux' , 'linux2' , {})
self.assertFlavor('linux' , 'linux3' , {})
+ self.assertFlavor('linux' , 'linux' , {})

def test_param(self):
self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
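Context for the added test case (my gloss, not stated in the PR): from Python 3.3 on, sys.platform reports plain 'linux' rather than 'linux2'/'linux3', so the flavor table needs an exact 'linux' entry as well. Roughly:

    import sys

    # Python 2: 'linux2' or 'linux3'; Python 3.3+: 'linux'.
    flavor = 'linux' if sys.platform.startswith('linux') else sys.platform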
13 changes: 8 additions & 5 deletions pylib/gyp/generator/cmake.py
@@ -38,6 +38,12 @@
import gyp.common
import gyp.xcode_emulation

+ try:
+   # maketrans moved to str in python3.
+   _maketrans = string.maketrans
+ except NameError:
+   _maketrans = str.maketrans
+
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
@@ -240,10 +246,7 @@ def StringToCMakeTargetName(a):
Invalid for make: ':'
Invalid for unknown reasons but cause failures: '.'
"""
- try:
-   return a.translate(str.maketrans(' /():."', '_______'))
- except AttributeError:
-   return a.translate(string.maketrans(' /():."', '_______'))
+ return a.translate(_maketrans(' /():."', '_______'))


def WriteActions(target_name, actions, extra_sources, extra_deps,
@@ -1235,7 +1238,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
GenerateOutputForConfig(target_list, target_dicts, data,
params, user_config)
else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
+ config_names = target_dicts[target_list[0]]['configurations']
if params['parallel']:
try:
pool = multiprocessing.Pool(len(config_names))
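Side note on the maketrans shim above (illustrative, not from the PR itself): in Python 3 the helper lives on str, and the table it builds can be computed once and reused. For example:

    # Same character set that StringToCMakeTargetName sanitizes.
    table = str.maketrans(' /():."', '_______')
    print('foo/bar(baz).gyp'.translate(table))   # foo_bar_baz__gyp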
3 changes: 2 additions & 1 deletion pylib/gyp/generator/dump_dependency_json.py
@@ -1,8 +1,9 @@
- from __future__ import print_function
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

+ from __future__ import print_function
+
import collections
import os
import gyp
2 changes: 1 addition & 1 deletion pylib/gyp/generator/eclipse.py
@@ -424,7 +424,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config)
else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
+ config_names = target_dicts[target_list[0]]['configurations']
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name)
10 changes: 5 additions & 5 deletions pylib/gyp/generator/make.py
@@ -821,7 +821,7 @@ def Write(self, qualified_target, base_path, output_filename, spec, configs,
gyp.xcode_emulation.MacPrefixHeader(
self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
self.Pchify))
- sources = list(filter(Compilable, all_sources))
+ sources = [x for x in all_sources if Compilable(x)]
if sources:
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
extensions = set([os.path.splitext(s)[1] for s in sources])
@@ -1314,7 +1314,7 @@ def WriteSources(self, configs, deps, sources,

# If there are any object files in our input file list, link them into our
# output.
- extra_link_deps += list(filter(Linkable, sources))
+ extra_link_deps += [source for source in sources if Linkable(source)]

self.WriteLn()

@@ -1547,9 +1547,9 @@ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
# Postbuilds expect to be run in the gyp file's directory, so insert an
# implicit postbuild to cd to there.
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
- for i in range(len(postbuilds)):
-   if not postbuilds[i].startswith('$'):
-     postbuilds[i] = EscapeShellArgument(postbuilds[i])
+ for i, postbuild in enumerate(postbuilds):
+   if not postbuild.startswith('$'):
+     postbuilds[i] = EscapeShellArgument(postbuild)
self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
self.WriteLn('%s: POSTBUILDS := %s' % (
QuoteSpaces(self.output), ' '.join(postbuilds)))
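Note on the filter()-to-comprehension swaps in this file (my gloss): Python 3's filter() returns a lazy iterator, so list(filter(...)) still works; the comprehension simply avoids the extra wrapping. Sketch with a stand-in predicate:

    def Compilable(name):
        # Simplified stand-in for the real predicate in make.py.
        return name.endswith(('.c', '.cc', '.cpp'))

    all_sources = ['a.c', 'b.h', 'c.cc']
    sources = [x for x in all_sources if Compilable(x)]
    assert sources == list(filter(Compilable, all_sources)) == ['a.c', 'c.cc']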
8 changes: 4 additions & 4 deletions pylib/gyp/generator/msvs.py
@@ -1787,8 +1787,8 @@ def _CollapseSingles(parent, node):
# such projects up one level.
if (type(node) == dict and
len(node) == 1 and
- list(node)[0] == parent + '.vcproj'):
- return node[list(node)[0]]
+ next(iter(node)) == parent + '.vcproj'):
+ return node[next(iter(node))]
if type(node) != dict:
return node
for child in node:
@@ -1807,8 +1807,8 @@ def _GatherSolutionFolders(sln_projects, project_objects, flat):
# Walk down from the top until we hit a folder that has more than one entry.
# In practice, this strips the top-level "src/" dir from the hierarchy in
# the solution.
- while len(root) == 1 and type(root[list(root)[0]]) == dict:
-   root = root[list(root)[0]]
+ while len(root) == 1 and type(root[next(iter(root))]) == dict:
+   root = root[next(iter(root))]
# Collapse singles.
root = _CollapseSingles('', root)
# Merge buckets until everything is a root entry.
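On list(node)[0] versus next(iter(node)) (illustration with a hypothetical solution-folder node): both fetch the first key, but next(iter(...)) does it without materializing every key into a list first.

    node = {'src.vcproj': {'child': None}}   # made-up node
    first_key = next(iter(node))             # 'src.vcproj'
    assert first_key == list(node)[0]        # same answer, less work
    assert node[first_key] == {'child': None}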
8 changes: 4 additions & 4 deletions pylib/gyp/generator/ninja.py
@@ -1774,7 +1774,7 @@ class MEMORYSTATUSEX(ctypes.Structure):

# VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
# on a 64 GB machine.
- mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB
+ mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB
hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
return min(mem_limit, hard_cap)
elif sys.platform.startswith('linux'):
@@ -1786,14 +1786,14 @@ class MEMORYSTATUSEX(ctypes.Structure):
if not match:
continue
# Allow 8Gb per link on Linux because Gold is quite memory hungry
- return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+ return max(1, int(match.group(1)) // (8 * (2 ** 20)))
return 1
elif sys.platform == 'darwin':
try:
avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
# A static library debug build of Chromium's unit_tests takes ~2.7GB, so
# 4GB per ld process allows for some more bloat.
- return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
+ return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB
except:
return 1
else:
@@ -2483,7 +2483,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config)
else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
+ config_names = target_dicts[target_list[0]]['configurations']
if params['parallel']:
try:
pool = multiprocessing.Pool(len(config_names))
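Why / becomes // in the concurrency math (my reading): Python 3's / is true division and yields a float, while these values feed integer process counts. Quick check with a made-up memory size:

    avail_bytes = 32 * (2 ** 30)            # pretend 32 GB machine
    per_link = 4 * (2 ** 30)                # 4 GB per ld process
    print(avail_bytes / per_link)           # 8.0 -- float on Python 3
    print(max(1, avail_bytes // per_link))  # 8   -- int, safe for a Pool size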
18 changes: 8 additions & 10 deletions pylib/gyp/generator/xcode.py
@@ -446,7 +446,7 @@ def Write(self):
dir=self.path)

try:
- output_file = os.fdopen(output_fd, 'wb')
+ output_file = os.fdopen(output_fd, 'w')

self.project_file.Print(output_file)
output_file.close()
@@ -1018,22 +1018,21 @@ def GenerateOutput(target_list, target_dicts, data, params):
makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only
# if it's got changes?
- makefile = open(makefile_path, 'wb')
+ makefile = open(makefile_path, 'w')

# make will build the first target in the makefile by default. By
# convention, it's called "all". List all (or at least one)
# concrete output for each rule source as a prerequisite of the "all"
# target.
makefile.write('all: \\\n')
- for concrete_output_index in \
-     range(0, len(concrete_outputs_by_rule_source)):
+ for concrete_output_index, concrete_output_by_rule_source in \
+     enumerate(concrete_outputs_by_rule_source):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
# Otherwise, "all" could just contain the entire list of
# concrete_outputs_all.
- concrete_output = \
-     concrete_outputs_by_rule_source[concrete_output_index][0]
+ concrete_output = concrete_output_by_rule_source[0]
if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
eol = ''
else:
@@ -1049,8 +1048,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
# rule source. Collect the names of the directories that are
# required.
concrete_output_dirs = []
- for concrete_output_index in range(0, len(concrete_outputs)):
-   concrete_output = concrete_outputs[concrete_output_index]
+ for concrete_output_index, concrete_output in \
+     enumerate(concrete_outputs):
if concrete_output_index == 0:
bol = ''
else:
@@ -1068,8 +1067,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
- for prerequisite_index in range(0, len(prerequisites)):
-   prerequisite = prerequisites[prerequisite_index]
+ for prerequisite_index, prerequisite in enumerate(prerequisites):
if prerequisite_index == len(prerequisites) - 1:
eol = ''
else:
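The range(len(...))-to-enumerate rewrites in this file are behavior-preserving; enumerate hands back the index and the item together. Tiny sketch with invented prerequisites:

    prerequisites = ['rule_source.gyp', 'extra_input.txt']   # hypothetical
    for prerequisite_index, prerequisite in enumerate(prerequisites):
        eol = '' if prerequisite_index == len(prerequisites) - 1 else ' \\'
        print('    %s%s' % (prerequisite, eol))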
31 changes: 14 additions & 17 deletions pylib/gyp/input.py
@@ -1028,9 +1028,9 @@ def ExpandVariables(input, phase, variables, build_file):

# Convert all strings that are canonically-represented integers into integers.
if type(output) is list:
- for index in range(0, len(output)):
-   if IsStrCanonicalInt(output[index]):
-     output[index] = int(output[index])
+ for index, outstr in enumerate(output):
+   if IsStrCanonicalInt(outstr):
+     output[index] = int(outstr)
elif IsStrCanonicalInt(output):
output = int(output)

@@ -1386,9 +1386,9 @@ def QualifyDependencies(targets):
toolset = target_dict['toolset']
for dependency_key in all_dependency_sections:
dependencies = target_dict.get(dependency_key, [])
- for index in range(0, len(dependencies)):
+ for index, dep in enumerate(dependencies):
dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
- target_build_file, dependencies[index], toolset)
+ target_build_file, dep, toolset)
if not multiple_toolsets:
# Ignore toolset specification in the dependency if it is specified.
dep_toolset = toolset
@@ -1835,7 +1835,7 @@ def BuildDependencyList(targets):
if not root_node.dependents:
# If all targets have dependencies, add the first target as a dependent
# of root_node so that the cycle can be discovered from root_node.
- target = targets.keys()[0]
+ target = next(iter(targets))
target_node = dependency_nodes[target]
target_node.dependencies.append(root_node)
root_node.dependents.append(target_node)
@@ -1898,7 +1898,7 @@ def VerifyNoGYPFileCircularDependencies(targets):
if not root_node.dependents:
# If all files have dependencies, add the first file as a dependent
# of root_node so that the cycle can be discovered from root_node.
- file_node = dependency_nodes.values()[0]
+ file_node = next(iter(dependency_nodes.values()))
file_node.dependencies.append(root_node)
root_node.dependents.append(file_node)
cycles = []
@@ -2295,10 +2295,9 @@ def SetUpConfigurations(target, target_dict):
merged_configurations[configuration])

# Now drop all the abstract ones.
- for configuration in list(target_dict['configurations']):
-   old_configuration_dict = target_dict['configurations'][configuration]
-   if old_configuration_dict.get('abstract'):
-     del target_dict['configurations'][configuration]
+ configs = target_dict['configurations']
+ target_dict['configurations'] = \
+     {k: v for k, v in configs.items() if not v.get('abstract')}

# Now that all of the target's configurations have been built, go through
# the target dict's keys and remove everything that's been moved into a
@@ -2406,8 +2405,8 @@ def ProcessListFiltersInDict(name, the_dict):
exclude_key = list_key + '!'
if exclude_key in the_dict:
for exclude_item in the_dict[exclude_key]:
- for index in range(0, len(the_list)):
-   if exclude_item == the_list[index]:
+ for index, list_item in enumerate(the_list):
+   if exclude_item == list_item:
# This item matches the exclude_item, so set its action to 0
# (exclude).
list_actions[index] = 0
@@ -2432,8 +2431,7 @@ def ProcessListFiltersInDict(name, the_dict):
raise ValueError('Unrecognized action ' + action + ' in ' + name + \
' key ' + regex_key)

- for index in range(0, len(the_list)):
-   list_item = the_list[index]
+ for index, list_item in enumerate(the_list):
if list_actions[index] == action_value:
# Even if the regex matches, nothing will change so continue (regex
# searches are expensive).
@@ -2672,8 +2670,7 @@ def TurnIntIntoStrInDict(the_dict):
def TurnIntIntoStrInList(the_list):
"""Given list the_list, recursively converts all integers into strings.
"""
- for index in range(0, len(the_list)):
-   item = the_list[index]
+ for index, item in enumerate(the_list):
if type(item) is int:
the_list[index] = str(item)
elif type(item) is dict:
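On the SetUpConfigurations change above (my gloss): the old loop copied the keys with list() so it could delete from the dict while looping; the comprehension expresses the same filter in one step and avoids the RuntimeError Python 3 raises if you delete during plain dict iteration. Sketch with invented configuration names:

    configs = {
        'Base': {'abstract': 1},          # hypothetical abstract config
        'Debug': {'defines': ['DEBUG']},
    }
    configs = {k: v for k, v in configs.items() if not v.get('abstract')}
    assert list(configs) == ['Debug']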
15 changes: 8 additions & 7 deletions pylib/gyp/mac_tool.py
@@ -18,7 +18,6 @@
import plistlib
import re
import shutil
- import string
import struct
import subprocess
import sys
@@ -157,9 +156,11 @@ def _DetectInputEncoding(self, file_name):
header = fp.read(3)
except Exception:
return None
- if header.startswith(("\xFE\xFF", "\xFF\xFE")):
+ if header.startswith(b"\xFE\xFF"):
    return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
+ elif header.startswith(b"\xFF\xFE"):
+   return "UTF-16"
+ elif header.startswith(b"\xEF\xBB\xBF"):
return "UTF-8"
else:
return None
@@ -173,7 +174,7 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
plist = plistlib.readPlistFromString(lines)
if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
+ plist.update(json.loads(keys[0]))
lines = plistlib.writePlistToString(plist)

# Go through all the environment variables and replace them as variables in
@@ -184,7 +185,7 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
continue
evar = '${%s}' % key
evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
+ lines = lines.replace(lines, evar, evalue)

# Xcode supports various suffices on environment variables, which are
# all undocumented. :rfc1034identifier is used in the standard project
@@ -194,11 +195,11 @@ def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
# in a URL either -- oops, hence :rfc1034identifier was born.
evar = '${%s:identifier}' % key
evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
+ lines = lines.replace(lines, evar, evalue)

evar = '${%s:rfc1034identifier}' % key
evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
+ lines = lines.replace(lines, evar, evalue)

# Remove any keys with values that haven't been replaced.
lines = lines.splitlines()
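On the BOM detection change (illustrative; the open() call sits outside the hunk, so I am assuming the file is read in binary mode): a binary read returns bytes in Python 3, and startswith() then needs bytes prefixes; mixing in str prefixes raises TypeError. Standalone sketch:

    header = b'\xef\xbb\xbfplist data'[:3]   # pretend UTF-8 BOM header
    if header.startswith(b'\xFE\xFF') or header.startswith(b'\xFF\xFE'):
        encoding = 'UTF-16'
    elif header.startswith(b'\xEF\xBB\xBF'):
        encoding = 'UTF-8'
    else:
        encoding = None
    assert encoding == 'UTF-8'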