Mirror of https://github.com/nodejs/node.git (synced 2025-05-15 22:04:26 +00:00)
tools: update gyp to r1601
Among other things, this should make it easier for people to build node.js on OpenBSD.
parent 329b5388ba
commit 8632af381e
@@ -2,5 +2,7 @@
 # Name or Organization <email address>

 Google Inc.
+Bloomberg Finance L.P.
 Steven Knight <knight@baldmt.com>
 Ryan Norton <rnorton10@gmail.com>
@@ -75,13 +75,20 @@ def CheckChangeOnUpload(input_api, output_api):

 def CheckChangeOnCommit(input_api, output_api):
   report = []

+  # Accept any year number from 2009 to the current year.
+  current_year = int(input_api.time.strftime('%Y'))
+  allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
+  years_re = '(' + '|'.join(allowed_years) + ')'
+
+  # The (c) is deprecated, but tolerate it until it's removed from all files.
   license = (
-      r'.*? Copyright \(c\) %(year)s Google Inc\. All rights reserved\.\n'
+      r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
       r'.*? Use of this source code is governed by a BSD-style license that '
       r'can be\n'
       r'.*? found in the LICENSE file\.\n'
   ) % {
-      'year': input_api.time.strftime('%Y'),
+      'year': years_re,
   }

   report.extend(input_api.canned_checks.PanProjectChecks(
@@ -106,4 +113,4 @@ def CheckChangeOnCommit(input_api, output_api):


 def GetPreferredTrySlaves():
-  return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac']
+  return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac', 'gyp-android']
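The presubmit change above builds an explicit alternation of every year from 2009 through the current year, and makes the "(c) " optional. A minimal standalone sketch of the same idea, using plain re/time instead of the presubmit input_api:

    import re
    import time

    current_year = int(time.strftime('%Y'))
    years_re = '(' + '|'.join(str(y) for y in range(2009, current_year + 1)) + ')'
    license_re = (r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.'
                  % {'year': years_re})

    print(bool(re.match(license_re, '# Copyright (c) 2012 Google Inc. All rights reserved.')))  # True
    print(bool(re.match(license_re, '# Copyright 2013 Google Inc. All rights reserved.')))      # True: (c) is optional now
    print(bool(re.match(license_re, '# Copyright 2008 Google Inc. All rights reserved.')))      # False: predates 2009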
Deleted file (98 lines):
@@ -1,98 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Argument-less script to select what to run on the buildbots."""
-
-
-import os
-import shutil
-import subprocess
-import sys
-
-
-if sys.platform in ['win32', 'cygwin']:
-  EXE_SUFFIX = '.exe'
-else:
-  EXE_SUFFIX = ''
-
-
-BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
-TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
-ROOT_DIR = os.path.dirname(TRUNK_DIR)
-OUT_DIR = os.path.join(TRUNK_DIR, 'out')
-
-
-def GypTestFormat(title, format=None, msvs_version=None):
-  """Run the gyp tests for a given format, emitting annotator tags.
-
-  See annotator docs at:
-    https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
-  Args:
-    format: gyp format to test.
-  Returns:
-    0 for sucesss, 1 for failure.
-  """
-  if not format:
-    format = title
-
-  print '@@@BUILD_STEP ' + title + '@@@'
-  sys.stdout.flush()
-  env = os.environ.copy()
-  # TODO(bradnelson): remove this when this issue is resolved:
-  #     http://code.google.com/p/chromium/issues/detail?id=108251
-  if format == 'ninja':
-    env['NOGOLD'] = '1'
-  if msvs_version:
-    env['GYP_MSVS_VERSION'] = msvs_version
-  retcode = subprocess.call(' '.join(
-      [sys.executable, 'trunk/gyptest.py',
-       '--all',
-       '--passed',
-       '--format', format,
-       '--chdir', 'trunk',
-       '--path', '../scons']),
-      cwd=ROOT_DIR, env=env, shell=True)
-  if retcode:
-    # Emit failure tag, and keep going.
-    print '@@@STEP_FAILURE@@@'
-    return 1
-  return 0
-
-
-def GypBuild():
-  # Dump out/ directory.
-  print '@@@BUILD_STEP cleanup@@@'
-  print 'Removing %s...' % OUT_DIR
-  shutil.rmtree(OUT_DIR, ignore_errors=True)
-  print 'Done.'
-
-  retcode = 0
-  if sys.platform.startswith('linux'):
-    retcode += GypTestFormat('ninja')
-    retcode += GypTestFormat('scons')
-    retcode += GypTestFormat('make')
-  elif sys.platform == 'darwin':
-    retcode += GypTestFormat('ninja')
-    retcode += GypTestFormat('xcode')
-    retcode += GypTestFormat('make')
-  elif sys.platform == 'win32':
-    retcode += GypTestFormat('ninja')
-    retcode += GypTestFormat('msvs-2008', format='msvs', msvs_version='2008')
-    if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
-      retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
-  else:
-    raise Exception('Unknown platform')
-  if retcode:
-    # TODO(bradnelson): once the annotator supports a postscript (section for
-    # after the build proper that could be used for cumulative failures),
-    # use that instead of this. This isolates the final return value so
-    # that it isn't misattributed to the last stage.
-    print '@@@BUILD_STEP failures@@@'
-    sys.exit(retcode)
-
-
-if __name__ == '__main__':
-  GypBuild()
tools/gyp/data/win/large-pdb-shim.cc (new file, 12 lines)
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
+// then used during the final link for modules that have large PDBs. Otherwise,
+// the linker will generate a pdb with a page size of 1KB, which imposes a limit
+// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
+// (rather than the linker), this limit is avoided. With this in place PDBs may
+// grow to 2GB.
+//
+// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
@@ -212,6 +212,7 @@ def main(argv=None):
  format_list = {
    'freebsd7': ['make'],
    'freebsd8': ['make'],
+   'openbsd5': ['make'],
    'cygwin': ['msvs'],
    'win32': ['msvs', 'ninja'],
    'linux2': ['make', 'ninja'],
tools/gyp/pylib/gyp/MSVSUtil.py (new file, 212 lines)
@@ -0,0 +1,212 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions shared amongst the Windows generators."""
+
+import copy
+import os
+
+
+_TARGET_TYPE_EXT = {
+  'executable': '.exe',
+  'shared_library': '.dll'
+}
+
+
+def _GetLargePdbShimCcPath():
+  """Returns the path of the large_pdb_shim.cc file."""
+  this_dir = os.path.abspath(os.path.dirname(__file__))
+  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
+  win_data_dir = os.path.join(src_dir, 'data', 'win')
+  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
+  return large_pdb_shim_cc
+
+
+def _DeepCopySomeKeys(in_dict, keys):
+  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
+
+  Arguments:
+    in_dict: The dictionary to copy.
+    keys: The keys to be copied. If a key is in this list and doesn't exist in
+        |in_dict| this is not an error.
+  Returns:
+    The partially deep-copied dictionary.
+  """
+  d = {}
+  for key in keys:
+    if key not in in_dict:
+      continue
+    d[key] = copy.deepcopy(in_dict[key])
+  return d
+
+
+def _SuffixName(name, suffix):
+  """Add a suffix to the end of a target.
+
+  Arguments:
+    name: name of the target (foo#target)
+    suffix: the suffix to be added
+  Returns:
+    Target name with suffix added (foo_suffix#target)
+  """
+  parts = name.rsplit('#', 1)
+  parts[0] = '%s_%s' % (parts[0], suffix)
+  return '#'.join(parts)
+
+
+def _ShardName(name, number):
+  """Add a shard number to the end of a target.
+
+  Arguments:
+    name: name of the target (foo#target)
+    number: shard number
+  Returns:
+    Target name with shard added (foo_1#target)
+  """
+  return _SuffixName(name, str(number))
+
+
+def ShardTargets(target_list, target_dicts):
+  """Shard some targets apart to work around the linkers limits.
+
+  Arguments:
+    target_list: List of target pairs: 'base/base.gyp:base'.
+    target_dicts: Dict of target properties keyed on target pair.
+  Returns:
+    Tuple of the new sharded versions of the inputs.
+  """
+  # Gather the targets to shard, and how many pieces.
+  targets_to_shard = {}
+  for t in target_dicts:
+    shards = int(target_dicts[t].get('msvs_shard', 0))
+    if shards:
+      targets_to_shard[t] = shards
+  # Shard target_list.
+  new_target_list = []
+  for t in target_list:
+    if t in targets_to_shard:
+      for i in range(targets_to_shard[t]):
+        new_target_list.append(_ShardName(t, i))
+    else:
+      new_target_list.append(t)
+  # Shard target_dict.
+  new_target_dicts = {}
+  for t in target_dicts:
+    if t in targets_to_shard:
+      for i in range(targets_to_shard[t]):
+        name = _ShardName(t, i)
+        new_target_dicts[name] = copy.copy(target_dicts[t])
+        new_target_dicts[name]['target_name'] = _ShardName(
+            new_target_dicts[name]['target_name'], i)
+        sources = new_target_dicts[name].get('sources', [])
+        new_sources = []
+        for pos in range(i, len(sources), targets_to_shard[t]):
+          new_sources.append(sources[pos])
+        new_target_dicts[name]['sources'] = new_sources
+    else:
+      new_target_dicts[t] = target_dicts[t]
+  # Shard dependencies.
+  for t in new_target_dicts:
+    dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
+    new_dependencies = []
+    for d in dependencies:
+      if d in targets_to_shard:
+        for i in range(targets_to_shard[d]):
+          new_dependencies.append(_ShardName(d, i))
+      else:
+        new_dependencies.append(d)
+    new_target_dicts[t]['dependencies'] = new_dependencies

+  return (new_target_list, new_target_dicts)
+
+
+def InsertLargePdbShims(target_list, target_dicts, vars):
+  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
+
+  This is a workaround for targets with PDBs greater than 1GB in size, the
+  limit for the 1KB pagesize PDBs created by the linker by default.
+
+  Arguments:
+    target_list: List of target pairs: 'base/base.gyp:base'.
+    target_dicts: Dict of target properties keyed on target pair.
+    vars: A dictionary of common GYP variables with generator-specific values.
+  Returns:
+    Tuple of the shimmed version of the inputs.
+  """
+  # Determine which targets need shimming.
+  targets_to_shim = []
+  for t in target_dicts:
+    target_dict = target_dicts[t]
+    # We only want to shim targets that have msvs_large_pdb enabled.
+    if not int(target_dict.get('msvs_large_pdb', 0)):
+      continue
+    # This is intended for executable, shared_library and loadable_module
+    # targets where every configuration is set up to produce a PDB output.
+    # If any of these conditions is not true then the shim logic will fail
+    # below.
+    targets_to_shim.append(t)
+
+  large_pdb_shim_cc = _GetLargePdbShimCcPath()
+
+  for t in targets_to_shim:
+    target_dict = target_dicts[t]
+    target_name = target_dict.get('target_name')
+
+    base_dict = _DeepCopySomeKeys(target_dict,
+        ['configurations', 'default_configuration', 'toolset'])
+
+    # This is the dict for copying the source file (part of the GYP tree)
+    # to the intermediate directory of the project. This is necessary because
+    # we can't always build a relative path to the shim source file (on Windows
+    # GYP and the project may be on different drives), and Ninja hates absolute
+    # paths (it ends up generating the .obj and .obj.d alongside the source
+    # file, polluting GYPs tree).
+    copy_suffix = '_large_pdb_copy'
+    copy_target_name = target_name + '_' + copy_suffix
+    full_copy_target_name = _SuffixName(t, copy_suffix)
+    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
+    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
+    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
+    copy_dict = copy.deepcopy(base_dict)
+    copy_dict['target_name'] = copy_target_name
+    copy_dict['type'] = 'none'
+    copy_dict['sources'] = [ large_pdb_shim_cc ]
+    copy_dict['copies'] = [{
+      'destination': shim_cc_dir,
+      'files': [ large_pdb_shim_cc ]
+    }]
+
+    # This is the dict for the PDB generating shim target. It depends on the
+    # copy target.
+    shim_suffix = '_large_pdb_shim'
+    shim_target_name = target_name + '_' + shim_suffix
+    full_shim_target_name = _SuffixName(t, shim_suffix)
+    shim_dict = copy.deepcopy(base_dict)
+    shim_dict['target_name'] = shim_target_name
+    shim_dict['type'] = 'static_library'
+    shim_dict['sources'] = [ shim_cc_path ]
+    shim_dict['dependencies'] = [ full_copy_target_name ]
+
+    # Set up the shim to output its PDB to the same location as the final linker
+    # target.
+    for config in shim_dict.get('configurations').itervalues():
+      msvs = config.setdefault('msvs_settings')
+
+      linker = msvs.pop('VCLinkerTool')  # We want to clear this dict.
+      pdb_path = linker.get('ProgramDatabaseFile')
+
+      compiler = msvs.setdefault('VCCLCompilerTool', {})
+      compiler.setdefault('DebugInformationFormat', '3')
+      compiler.setdefault('ProgramDataBaseFileName', pdb_path)
+
+    # Add the new targets.
+    target_list.append(full_copy_target_name)
+    target_list.append(full_shim_target_name)
+    target_dicts[full_copy_target_name] = copy_dict
+    target_dicts[full_shim_target_name] = shim_dict
+
+    # Update the original target to depend on the shim target.
+    target_dict.setdefault('dependencies', []).append(full_shim_target_name)
+
+  return (target_list, target_dicts)
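ShardTargets above distributes a target's sources round-robin across the requested number of shards and rewrites names via _ShardName. The slicing behaviour is easy to verify in isolation (a sketch; real gyp target dicts carry many more keys):

    sources = ['a.cc', 'b.cc', 'c.cc', 'd.cc', 'e.cc']
    shards = 2
    for i in range(shards):
        # range(i, len(sources), shards) picks every shards-th source
        # starting at offset i -- i.e. sources[i::shards].
        print(i, [sources[pos] for pos in range(i, len(sources), shards)])
    # 0 ['a.cc', 'c.cc', 'e.cc']
    # 1 ['b.cc', 'd.cc']

Name rewriting follows the foo#target convention: _ShardName('base/base.gyp:base#target', 0) returns 'base/base.gyp:base_0#target', and dependencies on a sharded target are fanned out to every shard.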
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
+# Copyright (c) 2013 Google Inc. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

@@ -355,6 +355,13 @@ def SelectVisualStudioVersion(version='auto'):
     '2012': ('11.0',),
     '2012e': ('11.0',),
   }
+  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
+  if override_path:
+    msvs_version = os.environ.get('GYP_MSVS_VERSION')
+    if not msvs_version or 'e' not in msvs_version:
+      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
+                       'set to an "e" version (e.g. 2010e)')
+    return _CreateVersion(msvs_version, override_path, sdk_based=True)
   version = str(version)
   versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
   if not versions:
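The override is driven entirely by environment variables; for example (hypothetical install path):

    import os

    # Point gyp at a Visual Studio that auto-detection would not find.
    os.environ['GYP_MSVS_OVERRIDE_PATH'] = r'C:\custom\vs2010'  # hypothetical path
    os.environ['GYP_MSVS_VERSION'] = '2010e'  # must be an "e" (express) version

    # SelectVisualStudioVersion() now returns
    # _CreateVersion('2010e', r'C:\custom\vs2010', sdk_based=True)
    # without running version auto-detection at all.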
@@ -23,8 +23,8 @@ DEBUG_VARIABLES = 'variables'
 DEBUG_INCLUDES = 'includes'


-def DebugOutput(mode, message):
-  if 'all' in gyp.debug.keys() or mode in gyp.debug.keys():
+def DebugOutput(mode, message, *args):
+  if 'all' in gyp.debug or mode in gyp.debug:
     ctx = ('unknown', 0, 'unknown')
     try:
       f = traceback.extract_stack(limit=2)
@@ -32,6 +32,8 @@ def DebugOutput(mode, message):
       ctx = f[0][:3]
     except:
       pass
+    if args:
+      message %= args
     print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
                               ctx[1], ctx[2], message)

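Passing the format arguments separately (logging-style) means the '%' interpolation only happens when the message is actually printed, so callers no longer pay for formatting large values when the debug mode is off. The pattern in isolation (a sketch):

    def debug_output(enabled, message, *args):
        if not enabled:
            return  # args are never interpolated, however large they are
        if args:
            message %= args
        print(message)

    big = dict(('option%d' % i, i) for i in range(100000))
    debug_output(False, 'options: %s', big)  # returns before formatting big
    debug_output(True, 'count: %d', 3)       # prints: count: 3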
@@ -376,21 +378,22 @@ def gyp_main(args):
     options.generator_output = g_o

   if not options.parallel and options.use_environment:
-    options.parallel = bool(os.environ.get('GYP_PARALLEL'))
+    p = os.environ.get('GYP_PARALLEL')
+    options.parallel = bool(p and p != '0')

   for mode in options.debug:
     gyp.debug[mode] = 1

   # Do an extra check to avoid work when we're not debugging.
-  if DEBUG_GENERAL in gyp.debug.keys():
+  if DEBUG_GENERAL in gyp.debug:
     DebugOutput(DEBUG_GENERAL, 'running with these options:')
     for option, value in sorted(options.__dict__.items()):
       if option[0] == '_':
         continue
       if isinstance(value, basestring):
-        DebugOutput(DEBUG_GENERAL, "  %s: '%s'" % (option, value))
+        DebugOutput(DEBUG_GENERAL, "  %s: '%s'", option, value)
       else:
-        DebugOutput(DEBUG_GENERAL, "  %s: %s" % (option, str(value)))
+        DebugOutput(DEBUG_GENERAL, "  %s: %s", option, value)

   if not build_files:
     build_files = FindBuildFiles()
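The old line turned any non-empty value of GYP_PARALLEL into True, so GYP_PARALLEL=0 still enabled parallel mode; the new test treats '0' as off:

    for p in (None, '', '0', '1'):
        print(repr(p), bool(p), bool(p and p != '0'))
    # None  False False
    # ''    False False
    # '0'   True  False   <- the case the old code got wrong
    # '1'   True  True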
@@ -440,9 +443,9 @@ def gyp_main(args):
   if options.defines:
     defines += options.defines
   cmdline_default_variables = NameValueListToDict(defines)
-  if DEBUG_GENERAL in gyp.debug.keys():
+  if DEBUG_GENERAL in gyp.debug:
     DebugOutput(DEBUG_GENERAL,
-                "cmdline_default_variables: %s" % cmdline_default_variables)
+                "cmdline_default_variables: %s", cmdline_default_variables)

   # Set up includes.
   includes = []
@@ -468,7 +471,7 @@ def gyp_main(args):
   gen_flags += options.generator_flags
   generator_flags = NameValueListToDict(gen_flags)
   if DEBUG_GENERAL in gyp.debug.keys():
-    DebugOutput(DEBUG_GENERAL, "generator_flags: %s" % generator_flags)
+    DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)

   # TODO: Remove this and the option after we've gotten folks to move to the
   # generator flag.
@@ -127,9 +127,9 @@ def RelativePath(path, relative_to):
   # directory, returns a relative path that identifies path relative to
   # relative_to.

-  # Convert to absolute (and therefore normalized paths).
-  path = os.path.abspath(path)
-  relative_to = os.path.abspath(relative_to)
+  # Convert to normalized (and therefore absolute paths).
+  path = os.path.realpath(path)
+  relative_to = os.path.realpath(relative_to)

   # Split the paths into components.
   path_split = path.split(os.path.sep)
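os.path.abspath only normalizes the path lexically, while os.path.realpath also resolves symlinks, so two spellings of the same directory now compare equal component-by-component. A small demonstration (POSIX-only; creates and removes a temporary symlink):

    import os
    import tempfile

    real_dir = tempfile.mkdtemp()
    link = real_dir + '.link'
    os.symlink(real_dir, link)

    print(os.path.abspath(link) == os.path.abspath(real_dir))    # False: link kept as-is
    print(os.path.realpath(link) == os.path.realpath(real_dir))  # True: link resolved

    os.remove(link)
    os.rmdir(real_dir)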
@@ -151,6 +151,20 @@ def RelativePath(path, relative_to):
   return os.path.join(*relative_split)


+@memoize
+def InvertRelativePath(path, toplevel_dir=None):
+  """Given a path like foo/bar that is relative to toplevel_dir, return
+  the inverse relative path back to the toplevel_dir.
+
+  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
+  should always produce the empty string, unless the path contains symlinks.
+  """
+  if not path:
+    return path
+  toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
+  return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
+
+
 def FixIfRelativePath(path, relative_to):
   # Like RelativePath but returns |path| unchanged if it is absolute.
   if os.path.isabs(path):
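For symlink-free paths the docstring's invariant reduces to "one '..' per path component", which is exactly the closed form used by the make-generator helper that this commit removes further down:

    import os

    def invert_relative_path_sketch(path):
        # Closed-form equivalent for simple relative paths without '..'.
        depth = len(path.split(os.path.sep))
        return os.path.sep.join(['..'] * depth)

    path = os.path.join('foo', 'bar')
    inverse = invert_relative_path_sketch(path)            # '../..'
    print(os.path.normpath(os.path.join(path, inverse)))   # '.' -- back at the start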
@@ -378,8 +392,10 @@ def GetFlavor(params):
     return 'solaris'
   if sys.platform.startswith('freebsd'):
     return 'freebsd'
-  if sys.platform.startswith('dragonfly'):
-    return 'dragonflybsd'
+  if sys.platform.startswith('openbsd'):
+    return 'openbsd'
+  if sys.platform.startswith('aix'):
+    return 'aix'

   return 'linux'

@@ -56,13 +56,13 @@ class TestGetFlavor(unittest.TestCase):
     self.assertEqual(expected, gyp.common.GetFlavor(param))

   def test_platform_default(self):
-    self.assertFlavor('dragonflybsd', 'dragonfly3', {})
-    self.assertFlavor('freebsd'     , 'freebsd9'  , {})
-    self.assertFlavor('freebsd'     , 'freebsd10' , {})
-    self.assertFlavor('solaris'     , 'sunos5'    , {});
-    self.assertFlavor('solaris'     , 'sunos'     , {});
+    self.assertFlavor('freebsd', 'freebsd9' , {})
+    self.assertFlavor('freebsd', 'freebsd10', {})
+    self.assertFlavor('openbsd', 'openbsd5' , {})
+    self.assertFlavor('solaris', 'sunos5'   , {});
+    self.assertFlavor('solaris', 'sunos'    , {});
     self.assertFlavor('linux'  , 'linux2'   , {});
     self.assertFlavor('linux'  , 'linux3'   , {});

   def test_param(self):
     self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
@@ -19,6 +19,7 @@ import gyp.common
 import gyp.generator.make as make  # Reuse global functions from make backend.
 import os
 import re
+import subprocess

 generator_default_variables = {
   'OS': 'android',
@@ -38,7 +39,7 @@ generator_default_variables = {
   'RULE_INPUT_PATH': '$(RULE_SOURCES)',
   'RULE_INPUT_EXT': '$(suffix $<)',
   'RULE_INPUT_NAME': '$(notdir $<)',
-  'CONFIGURATION_NAME': 'NOT_USED_ON_ANDROID',
+  'CONFIGURATION_NAME': '$(GYP_DEFAULT_CONFIGURATION)',
 }

 # Make supports multiple toolsets
@@ -131,12 +132,13 @@ class AndroidMkWriter(object):
   def __init__(self, android_top_dir):
     self.android_top_dir = android_top_dir

-  def Write(self, qualified_target, base_path, output_filename, spec, configs,
-            part_of_all):
+  def Write(self, qualified_target, relative_target, base_path, output_filename,
+            spec, configs, part_of_all):
     """The main entry point: writes a .mk file for a single target.

     Arguments:
       qualified_target: target we're generating
+      relative_target: qualified target name relative to the root
       base_path: path relative to source root we're building in, used to resolve
                  target-relative paths
       output_filename: output .mk file name to write
@@ -150,6 +152,7 @@ class AndroidMkWriter(object):
     self.fp.write(header)

     self.qualified_target = qualified_target
+    self.relative_target = relative_target
     self.path = base_path
     self.target = spec['target_name']
     self.type = spec['type']
@@ -248,7 +251,7 @@ class AndroidMkWriter(object):
                actions)
     """
     for action in actions:
-      name = make.StringToMakefileVariable('%s_%s' % (self.qualified_target,
+      name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
                                                       action['action_name']))
       self.WriteLn('### Rules for action "%s":' % action['action_name'])
       inputs = action['inputs']
@@ -295,6 +298,15 @@ class AndroidMkWriter(object):
                  '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' %
                  main_output)

+      # Android's envsetup.sh adds a number of directories to the path including
+      # the built host binary directory. This causes actions/rules invoked by
+      # gyp to sometimes use these instead of system versions, e.g. bison.
+      # The built host binaries may not be suitable, and can cause errors.
+      # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
+      # set by envsetup.
+      self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
+                   % main_output)
+
       for input in inputs:
         assert ' ' not in input, (
             "Spaces in action input filenames not supported (%s)" % input)
@@ -334,7 +346,7 @@ class AndroidMkWriter(object):
       if len(rule.get('rule_sources', [])) == 0:
         continue
       did_write_rule = True
-      name = make.StringToMakefileVariable('%s_%s' % (self.qualified_target,
+      name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
                                                       rule['rule_name']))
       self.WriteLn('\n### Generated for rule "%s":' % name)
       self.WriteLn('# "%s":' % rule)
@@ -388,6 +400,10 @@ class AndroidMkWriter(object):
                    '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)'
                    % main_output)

+      # See explanation in WriteActions.
+      self.WriteLn('%s: export PATH := '
+                   '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
+
       main_output_deps = self.LocalPathify(rule_source)
       if inputs:
         main_output_deps += ' '
@@ -415,7 +431,7 @@ class AndroidMkWriter(object):
     """
     self.WriteLn('### Generated for copy rule.')

-    variable = make.StringToMakefileVariable(self.qualified_target + '_copies')
+    variable = make.StringToMakefileVariable(self.relative_target + '_copies')
     outputs = []
     for copy in copies:
       for path in copy['files']:
@@ -940,30 +956,16 @@
     return path


-def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
-                              build_files):
-  """Write the target to regenerate the Makefile."""
-  options = params['options']
-  # Sort to avoid non-functional changes to makefile.
-  build_files = sorted([os.path.join('$(LOCAL_PATH)', f) for f in build_files])
-  build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
-                      for filename in params['build_files_arg']]
-  build_files_args = [os.path.join('$(PRIVATE_LOCAL_PATH)', f)
-                      for f in build_files_args]
-  gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
-                                            options.toplevel_dir)
-  makefile_path = os.path.join('$(LOCAL_PATH)', makefile_name)
-  if not gyp_binary.startswith(os.sep):
-    gyp_binary = os.path.join('.', gyp_binary)
-  root_makefile.write('GYP_FILES := \\\n  %s\n\n' %
-                      '\\\n  '.join(map(Sourceify, build_files)))
-  root_makefile.write('%s: PRIVATE_LOCAL_PATH := $(LOCAL_PATH)\n' %
-                      makefile_path)
-  root_makefile.write('%s: $(GYP_FILES)\n' % makefile_path)
-  root_makefile.write('\techo ACTION Regenerating $@\n\t%s\n\n' %
-      gyp.common.EncodePOSIXShellList([gyp_binary, '-fandroid'] +
-                                      gyp.RegenerateFlags(options) +
-                                      build_files_args))
+def PerformBuild(data, configurations, params):
+  # The android backend only supports the default configuration.
+  options = params['options']
+  makefile = os.path.abspath(os.path.join(options.toplevel_dir,
+                                          'GypAndroid.mk'))
+  env = dict(os.environ)
+  env['ONE_SHOT_MAKEFILE'] = makefile
+  arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
+  print 'Building: %s' % arguments
+  subprocess.check_call(arguments, env=env)


 def GenerateOutput(target_list, target_dicts, data, params):
@@ -1030,7 +1032,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
   for qualified_target in target_list:
     build_file, target, toolset = gyp.common.ParseQualifiedTarget(
         qualified_target)
-    build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
+    relative_build_file = gyp.common.RelativePath(build_file,
+                                                  options.toplevel_dir)
+    build_files.add(relative_build_file)
     included_files = data[build_file]['included_files']
     for included_file in included_files:
       # The included_files entries are relative to the dir of the build file
@@ -1058,9 +1062,13 @@ def GenerateOutput(target_list, target_dicts, data, params):
                    not int(spec.get('suppress_wildcard', False)))
     if limit_to_target_all and not part_of_all:
       continue
+
+    relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
+                                                 toolset)
     writer = AndroidMkWriter(android_top_dir)
-    android_module = writer.Write(qualified_target, base_path, output_file,
-                                  spec, configs, part_of_all=part_of_all)
+    android_module = writer.Write(qualified_target, relative_target, base_path,
+                                  output_file, spec, configs,
+                                  part_of_all=part_of_all)
     if android_module in android_modules:
       print ('ERROR: Android module names must be unique. The following '
              'targets both generate Android module name %s.\n  %s\n  %s' %
@@ -1077,6 +1085,8 @@ def GenerateOutput(target_list, target_dicts, data, params):

   # Some tools need to know the absolute path of the top directory.
   root_makefile.write('GYP_ABS_ANDROID_TOP_DIR := $(shell pwd)\n')
+  root_makefile.write('GYP_DEFAULT_CONFIGURATION := %s\n' %
+                      default_configuration)

   # Write out the sorted list of includes.
   root_makefile.write('\n')
@@ -1084,9 +1094,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
     root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
   root_makefile.write('\n')

-  if generator_flags.get('auto_regeneration', True):
-    WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
-
   root_makefile.write(SHARED_FOOTER)

   root_makefile.close()
@@ -41,11 +41,11 @@ for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
                'CONFIGURATION_NAME']:
   generator_default_variables[unused] = ''

-# Include dirs will occasionaly use the SHARED_INTERMEDIATE_DIR variable as
+# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
 # part of the path when dealing with generated headers. This value will be
 # replaced dynamically for each configuration.
 generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
-    '$SHARED_INTERMEDIATES_DIR'
+    '$SHARED_INTERMEDIATE_DIR'


 def CalculateVariables(default_variables, params):
@@ -65,7 +65,7 @@ def CalculateGeneratorInputInfo(params):


 def GetAllIncludeDirectories(target_list, target_dicts,
-                             shared_intermediates_dir, config_name):
+                             shared_intermediate_dirs, config_name):
   """Calculate the set of include directories to be used.

   Returns:
@@ -96,17 +96,18 @@ def GetAllIncludeDirectories(target_list, target_dicts,
       # Find standard gyp include dirs.
       if config.has_key('include_dirs'):
         include_dirs = config['include_dirs']
-        for include_dir in include_dirs:
-          include_dir = include_dir.replace('$SHARED_INTERMEDIATES_DIR',
-                                            shared_intermediates_dir)
-          if not os.path.isabs(include_dir):
-            base_dir = os.path.dirname(target_name)
+        for shared_intermediate_dir in shared_intermediate_dirs:
+          for include_dir in include_dirs:
+            include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
+                                              shared_intermediate_dir)
+            if not os.path.isabs(include_dir):
+              base_dir = os.path.dirname(target_name)

-            include_dir = base_dir + '/' + include_dir
-            include_dir = os.path.abspath(include_dir)
+              include_dir = base_dir + '/' + include_dir
+              include_dir = os.path.abspath(include_dir)

-          if not include_dir in gyp_includes_set:
-            gyp_includes_set.add(include_dir)
+            if not include_dir in gyp_includes_set:
+              gyp_includes_set.add(include_dir)


   # Generate a list that has all the include dirs.
@@ -234,7 +235,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
                             config_name)

   toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-  shared_intermediate_dir = os.path.join(toplevel_build, 'obj', 'gen')
+  # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
+  # SHARED_INTERMEDIATE_DIR. Include both possible locations.
+  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
+                              os.path.join(toplevel_build, 'gen')]

   if not os.path.exists(toplevel_build):
     os.makedirs(toplevel_build)
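Both layouts are then tried when substituting include paths: an include_dirs entry such as '$SHARED_INTERMEDIATE_DIR/protoc_out' (a hypothetical entry) expands to both candidates:

    import os

    toplevel_build = os.path.join('out', 'Debug')  # illustrative value
    shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                                os.path.join(toplevel_build, 'gen')]

    include_dir = '$SHARED_INTERMEDIATE_DIR/protoc_out'
    for d in shared_intermediate_dirs:
        print(include_dir.replace('$SHARED_INTERMEDIATE_DIR', d))
    # out/Debug/obj/gen/protoc_out  (make layout)
    # out/Debug/gen/protoc_out      (ninja layout)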
@@ -246,7 +250,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
   eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                    'GNU C++', 'GNU C', 'Assembly']
   include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
-                                          shared_intermediate_dir, config_name)
+                                          shared_intermediate_dirs, config_name)
   WriteIncludePaths(out, eclipse_langs, include_dirs)
   defines = GetAllDefines(target_list, target_dicts, data, config_name)
   WriteMacros(out, eclipse_langs, defines)
@@ -259,7 +259,7 @@ all_deps :=
 #   export LINK=g++
 #
 # This will allow make to invoke N linker processes as specified in -jN.
-LINK ?= %(flock)s $(builddir)/linker.lock $(CXX)
+LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)

 CC.target ?= %(CC.target)s
 CFLAGS.target ?= $(CFLAGS)
@@ -395,15 +395,14 @@ command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
 #   $| -- order-only dependencies
 prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))

-# Helper that executes all postbuilds, and deletes the output file when done
-# if any of the postbuilds failed.
+# Helper that executes all postbuilds until one fails.
 define do_postbuilds
   @E=0;\\
   for p in $(POSTBUILDS); do\\
     eval $$p;\\
-    F=$$?;\\
-    if [ $$F -ne 0 ]; then\\
-      E=$$F;\\
+    E=$$?;\\
+    if [ $$E -ne 0 ]; then\\
+      break;\\
     fi;\\
   done;\\
   if [ $$E -ne 0 ]; then\\
@@ -619,21 +618,6 @@ def QuoteSpaces(s, quote=r'\ '):
   return s.replace(' ', quote)


-def InvertRelativePath(path):
-  """Given a relative path like foo/bar, return the inverse relative path:
-  the path from the relative path back to the origin dir.
-
-  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
-  should always produce the empty string."""
-
-  if not path:
-    return path
-  # Only need to handle relative paths into subdirectories for now.
-  assert '..' not in path, path
-  depth = len(path.split(os.path.sep))
-  return os.path.sep.join(['..'] * depth)
-
-
 # Map from qualified target to path to output.
 target_outputs = {}
 # Map from qualified target to any linkable output. A subset
@@ -1417,7 +1401,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
                                  lambda p: Sourceify(self.Absolutify(p)))

       # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
-      gyp_to_build = InvertRelativePath(self.path)
+      gyp_to_build = gyp.common.InvertRelativePath(self.path)
       target_postbuild = self.xcode_settings.GetTargetPostbuilds(
           configname,
           QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
@@ -1541,7 +1525,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
       for link_dep in link_deps:
         assert ' ' not in link_dep, (
             "Spaces in alink input filenames not supported (%s)" % link_dep)
-      if (self.flavor not in ('mac', 'win') and not
+      if (self.flavor not in ('mac', 'openbsd', 'win') and not
           self.is_standalone_static_library):
         self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
                         part_of_all, postbuilds=postbuilds)
@@ -2000,7 +1984,8 @@ def GenerateOutput(target_list, target_dicts, data, params):
         'flock_index': 2,
         'extra_commands': SHARED_HEADER_SUN_COMMANDS,
     })
-  elif flavor == 'freebsd' or flavor == 'dragonflybsd':
+  elif flavor == 'freebsd':
+    # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
     header_params.update({
         'flock': 'lockf',
     })
@@ -2018,14 +2003,22 @@ def GenerateOutput(target_list, target_dicts, data, params):

   build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
   make_global_settings_array = data[build_file].get('make_global_settings', [])
+  wrappers = {}
+  wrappers['LINK'] = '%s $(builddir)/linker.lock' % flock_command
+  for key, value in make_global_settings_array:
+    if key.endswith('_wrapper'):
+      wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
   make_global_settings = ''
   for key, value in make_global_settings_array:
+    if re.match('.*_wrapper', key):
+      continue
     if value[0] != '$':
       value = '$(abspath %s)' % value
-    if key == 'LINK':
-      make_global_settings += ('%s ?= %s $(builddir)/linker.lock %s\n' %
-                               (key, flock_command, value))
-    elif key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
+    wrapper = wrappers.get(key)
+    if wrapper:
+      value = '%s %s' % (wrapper, value)
+      del wrappers[key]
+    if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
       make_global_settings += (
           'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
       # Let gyp-time envvars win over global settings.
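The effect of the wrapper support: a make_global_settings entry like ['CC_wrapper', 'ccache'] is folded into the CC assignment instead of being emitted on its own, and LINK always gets the flock wrapper. A condensed sketch of the rewrite (the real generator additionally wraps CC/CXX in the ifneq-origin guard shown above):

    make_global_settings_array = [
        ('CC', '/usr/bin/gcc'),
        ('CC_wrapper', 'ccache'),  # hypothetical wrapper entry
    ]

    wrappers = {}
    for key, value in make_global_settings_array:
        if key.endswith('_wrapper'):
            wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value

    for key, value in make_global_settings_array:
        if key.endswith('_wrapper'):
            continue
        if value[0] != '$':
            value = '$(abspath %s)' % value
        wrapper = wrappers.get(key)
        if wrapper:
            value = '%s %s' % (wrapper, value)
        print('%s ?= %s' % (key, value))
    # CC ?= $(abspath ccache) $(abspath /usr/bin/gcc)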
@@ -2035,6 +2028,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
       make_global_settings += 'endif\n'
     else:
       make_global_settings += '%s ?= %s\n' % (key, value)
+  # TODO(ukai): define cmd when only wrapper is specified in
+  # make_global_settings.
+
   header_params['make_global_settings'] = make_global_settings

   ensure_directory_exists(makefile_path)
@@ -17,6 +17,7 @@ import gyp.MSVSProject as MSVSProject
 import gyp.MSVSSettings as MSVSSettings
 import gyp.MSVSToolFile as MSVSToolFile
 import gyp.MSVSUserFile as MSVSUserFile
+import gyp.MSVSUtil as MSVSUtil
 import gyp.MSVSVersion as MSVSVersion
 from gyp.common import GypError

@@ -63,6 +64,7 @@ generator_additional_path_sections = [
 generator_additional_non_configuration_keys = [
     'msvs_cygwin_dirs',
     'msvs_cygwin_shell',
+    'msvs_large_pdb',
     'msvs_shard',
 ]

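Registering 'msvs_large_pdb' as a non-configuration key means it is set once at the target level of a .gyp file; a hypothetical opt-in looks like:

    {
      'targets': [
        {
          'target_name': 'big_module',  # hypothetical target
          'type': 'shared_library',
          'msvs_large_pdb': 1,  # request the 4KB-pagesize PDB shim
          'sources': ['big_module.cc'],
        },
      ],
    }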
@@ -204,6 +206,10 @@ def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,

 def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
   if not value: return
+  _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
+
+
+def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
   # TODO(bradnelson): ugly hack, fix this more generally!!!
   if 'Directories' in setting or 'Dependencies' in setting:
     if type(value) == str:
@@ -232,7 +238,7 @@ def _ConfigPlatform(config_data):

 def _ConfigBaseName(config_name, platform_name):
   if config_name.endswith('_' + platform_name):
-    return config_name[0:-len(platform_name)-1]
+    return config_name[0:-len(platform_name) - 1]
   else:
     return config_name

@@ -270,7 +276,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
                   '`cygpath -m "${INPUTPATH}"`')
                   for i in direct_cmd]
     direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
-    #direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
+    # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
     direct_cmd = ' '.join(direct_cmd)
     # TODO(quote): regularize quoting path names throughout the module
     cmd = ''
@@ -306,7 +312,7 @@ def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
     # If the argument starts with a slash or dash, it's probably a command line
     # switch
     arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
-    arguments = [i.replace('$(InputDir)','%INPUTDIR%') for i in arguments]
+    arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
     arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
     if quote_cmd:
       # Support a mode for using cmd directly.
@@ -720,7 +726,7 @@ def _EscapeCommandLineArgumentForMSBuild(s):
   """Escapes a Windows command-line argument for use by MSBuild."""

   def _Replace(match):
-    return (len(match.group(1))/2*4)*'\\' + '\\"'
+    return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'

   # Escape all quotes so that they are interpreted literally.
   s = quote_replacer_regex2.sub(_Replace, s)
@@ -1001,12 +1007,12 @@ def _GetMSVSConfigurationType(spec, build_file):
     }[spec['type']]
   except KeyError:
     if spec.get('type'):
-      raise Exception('Target type %s is not a valid target type for '
-                      'target %s in %s.' %
-                      (spec['type'], spec['target_name'], build_file))
+      raise GypError('Target type %s is not a valid target type for '
+                     'target %s in %s.' %
+                     (spec['type'], spec['target_name'], build_file))
     else:
-      raise Exception('Missing type field for target %s in %s.' %
-                      (spec['target_name'], build_file))
+      raise GypError('Missing type field for target %s in %s.' %
+                     (spec['target_name'], build_file))
   return config_type

@@ -1041,6 +1047,10 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
   # Add in user specified msvs_settings.
   msvs_settings = config.get('msvs_settings', {})
   MSVSSettings.ValidateMSVSSettings(msvs_settings)
+
+  # Prevent default library inheritance from the environment.
+  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
+
   for tool in msvs_settings:
     settings = config['msvs_settings'][tool]
     for setting in settings:
@@ -1663,7 +1673,7 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
   for qualified_target in target_list:
     spec = target_dicts[qualified_target]
     if spec['toolset'] != 'target':
-      raise Exception(
+      raise GypError(
           'Multiple toolsets not supported in msvs build (target %s)' %
           qualified_target)
     proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
@@ -1718,74 +1728,6 @@ def CalculateVariables(default_variables, params):
     default_variables['MSVS_OS_BITS'] = 32


-def _ShardName(name, number):
-  """Add a shard number to the end of a target.
-
-  Arguments:
-    name: name of the target (foo#target)
-    number: shard number
-  Returns:
-    Target name with shard added (foo_1#target)
-  """
-  parts = name.rsplit('#', 1)
-  parts[0] = '%s_%d' % (parts[0], number)
-  return '#'.join(parts)
-
-
-def _ShardTargets(target_list, target_dicts):
-  """Shard some targets apart to work around the linkers limits.
-
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-  Returns:
-    Tuple of the new sharded versions of the inputs.
-  """
-  # Gather the targets to shard, and how many pieces.
-  targets_to_shard = {}
-  for t in target_dicts:
-    shards = int(target_dicts[t].get('msvs_shard', 0))
-    if shards:
-      targets_to_shard[t] = shards
-  # Shard target_list.
-  new_target_list = []
-  for t in target_list:
-    if t in targets_to_shard:
-      for i in range(targets_to_shard[t]):
-        new_target_list.append(_ShardName(t, i))
-    else:
-      new_target_list.append(t)
-  # Shard target_dict.
-  new_target_dicts = {}
-  for t in target_dicts:
-    if t in targets_to_shard:
-      for i in range(targets_to_shard[t]):
-        name = _ShardName(t, i)
-        new_target_dicts[name] = copy.copy(target_dicts[t])
-        new_target_dicts[name]['target_name'] = _ShardName(
-            new_target_dicts[name]['target_name'], i)
-        sources = new_target_dicts[name].get('sources', [])
-        new_sources = []
-        for pos in range(i, len(sources), targets_to_shard[t]):
-          new_sources.append(sources[pos])
-        new_target_dicts[name]['sources'] = new_sources
-    else:
-      new_target_dicts[t] = target_dicts[t]
-  # Shard dependencies.
-  for t in new_target_dicts:
-    dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
-    new_dependencies = []
-    for d in dependencies:
-      if d in targets_to_shard:
-        for i in range(targets_to_shard[d]):
-          new_dependencies.append(_ShardName(d, i))
-      else:
-        new_dependencies.append(d)
-    new_target_dicts[t]['dependencies'] = new_dependencies
-
-  return (new_target_list, new_target_dicts)
-
-
 def PerformBuild(data, configurations, params):
   options = params['options']
   msvs_version = params['msvs_version']
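
The sharding helpers deleted here live on as MSVSUtil.ShardTargets, which the generator calls below. Their round-robin source split is easy to check in isolation; a minimal sketch, with a hypothetical target name and shard count:

def shard_name(name, number):
  # 'foo#target' with shard 1 becomes 'foo_1#target' (see _ShardName above).
  parts = name.rsplit('#', 1)
  parts[0] = '%s_%d' % (parts[0], number)
  return '#'.join(parts)

sources = ['a.cc', 'b.cc', 'c.cc', 'd.cc', 'e.cc']
shards = 2  # from a hypothetical 'msvs_shard': 2 on the target
for i in range(shards):
  # Shard i takes every shards-th source starting at offset i.
  print shard_name('base/base.gyp:base#target', i), sources[i::shards]
# base/base.gyp:base_0#target ['a.cc', 'c.cc', 'e.cc']
# base/base.gyp:base_1#target ['b.cc', 'd.cc']
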
@@ -1825,7 +1767,12 @@ def GenerateOutput(target_list, target_dicts, data, params):
   generator_flags = params.get('generator_flags', {})

   # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
-  (target_list, target_dicts) = _ShardTargets(target_list, target_dicts)
+  (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)

+  # Optionally use the large PDB workaround for targets marked with
+  # 'msvs_large_pdb': 1.
+  (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
+      target_list, target_dicts, generator_default_variables)
+
   # Prepare the set of configurations.
   configs = set()
@@ -1872,9 +1819,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
     error_message = "Missing input files:\n" + \
                     '\n'.join(set(missing_sources))
     if generator_flags.get('msvs_error_on_missing_sources', False):
-      raise Exception(error_message)
+      raise GypError(error_message)
     else:
-      print >>sys.stdout, "Warning: " + error_message
+      print >> sys.stdout, "Warning: " + error_message


 def _GenerateMSBuildFiltersFile(filters_path, source_files,
@@ -2809,8 +2756,10 @@ def _FinalizeMSBuildSettings(spec, configuration):
               'AdditionalIncludeDirectories', include_dirs)
   _ToolAppend(msbuild_settings, 'ResourceCompile',
               'AdditionalIncludeDirectories', resource_include_dirs)
-  # Add in libraries.
-  _ToolAppend(msbuild_settings, 'Link', 'AdditionalDependencies', libraries)
+  # Add in libraries, note that even for empty libraries, we want this
+  # set, to prevent inheriting default libraries from the enviroment.
+  _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
+                   libraries)
   if out_file:
     _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
                 only_if_unset=True)
@@ -2844,8 +2793,7 @@ def _GetValueFormattedForMSBuild(tool_name, name, value):
   if type(value) == list:
     # For some settings, VS2010 does not automatically extends the settings
     # TODO(jeanluc) Is this what we want?
-    if name in ['AdditionalDependencies',
-                'AdditionalIncludeDirectories',
+    if name in ['AdditionalIncludeDirectories',
                 'AdditionalLibraryDirectories',
                 'AdditionalOptions',
                 'DelayLoadDLLs',

@@ -1,4 +1,4 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
+# Copyright (c) 2013 Google Inc. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

@@ -13,7 +13,7 @@ import sys
 import gyp
 import gyp.common
 import gyp.msvs_emulation
-import gyp.MSVSVersion
+import gyp.MSVSUtil as MSVSUtil
 import gyp.xcode_emulation

 from gyp.common import GetEnvironFallback
@@ -97,21 +97,6 @@ def Define(d, flavor):
   return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)


-def InvertRelativePath(path):
-  """Given a relative path like foo/bar, return the inverse relative path:
-  the path from the relative path back to the origin dir.
-
-  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
-  should always produce the empty string."""
-
-  if not path:
-    return path
-  # Only need to handle relative paths into subdirectories for now.
-  assert '..' not in path, path
-  depth = len(path.split(os.path.sep))
-  return os.path.sep.join(['..'] * depth)
-
-
 class Target:
   """Target represents the paths used within a single gyp target.
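
The deleted ninja-local InvertRelativePath simply counted path components; its replacement, gyp.common.InvertRelativePath, takes the toplevel directory explicitly (see the NinjaWriter changes below). The old behavior, reproduced as a standalone sketch with an illustrative path:

import os

def invert_relative_path(path):
  # Same logic as the removed helper: 'foo/bar' -> '../..'.
  if not path:
    return path
  assert '..' not in path, path
  depth = len(path.split(os.path.sep))
  return os.path.sep.join(['..'] * depth)

print invert_relative_path('foo/bar')  # ../..
# The docstring's invariant, up to normpath spelling the origin as '.':
print os.path.normpath(os.path.join('foo/bar', invert_relative_path('foo/bar')))
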
@@ -218,12 +203,12 @@ class Target:

 class NinjaWriter:
   def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
-               output_file, flavor, abs_build_dir=None):
+               output_file, flavor, toplevel_dir=None):
     """
     base_dir: path from source root to directory containing this gyp file,
               by gyp semantics, all input paths are relative to this
     build_dir: path from source root to build output
-    abs_build_dir: absolute path to the build directory
+    toplevel_dir: path to the toplevel directory
     """

     self.qualified_target = qualified_target
@@ -232,7 +217,10 @@ class NinjaWriter:
     self.build_dir = build_dir
     self.ninja = ninja_syntax.Writer(output_file)
     self.flavor = flavor
-    self.abs_build_dir = abs_build_dir
+    self.abs_build_dir = None
+    if toplevel_dir is not None:
+      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
+                                                        build_dir))
     self.obj_ext = '.obj' if flavor == 'win' else '.o'
     if flavor == 'win':
       # See docstring of msvs_emulation.GenerateEnvironmentFiles().
@@ -241,9 +229,11 @@ class NinjaWriter:
         self.win_env[arch] = 'environment.' + arch

     # Relative path from build output dir to base dir.
-    self.build_to_base = os.path.join(InvertRelativePath(build_dir), base_dir)
+    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
+    self.build_to_base = os.path.join(build_to_top, base_dir)
     # Relative path from base dir to build dir.
-    self.base_to_build = os.path.join(InvertRelativePath(base_dir), build_dir)
+    base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
+    self.base_to_build = os.path.join(base_to_top, build_dir)

   def ExpandSpecial(self, path, product_dir=None):
     """Expand specials like $!PRODUCT_DIR in |path|.
@@ -428,7 +418,8 @@ class NinjaWriter:
       gyp.msvs_emulation.VerifyMissingSources(
           sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
       pch = gyp.msvs_emulation.PrecompiledHeader(
-          self.msvs_settings, config_name, self.GypPathToNinja)
+          self.msvs_settings, config_name, self.GypPathToNinja,
+          self.GypPathToUniqueOutput, self.obj_ext)
     else:
       pch = gyp.xcode_emulation.MacPrefixHeader(
           self.xcode_settings, self.GypPathToNinja,
@@ -743,7 +734,15 @@ class NinjaWriter:
       cflags_c = self.msvs_settings.GetCflagsC(config_name)
       cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
       extra_defines = self.msvs_settings.GetComputedDefines(config_name)
-      self.WriteVariableList('pdbname', [self.name + '.pdb'])
+      pdbpath = self.msvs_settings.GetCompilerPdbName(
+          config_name, self.ExpandSpecial)
+      if not pdbpath:
+        obj = 'obj'
+        if self.toolset != 'target':
+          obj += '.' + self.toolset
+        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir,
+                                                self.name + '.pdb'))
+      self.WriteVariableList('pdbname', [pdbpath])
       self.WriteVariableList('pchprefix', [self.name])
     else:
       cflags = config.get('cflags', [])
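
When no ProgramDataBaseFileName is configured, the compiler PDB now defaults to a per-target path under the obj directory instead of a single name in the build root. A sketch of just the fallback path logic (target name and base dir invented; separators shown POSIX-style):

import os

def default_pdb_path(toolset, base_dir, name):
  # Mirrors the fallback above: obj[.<toolset>]/<base_dir>/<name>.pdb.
  obj = 'obj'
  if toolset != 'target':
    obj += '.' + toolset
  return os.path.normpath(os.path.join(obj, base_dir, name + '.pdb'))

print default_pdb_path('target', 'base', 'base_lib')  # obj/base/base_lib.pdb
print default_pdb_path('host', 'base', 'base_lib')    # obj.host/base/base_lib.pdb
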
@@ -824,9 +823,14 @@ class NinjaWriter:
       if not case_sensitive_filesystem:
         output = output.lower()
       implicit = precompiled_header.GetObjDependencies([input], [output])
+      variables = []
+      if self.flavor == 'win':
+        variables, output, implicit = precompiled_header.GetFlagsModifications(
+            input, output, implicit, command, cflags_c, cflags_cc,
+            self.ExpandSpecial)
       self.ninja.build(output, command, input,
                        implicit=[gch for _, _, gch in implicit],
-                       order_only=predepends)
+                       order_only=predepends, variables=variables)
       outputs.append(output)

     self.WritePchTargets(pch_commands)
@@ -848,8 +852,6 @@ class NinjaWriter:
       }[lang]

       map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
-      if self.flavor == 'win':
-        map.update({'c': 'cc_pch', 'cc': 'cxx_pch'})
       cmd = map.get(lang)
       self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)])

@@ -903,16 +905,12 @@ class NinjaWriter:
       extra_bindings.append(('postbuilds',
                              self.GetPostbuildCommand(spec, output, output)))

+    is_executable = spec['type'] == 'executable'
     if self.flavor == 'mac':
       ldflags = self.xcode_settings.GetLdflags(config_name,
           self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
           self.GypPathToNinja)
     elif self.flavor == 'win':
-      libflags = self.msvs_settings.GetLibFlags(config_name,
-                                                self.GypPathToNinja)
-      self.WriteVariableList(
-          'libflags', gyp.common.uniquer(map(self.ExpandSpecial, libflags)))
-      is_executable = spec['type'] == 'executable'
       manifest_name = self.GypPathToUniqueOutput(
           self.ComputeOutputFileName(spec))
       ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name,
@@ -920,6 +918,9 @@ class NinjaWriter:
       self.WriteVariableList('manifests', manifest_files)
     else:
       ldflags = config.get('ldflags', [])
+      if is_executable and len(solibs):
+        ldflags.append('-Wl,-rpath=\$$ORIGIN/lib/')
+        ldflags.append('-Wl,-rpath-link=lib/')
       self.WriteVariableList('ldflags',
                              gyp.common.uniquer(map(self.ExpandSpecial,
                                                     ldflags)))
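
The doubled dollar in '-Wl,-rpath=\$$ORIGIN/lib/' is ninja escaping: '$$' reaches the shell as a single '$', and the backslash then keeps the shell from expanding it, so the literal token $ORIGIN survives into the executable's rpath. A quick illustration of the unescaping steps:

ninja_flag = '-Wl,-rpath=\$$ORIGIN/lib/'
shell_flag = ninja_flag.replace('$$', '$')  # what ninja writes to the command
print shell_flag  # -Wl,-rpath=\$ORIGIN/lib/ ; the shell strips the backslash,
                  # so the linker finally sees -Wl,-rpath=$ORIGIN/lib/
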
@@ -975,6 +976,10 @@ class NinjaWriter:
         self.ninja.build(self.target.binary, 'alink_thin', link_deps,
                          order_only=compile_deps, variables=variables)
       else:
+        if self.msvs_settings:
+          libflags = self.msvs_settings.GetLibFlags(config_name,
+                                                    self.GypPathToNinja)
+          variables.append(('libflags', libflags))
         self.ninja.build(self.target.binary, 'alink', link_deps,
                          order_only=compile_deps, variables=variables)
     else:
@@ -1046,10 +1051,9 @@ class NinjaWriter:
       env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
       # G will be non-null if any postbuild fails. Run all postbuilds in a
       # subshell.
-      commands = env + ' (F=0; ' + \
-          ' '.join([ninja_syntax.escape(command) + ' || F=$$?;'
-                    for command in postbuilds])
-      command_string = (commands + ' exit $$F); G=$$?; '
+      commands = env + ' (' + \
+          ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
+      command_string = (commands + '); G=$$?; '
                         # Remove the final output if any postbuild failed.
                         '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
       if is_command_start:
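
The old postbuild wrapper ran every command and remembered the first failing status; the new one chains commands with '&&' so everything after the first failure is skipped. The difference in the generated shell fragment, for two hypothetical postbuilds a and b:

postbuilds = ['a', 'b']  # hypothetical postbuild commands
old = '(F=0; ' + ' '.join(c + ' || F=$$?;' for c in postbuilds) + ' exit $$F)'
new = '(' + ' && '.join(postbuilds) + ')'
print old  # (F=0; a || F=$$?; b || F=$$?; exit $$F) -- still runs b when a fails
print new  # (a && b) -- stops at the first failure ('$$' is ninja-escaped '$')
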
@@ -1315,6 +1319,13 @@ def OpenOutput(path, mode='w'):
   return open(path, mode)


+def CommandWithWrapper(cmd, wrappers, prog):
+  wrapper = wrappers.get(cmd, '')
+  if wrapper:
+    return wrapper + ' ' + prog
+  return prog
+
+
 def GenerateOutputForConfig(target_list, target_dicts, data, params,
                             config_name):
   options = params['options']
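
CommandWithWrapper prefixes a tool invocation when a wrapper is registered for it; wrappers come from the built-in flock linker lock or from make_global_settings keys ending in '_wrapper' (both wired up in the next hunk). A small usage sketch, where the distcc entry is hypothetical:

def CommandWithWrapper(cmd, wrappers, prog):
  wrapper = wrappers.get(cmd, '')
  if wrapper:
    return wrapper + ' ' + prog
  return prog

wrappers = {'LINK': 'flock linker.lock',  # registered for non-win flavors
            'CC': 'distcc'}               # e.g. from a ['CC_wrapper', 'distcc']
print CommandWithWrapper('CC', wrappers, 'gcc')    # distcc gcc
print CommandWithWrapper('LINK', wrappers, 'g++')  # flock linker.lock g++
print CommandWithWrapper('CXX', wrappers, 'g++')   # g++ (no wrapper registered)
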
@@ -1372,7 +1383,14 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,

   build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
   make_global_settings = data[build_file].get('make_global_settings', [])
-  build_to_root = InvertRelativePath(build_dir)
+  build_to_root = gyp.common.InvertRelativePath(build_dir,
+                                                options.toplevel_dir)
+  flock = 'flock'
+  if flavor == 'mac':
+    flock = './gyp-mac-tool flock'
+  wrappers = {}
+  if flavor != 'win':
+    wrappers['LINK'] = flock + ' linker.lock'
   for key, value in make_global_settings:
     if key == 'CC':
       cc = os.path.join(build_to_root, value)
@@ -1388,14 +1406,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
       cxx_host_global_setting = value
     if key == 'LD.host':
       ld_host = os.path.join(build_to_root, value)
+    if key.endswith('_wrapper'):
+      wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)

-  flock = 'flock'
-  if flavor == 'mac':
-    flock = './gyp-mac-tool flock'
   cc = GetEnvironFallback(['CC_target', 'CC'], cc)
-  master_ninja.variable('cc', cc)
+  master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
   cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
-  master_ninja.variable('cxx', cxx)
+  master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
   ld = GetEnvironFallback(['LD_target', 'LD'], ld)

   if not cc_host:
@@ -1412,7 +1429,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
     master_ninja.variable('mt', 'mt.exe')
     master_ninja.variable('use_dep_database', '1')
   else:
-    master_ninja.variable('ld', flock + ' linker.lock ' + ld)
+    master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
     master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))

   master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
@@ -1426,12 +1443,15 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
       cc_host = cc_host_global_setting.replace('$(CC)', cc)
     if '$(CXX)' in cxx_host and cxx_host_global_setting:
       cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
-    master_ninja.variable('cc_host', cc_host)
-    master_ninja.variable('cxx_host', cxx_host)
+    master_ninja.variable('cc_host',
+                          CommandWithWrapper('CC.host', wrappers, cc_host))
+    master_ninja.variable('cxx_host',
+                          CommandWithWrapper('CXX.host', wrappers, cxx_host))
   if flavor == 'win':
     master_ninja.variable('ld_host', ld_host)
   else:
-    master_ninja.variable('ld_host', flock + ' linker.lock ' + ld_host)
+    master_ninja.variable('ld_host', CommandWithWrapper(
+        'LINK', wrappers, ld_host))

   master_ninja.newline()

@@ -1454,45 +1474,25 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
                '$cflags_pch_cc -c $in -o $out'),
       depfile='$out.d')
   else:
-    # Template for compile commands mostly shared between compiling files
-    # and generating PCH. In the case of PCH, the "output" is specified by /Fp
-    # rather than /Fo (for object files), but we still need to specify an /Fo
-    # when compiling PCH.
-    cc_template = ('ninja -t msvc -r . -o $out -e $arch '
-                   '-- '
-                   '$cc /nologo /showIncludes /FC '
-                   '@$out.rsp '
-                   '$cflags_pch_c /c $in %(outspec)s /Fd$pdbname ')
-    cxx_template = ('ninja -t msvc -r . -o $out -e $arch '
-                    '-- '
-                    '$cxx /nologo /showIncludes /FC '
-                    '@$out.rsp '
-                    '$cflags_pch_cc /c $in %(outspec)s $pchobj /Fd$pdbname ')
+    cc_command = ('ninja -t msvc -o $out -e $arch '
+                  '-- '
+                  '$cc /nologo /showIncludes /FC '
+                  '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+    cxx_command = ('ninja -t msvc -o $out -e $arch '
+                   '-- '
+                   '$cxx /nologo /showIncludes /FC '
+                   '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
     master_ninja.rule(
       'cc',
       description='CC $out',
-      command=cc_template % {'outspec': '/Fo$out'},
-      depfile='$out.d',
-      rspfile='$out.rsp',
-      rspfile_content='$defines $includes $cflags $cflags_c')
-    master_ninja.rule(
-      'cc_pch',
-      description='CC PCH $out',
-      command=cc_template % {'outspec': '/Fp$out /Fo$out.obj'},
+      command=cc_command,
       depfile='$out.d',
       rspfile='$out.rsp',
       rspfile_content='$defines $includes $cflags $cflags_c')
     master_ninja.rule(
       'cxx',
       description='CXX $out',
-      command=cxx_template % {'outspec': '/Fo$out'},
-      depfile='$out.d',
-      rspfile='$out.rsp',
-      rspfile_content='$defines $includes $cflags $cflags_cc')
-    master_ninja.rule(
-      'cxx_pch',
-      description='CXX PCH $out',
-      command=cxx_template % {'outspec': '/Fp$out /Fo$out.obj'},
+      command=cxx_command,
       depfile='$out.d',
       rspfile='$out.rsp',
       rspfile_content='$defines $includes $cflags $cflags_cc')
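
With the dedicated cc_pch/cxx_pch rules gone, Windows PCH is handled by modifying the flags of the ordinary compile edge (see PrecompiledHeader.GetFlagsModifications later in this commit). For reference, the removed rules were just two expansions of one template; reproducing the old substitution:

cc_template = ('ninja -t msvc -r . -o $out -e $arch '
               '-- '
               '$cc /nologo /showIncludes /FC '
               '@$out.rsp '
               '$cflags_pch_c /c $in %(outspec)s /Fd$pdbname ')
print cc_template % {'outspec': '/Fo$out'}              # the old 'cc' rule
print cc_template % {'outspec': '/Fp$out /Fo$out.obj'}  # the removed 'cc_pch'
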
@@ -1559,7 +1559,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
     master_ninja.rule(
       'link',
       description='LINK $out',
-      command=('$ld $ldflags -o $out -Wl,-rpath=\$$ORIGIN/lib '
+      command=('$ld $ldflags -o $out '
               '-Wl,--start-group $in $solibs -Wl,--end-group $libs'))
   elif flavor == 'win':
     master_ninja.rule(
@@ -1574,6 +1574,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
     dllcmd = ('%s gyp-win-tool link-wrapper $arch '
               '$ld /nologo $implibflag /DLL /OUT:$dll '
               '/PDB:$dll.pdb @$dll.rsp' % sys.executable)
+    dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch '
+               'cmd /c if exist $dll.manifest del $dll.manifest' %
+               sys.executable)
     dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch '
               '$mt -nologo -manifest $manifests -out:$dll.manifest' %
               sys.executable)
@@ -1593,8 +1596,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
       command=('%s gyp-win-tool link-wrapper $arch '
                '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp && '
                '%s gyp-win-tool manifest-wrapper $arch '
+               'cmd /c if exist $out.manifest del $out.manifest && '
+               '%s gyp-win-tool manifest-wrapper $arch '
                '$mt -nologo -manifest $manifests -out:$out.manifest' %
-               (sys.executable, sys.executable)),
+               (sys.executable, sys.executable, sys.executable)),
       rspfile='$out.rsp',
       rspfile_content='$in_newline $libs $ldflags')
   else:
@@ -1729,7 +1734,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
     abs_build_dir = os.path.abspath(toplevel_build)
     writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
                          OpenOutput(os.path.join(toplevel_build, output_file)),
-                         flavor, abs_build_dir=abs_build_dir)
+                         flavor, toplevel_dir=options.toplevel_dir)
     master_ninja.subninja(output_file)

     target = writer.WriteSpec(
@@ -1777,6 +1782,11 @@ def CallGenerateOutputForConfig(arglist):

 def GenerateOutput(target_list, target_dicts, data, params):
   user_config = params.get('generator_flags', {}).get('config', None)
+  if gyp.common.GetFlavor(params) == 'win':
+    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
+    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
+        target_list, target_dicts, generator_default_variables)
+
   if user_config:
     GenerateOutputForConfig(target_list, target_dicts, data, params,
                             user_config)
@@ -1110,20 +1110,29 @@ exit 1
         AddHeaderToTarget(header, pbxp, xct, True)

     # Add "copies".
+    pbxcp_dict = {}
     for copy_group in spec.get('copies', []):
-      pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
-            'name': 'Copy to ' + copy_group['destination']
-          },
-          parent=xct)
       dest = copy_group['destination']
       if dest[0] not in ('/', '$'):
         # Relative paths are relative to $(SRCROOT).
         dest = '$(SRCROOT)/' + dest
-      pbxcp.SetDestination(dest)

-      # TODO(mark): The usual comment about this knowing too much about
-      # gyp.xcodeproj_file internals applies.
-      xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
+      # Coalesce multiple "copies" sections in the same target with the same
+      # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
+      # they'll wind up with ID collisions.
+      pbxcp = pbxcp_dict.get(dest, None)
+      if pbxcp is None:
+        pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
+              'name': 'Copy to ' + copy_group['destination']
+            },
+            parent=xct)
+        pbxcp.SetDestination(dest)
+
+        # TODO(mark): The usual comment about this knowing too much about
+        # gyp.xcodeproj_file internals applies.
+        xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
+
+        pbxcp_dict[dest] = pbxcp

       for file in copy_group['files']:
         pbxcp.AddFile(file)
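
Keying the PBXCopyFilesBuildPhase on the resolved destination means repeated 'copies' entries that point at the same place share one phase, so the generated Xcode object IDs stay unique. The get-or-create pattern in miniature, with invented destinations:

phases = {}
for dest in ['$(SRCROOT)/data', '$(SRCROOT)/data', '/tmp/out']:
  phase = phases.get(dest, None)
  if phase is None:
    phase = object()  # stand-in for a freshly created PBXCopyFilesBuildPhase
    phases[dest] = phase
print len(phases)  # 2 -- the duplicate destination reused its phase
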
@@ -46,21 +46,16 @@ base_path_sections = [
 ]
 path_sections = []

+is_path_section_charset = set('=+?!')
+is_path_section_match_re = re.compile('_(dir|file|path)s?$')
+
 def IsPathSection(section):
   # If section ends in one of these characters, it's applied to a section
   # without the trailing characters. '/' is notably absent from this list,
   # because there's no way for a regular expression to be treated as a path.
-  while section[-1:] in ('=', '+', '?', '!'):
-    section = section[0:-1]
-  if section in path_sections or \
-     section.endswith('_dir') or section.endswith('_dirs') or \
-     section.endswith('_file') or section.endswith('_files') or \
-     section.endswith('_path') or section.endswith('_paths'):
-    return True
-  return False
+  while section[-1:] in is_path_section_charset:
+    section = section[:-1]
+  return section in path_sections or is_path_section_match_re.search(section)


 # base_non_configuraiton_keys is a list of key names that belong in the target
 # itself and should not be propagated into its configurations. It is merged
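
The regex _(dir|file|path)s?$ collapses the six endswith() checks into one search, applied after any trailing merge markers (=, +, ?, !) are stripped. Behavior on a few typical gyp section names:

import re

is_path_section_charset = set('=+?!')
is_path_section_match_re = re.compile('_(dir|file|path)s?$')

def is_path_section(section):
  # Same logic as the new IsPathSection, wrapped in bool() for printing.
  while section[-1:] in is_path_section_charset:
    section = section[:-1]
  return bool(is_path_section_match_re.search(section))

print is_path_section('include_dirs')   # True
print is_path_section('include_dirs+')  # True -- '+' merge marker stripped
print is_path_section('defines')        # False
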
@@ -269,7 +264,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
       aux_data[subdict_path]['included'] = []
     aux_data[subdict_path]['included'].append(include)

-    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'" % include)
+    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)

     MergeDicts(subdict,
                LoadOneBuildFile(include, data, aux_data, variables, None,
@@ -359,7 +354,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
   data['target_build_files'].add(build_file_path)

   gyp.DebugOutput(gyp.DEBUG_INCLUDES,
-                  "Loading Target Build File '%s'" % build_file_path)
+                  "Loading Target Build File '%s'", build_file_path)

   build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
                                      includes, True, check)
@@ -494,7 +489,7 @@ def CallLoadTargetBuildFile(global_flags,
                              aux_data_out,
                              dependencies)
   except Exception, e:
-    print "Exception: ", e
+    print >>sys.stderr, 'Exception: ', e
    return None


@@ -569,6 +564,12 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
   parallel_state.condition.acquire()
   while parallel_state.dependencies or parallel_state.pending:
     if parallel_state.error:
+      print >>sys.stderr, (
+          '\n'
+          'Note: an error occurred while running gyp using multiprocessing.\n'
+          'For more verbose output, set GYP_PARALLEL=0 in your environment.\n'
+          'If the error only occurs when GYP_PARALLEL=1, '
+          'please report a bug!')
       break
     if not parallel_state.dependencies:
       parallel_state.condition.wait()
@@ -608,32 +609,27 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
 # the input is something like "<(foo <(bar)) blah", then it would
 # return (1, 13), indicating the entire string except for the leading
 # "<" and trailing " blah".
-def FindEnclosingBracketGroup(input):
-  brackets = { '}': '{',
-               ']': '[',
-               ')': '(', }
+LBRACKETS= set('{[(')
+BRACKETS = {'}': '{', ']': '[', ')': '('}
+def FindEnclosingBracketGroup(input_str):
   stack = []
-  count = 0
   start = -1
-  for char in input:
-    if char in brackets.values():
+  for index, char in enumerate(input_str):
+    if char in LBRACKETS:
       stack.append(char)
       if start == -1:
-        start = count
-    if char in brackets.keys():
-      try:
-        last_bracket = stack.pop()
-      except IndexError:
+        start = index
+    elif char in BRACKETS:
+      if not stack:
         return (-1, -1)
-      if last_bracket != brackets[char]:
+      if stack.pop() != BRACKETS[char]:
         return (-1, -1)
-      if len(stack) == 0:
-        return (start, count + 1)
-    count = count + 1
+      if not stack:
+        return (start, index + 1)
   return (-1, -1)


-canonical_int_re = re.compile('^(0|-?[1-9][0-9]*)$')
+canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')


 def IsStrCanonicalInt(string):
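
The rewritten scanner keeps the old contract, swapping the manual counter for enumerate and the try/except pop for an explicit emptiness check. Checking it against the example given in the comment above:

LBRACKETS = set('{[(')
BRACKETS = {'}': '{', ']': '[', ')': '('}

def FindEnclosingBracketGroup(input_str):
  stack = []
  start = -1
  for index, char in enumerate(input_str):
    if char in LBRACKETS:
      stack.append(char)
      if start == -1:
        start = index
    elif char in BRACKETS:
      if not stack:
        return (-1, -1)
      if stack.pop() != BRACKETS[char]:
        return (-1, -1)
      if not stack:
        return (start, index + 1)
  return (-1, -1)

print FindEnclosingBracketGroup('<(foo <(bar)) blah')  # (1, 13)
print FindEnclosingBracketGroup('<(unbalanced')        # (-1, -1)
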
@@ -641,10 +637,7 @@ def IsStrCanonicalInt(string):

   The canonical form is such that str(int(string)) == string.
   """
-  if not isinstance(string, str) or not canonical_int_re.match(string):
-    return False
-
-  return True
+  return isinstance(string, str) and canonical_int_re.match(string)


 # This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
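
Two details make this one-liner safe: re.match() already anchors at the start of the string, which is why canonical_int_re could drop its '^', and the function now returns the match object (or False/None) instead of a strict bool, which is fine for the truth-value tests its callers perform:

import re

canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')

def IsStrCanonicalInt(string):
  return isinstance(string, str) and canonical_int_re.match(string)

print bool(IsStrCanonicalInt('42'))   # True
print bool(IsStrCanonicalInt('042'))  # False -- str(int('042')) != '042'
print bool(IsStrCanonicalInt(42))     # False -- not a str at all
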
@@ -713,7 +706,7 @@ def ExpandVariables(input, phase, variables, build_file):

   # Get the entire list of matches as a list of MatchObject instances.
   # (using findall here would return strings instead of MatchObjects).
-  matches = [match for match in variable_re.finditer(input_str)]
+  matches = list(variable_re.finditer(input_str))
   if not matches:
     return input_str

@@ -725,8 +718,7 @@ def ExpandVariables(input, phase, variables, build_file):
   matches.reverse()
   for match_group in matches:
     match = match_group.groupdict()
-    gyp.DebugOutput(gyp.DEBUG_VARIABLES,
-                    "Matches: %s" % repr(match))
+    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
     # match['replace'] is the substring to look for, match['type']
     # is the character code for the replacement type (< > <! >! <| >| <@
     # >@ <!@ >!@), match['is_array'] contains a '[' for command
@@ -839,8 +831,8 @@ def ExpandVariables(input, phase, variables, build_file):
       cached_value = cached_command_results.get(cache_key, None)
       if cached_value is None:
         gyp.DebugOutput(gyp.DEBUG_VARIABLES,
-                        "Executing command '%s' in directory '%s'" %
-                        (contents,build_file_dir))
+                        "Executing command '%s' in directory '%s'",
+                        contents, build_file_dir)

         replacement = ''

@@ -852,12 +844,17 @@ def ExpandVariables(input, phase, variables, build_file):
         # <!(python modulename param eters). Do this in |build_file_dir|.
         oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
         os.chdir(build_file_dir)
+        try:
+
-        parsed_contents = shlex.split(contents)
-        py_module = __import__(parsed_contents[0])
-        replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
-        os.chdir(oldwd)
+          parsed_contents = shlex.split(contents)
+          try:
+            py_module = __import__(parsed_contents[0])
+          except ImportError as e:
+            raise GypError("Error importing pymod_do_main"
+                           "module (%s): %s" % (parsed_contents[0], e))
+          replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
+        finally:
+          os.chdir(oldwd)
         assert replacement != None
       elif command_string:
         raise GypError("Unknown command string '%s' in '%s'." %
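
The try/finally matters because both the import and DoMain() can raise: the old code would then leave gyp chdir'd into |build_file_dir| for the rest of the run. The restored-cwd pattern in isolation:

import os

def run_in_dir(directory, fn):
  # Always restore the previous cwd, even when fn() raises.
  oldwd = os.getcwd()
  os.chdir(directory)
  try:
    return fn()
  finally:
    os.chdir(oldwd)
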
@@ -884,8 +881,8 @@ def ExpandVariables(input, phase, variables, build_file):
         cached_command_results[cache_key] = replacement
       else:
         gyp.DebugOutput(gyp.DEBUG_VARIABLES,
-                        "Had cache value for command '%s' in directory '%s'" %
-                        (contents,build_file_dir))
+                        "Had cache value for command '%s' in directory '%s'",
+                        contents,build_file_dir)
         replacement = cached_value

   else:
@@ -960,8 +957,7 @@ def ExpandVariables(input, phase, variables, build_file):
       # Look for more matches now that we've replaced some, to deal with
       # expanding local variables (variables defined in the same
       # variables block as this one).
-      gyp.DebugOutput(gyp.DEBUG_VARIABLES,
-                      "Found output %s, recursing." % repr(output))
+      gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
       if isinstance(output, list):
         if output and isinstance(output[0], list):
           # Leave output alone if it's a list of lists.
@@ -1062,7 +1058,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
     except NameError, e:
       gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
                                  (cond_expr_expanded, build_file))
-      raise
+      raise GypError(e)

   if merge_dict != None:
     # Expand variables and nested conditinals in the merge_dict before
@@ -1407,6 +1403,25 @@ def RemoveDuplicateDependencies(targets):
       target_dict[dependency_key] = Unify(dependencies)


+def Filter(l, item):
+  """Removes item from l."""
+  res = {}
+  return [res.setdefault(e, e) for e in l if e != item]
+
+
+def RemoveSelfDependencies(targets):
+  """Remove self dependencies from targets that have the prune_self_dependency
+  variable set."""
+  for target_name, target_dict in targets.iteritems():
+    for dependency_key in dependency_sections:
+      dependencies = target_dict.get(dependency_key, [])
+      if dependencies:
+        for t in dependencies:
+          if t == target_name:
+            if targets[t].get('variables', {}).get('prune_self_dependency', 0):
+              target_dict[dependency_key] = Filter(dependencies, target_name)
+
+
 class DependencyGraphNode(object):
   """

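
Filter removes every occurrence of item while preserving order; the res dict only canonicalizes equal elements to their first instance, so duplicates that differ from item survive:

def Filter(l, item):
  """Removes item from l."""
  res = {}
  return [res.setdefault(e, e) for e in l if e != item]

print Filter(['a', 'b', 'a', 'c'], 'a')  # ['b', 'c']
print Filter([1, 2, 2, 3], 3)            # [1, 2, 2] -- duplicates are kept
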
@@ -1845,12 +1860,10 @@ def MakePathRelative(to_file, fro_file, item):
   return ret

 def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
-  def is_hashable(x):
-    try:
-      hash(x)
-    except TypeError:
-      return False
-    return True
+  # Python documentation recommends objects which do not support hash
+  # set this value to None. Python library objects follow this rule.
+  is_hashable = lambda val: val.__hash__
+
   # If x is hashable, returns whether x is in s. Else returns whether x is in l.
   def is_in_set_or_list(x, s, l):
     if is_hashable(x):
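
The lambda leans on a CPython convention instead of calling hash(): mutable built-ins publish __hash__ as None, so the attribute itself works as a truth test. This is cheaper than the old try/except and, as the comment says, only as reliable as objects following that rule:

is_hashable = lambda val: val.__hash__
print bool(is_hashable('abc'))       # True
print bool(is_hashable(('a', 'b')))  # True
print bool(is_hashable([1, 2]))      # False -- list.__hash__ is None
print bool(is_hashable({}))          # False -- dict.__hash__ is None
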
@@ -1861,8 +1874,7 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):

   # Make membership testing of hashables in |to| (in particular, strings)
   # faster.
-  hashable_to_set = set([x for x in to if is_hashable(x)])
+  hashable_to_set = set(x for x in to if is_hashable(x))

   for item in fro:
     singleton = False
     if isinstance(item, str) or isinstance(item, int):
@@ -2056,7 +2068,7 @@ def SetUpConfigurations(target, target_dict):
   if not 'configurations' in target_dict:
     target_dict['configurations'] = {'Default': {}}
   if not 'default_configuration' in target_dict:
-    concrete = [i for i in target_dict['configurations'].keys()
+    concrete = [i for i in target_dict['configurations'].iterkeys()
                 if not target_dict['configurations'][i].get('abstract')]
     target_dict['default_configuration'] = sorted(concrete)[0]

@@ -2315,8 +2327,8 @@ def ValidateTargetType(target, target_dict):


 def ValidateSourcesInTarget(target, target_dict, build_file):
-  # TODO: Check if MSVC allows this for non-static_library targets.
-  if target_dict.get('type', None) != 'static_library':
+  # TODO: Check if MSVC allows this for loadable_module targets.
+  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
     return
   sources = target_dict.get('sources', [])
   basenames = {}
@@ -2548,7 +2560,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
     build_file = os.path.normpath(build_file)
     try:
       if parallel:
-        print >>sys.stderr, 'Using parallel processing (experimental).'
+        print >>sys.stderr, 'Using parallel processing.'
         LoadTargetBuildFileParallel(build_file, data, aux_data,
                                     variables, includes, depth, check)
       else:
@@ -2564,6 +2576,10 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
   # Fully qualify all dependency links.
   QualifyDependencies(targets)

+  # Remove self-dependencies from targets that have 'prune_self_dependencies'
+  # set to 1.
+  RemoveSelfDependencies(targets)
+
   # Expand dependencies specified as build_file:*.
   ExpandWildcardDependencies(targets, data)

@@ -80,6 +80,19 @@ class MacTool(object):
   def _CopyStringsFile(self, source, dest):
     """Copies a .strings file using iconv to reconvert the input into UTF-16."""
     input_code = self._DetectInputEncoding(source) or "UTF-8"
+
+    # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
+    # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
+    #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
+    #     semicolon in dictionary.
+    # on invalid files. Do the same kind of validation.
+    import CoreFoundation
+    s = open(source).read()
+    d = CoreFoundation.CFDataCreate(None, s, len(s))
+    _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
+    if error:
+      return
+
     fp = open(dest, 'w')
     args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code',
             'UTF-16', source]
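
The import is deferred because CoreFoundation comes from the PyObjC bindings that Apple's system Python ships; the check only ever runs inside gyp-mac-tool on a Mac. A hedged standalone version of the same validation (helper name is invented, and it assumes those bindings are present):

def strings_file_is_valid_plist(path):
  # False when CFPropertyListCreateFromXMLData reports a parse error,
  # mirroring the early return above. Mac-only: needs PyObjC CoreFoundation.
  import CoreFoundation
  data = open(path).read()
  cf_data = CoreFoundation.CFDataCreate(None, data, len(data))
  _, error = CoreFoundation.CFPropertyListCreateFromXMLData(
      None, cf_data, 0, None)
  return not error
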
@@ -168,8 +168,6 @@ class MsvsSettings(object):
     equivalents."""
     target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
     replacements = {
-        '$(VSInstallDir)': self.vs_version.Path(),
-        '$(VCInstallDir)': os.path.join(self.vs_version.Path(), 'VC') + '\\',
         '$(OutDir)\\': base_to_build + '\\' if base_to_build else '',
         '$(IntDir)': '$!INTERMEDIATE_DIR',
         '$(InputPath)': '${source}',
@@ -178,6 +176,12 @@ class MsvsSettings(object):
         '$(PlatformName)': target_platform,
         '$(ProjectDir)\\': '',
     }
+    # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
+    # Visual Studio is actually installed.
+    if self.vs_version.Path():
+      replacements['$(VSInstallDir)'] = self.vs_version.Path()
+      replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(),
+                                                     'VC') + '\\'
     # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
     # set. This happens when the SDK is sync'd via src-internal, rather than
     # by typical end-user installation of the SDK. If it's not set, we don't
@@ -275,6 +279,16 @@ class MsvsSettings(object):
         ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
     return defines

+  def GetCompilerPdbName(self, config, expand_special):
+    """Get the pdb file name that should be used for compiler invocations, or
+    None if there's no explicit name specified."""
+    config = self._TargetConfig(config)
+    pdbname = self._Setting(
+        ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
+    if pdbname:
+      pdbname = expand_special(self.ConvertVSMacros(pdbname))
+    return pdbname
+
   def GetOutputName(self, config, expand_special):
     """Gets the explicitly overridden output name for a target or returns None
     if it's not overridden."""
@@ -309,6 +323,7 @@ class MsvsSettings(object):
         map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O')
     cl('InlineFunctionExpansion', prefix='/Ob')
     cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
+    cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
     cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
     cl('WholeProgramOptimization', map={'true': '/GL'})
     cl('WarningLevel', prefix='/W')
@@ -323,8 +338,13 @@ class MsvsSettings(object):
     cl('RuntimeLibrary',
        map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
     cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
+    cl('DefaultCharIsUnsigned', map={'true': '/J'})
+    cl('TreatWChar_tAsBuiltInType',
+       map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
     cl('EnablePREfast', map={'true': '/analyze'})
     cl('AdditionalOptions', prefix='')
+    cflags.extend(['/FI' + f for f in self._Setting(
+        ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
     # ninja handles parallelism by itself, don't have the compiler do it too.
     cflags = filter(lambda x: not x.startswith('/MP'), cflags)
     return cflags
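
Each cl(...) call declares how one VCCLCompilerTool setting maps onto a cl.exe switch; the ForcedIncludeFiles extension does the same by hand for /FI. A hand-rolled sketch of a few of the mappings added here (setting values are hypothetical):

def translate(name, value):
  # Illustrative subset of the cl(...) mappings above.
  if name == 'ExceptionHandling':
    return '/EH' + {'1': 'sc', '2': 'a'}[value]
  if name == 'DefaultCharIsUnsigned':
    return {'true': '/J'}[value]
  if name == 'TreatWChar_tAsBuiltInType':
    return '/Zc:wchar_t' + {'false': '-', 'true': ''}[value]
  raise KeyError(name)

print translate('ExceptionHandling', '1')              # /EHsc
print translate('TreatWChar_tAsBuiltInType', 'false')  # /Zc:wchar_t-
print translate('DefaultCharIsUnsigned', 'true')       # /J
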
@ -378,6 +398,7 @@ class MsvsSettings(object):
|
|||||||
'VCLibrarianTool', append=libflags)
|
'VCLibrarianTool', append=libflags)
|
||||||
libflags.extend(self._GetAdditionalLibraryDirectories(
|
libflags.extend(self._GetAdditionalLibraryDirectories(
|
||||||
'VCLibrarianTool', config, gyp_to_build_path))
|
'VCLibrarianTool', config, gyp_to_build_path))
|
||||||
|
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
|
||||||
lib('AdditionalOptions')
|
lib('AdditionalOptions')
|
||||||
return libflags
|
return libflags
|
||||||
|
|
||||||
@ -414,6 +435,7 @@ class MsvsSettings(object):
|
|||||||
ldflags.append('/PDB:' + pdb)
|
ldflags.append('/PDB:' + pdb)
|
||||||
ld('AdditionalOptions', prefix='')
|
ld('AdditionalOptions', prefix='')
|
||||||
ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
|
ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
|
||||||
|
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
|
||||||
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
|
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
|
||||||
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
|
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
|
||||||
ld('RandomizedBaseAddress',
|
ld('RandomizedBaseAddress',
|
||||||
@ -426,13 +448,11 @@ class MsvsSettings(object):
|
|||||||
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
|
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
|
||||||
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
|
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
|
||||||
ld('EntryPointSymbol', prefix='/ENTRY:')
|
ld('EntryPointSymbol', prefix='/ENTRY:')
|
||||||
ld('Profile', map={ 'true': '/PROFILE'})
|
ld('Profile', map={'true': '/PROFILE'})
|
||||||
|
ld('LargeAddressAware',
|
||||||
|
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
|
||||||
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
|
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
|
||||||
ld('AdditionalDependencies', prefix='')
|
ld('AdditionalDependencies', prefix='')
|
||||||
# TODO(scottmg): These too.
|
|
||||||
ldflags.extend(('kernel32.lib', 'user32.lib', 'gdi32.lib', 'winspool.lib',
|
|
||||||
'comdlg32.lib', 'advapi32.lib', 'shell32.lib', 'ole32.lib',
|
|
||||||
'oleaut32.lib', 'uuid.lib', 'odbc32.lib', 'DelayImp.lib'))
|
|
||||||
|
|
||||||
# If the base address is not specifically controlled, DYNAMICBASE should
|
# If the base address is not specifically controlled, DYNAMICBASE should
|
||||||
# be on by default.
|
# be on by default.
|
||||||
@ -576,7 +596,8 @@ class MsvsSettings(object):
|
|||||||
('iid', iid),
|
('iid', iid),
|
||||||
('proxy', proxy)]
|
('proxy', proxy)]
|
||||||
# TODO(scottmg): Are there configuration settings to set these flags?
|
# TODO(scottmg): Are there configuration settings to set these flags?
|
||||||
flags = ['/char', 'signed', '/env', 'win32', '/Oicf']
|
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
|
||||||
|
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
|
||||||
return outdir, output, variables, flags
|
return outdir, output, variables, flags
|
||||||
|
|
||||||
|
|
||||||
@@ -586,29 +607,25 @@ def _LanguageMatchesForPch(source_ext, pch_source_ext):
   return ((source_ext in c_exts and pch_source_ext in c_exts) or
           (source_ext in cc_exts and pch_source_ext in cc_exts))
 
 
 class PrecompiledHeader(object):
   """Helper to generate dependencies and build rules to handle generation of
   precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
   """
-  def __init__(self, settings, config, gyp_to_build_path):
+  def __init__(
+      self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
     self.settings = settings
     self.config = config
-    self.gyp_to_build_path = gyp_to_build_path
+    pch_source = self.settings.msvs_precompiled_source[self.config]
+    self.pch_source = gyp_to_build_path(pch_source)
+    filename, _ = os.path.splitext(pch_source)
+    self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
 
   def _PchHeader(self):
     """Get the header that will appear in an #include line for all source
     files."""
     return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
 
-  def _PchSource(self):
-    """Get the source file that is built once to compile the pch data."""
-    return self.gyp_to_build_path(
-        self.settings.msvs_precompiled_source[self.config])
-
-  def _PchOutput(self):
-    """Get the name of the output of the compiled pch data."""
-    return '${pchprefix}.' + self._PchHeader() + '.pch'
-
   def GetObjDependencies(self, sources, objs):
     """Given a list of sources files and the corresponding object files,
     returns a list of the pch files that should be depended upon. The
@@ -616,24 +633,30 @@ class PrecompiledHeader(object):
     with make.py on Mac, and xcode_emulation.py."""
     if not self._PchHeader():
       return []
-    source = self._PchSource()
-    assert source
-    pch_ext = os.path.splitext(self._PchSource())[1]
+    pch_ext = os.path.splitext(self.pch_source)[1]
     for source in sources:
       if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
-        return [(None, None, self._PchOutput())]
+        return [(None, None, self.output_obj)]
     return []
 
   def GetPchBuildCommands(self):
-    """Returns [(path_to_pch, language_flag, language, header)].
-    |path_to_gch| and |header| are relative to the build directory."""
-    header = self._PchHeader()
-    source = self._PchSource()
-    if not source or not header:
-      return []
-    ext = os.path.splitext(source)[1]
-    lang = 'c' if ext == '.c' else 'cc'
-    return [(self._PchOutput(), '/Yc' + header, lang, source)]
+    """Not used on Windows as there are no additional build steps required
+    (instead, existing steps are modified in GetFlagsModifications below)."""
+    return []
+
+  def GetFlagsModifications(self, input, output, implicit, command,
+                            cflags_c, cflags_cc, expand_special):
+    """Get the modified cflags and implicit dependencies that should be used
+    for the pch compilation step."""
+    if input == self.pch_source:
+      pch_output = ['/Yc' + self._PchHeader()]
+      if command == 'cxx':
+        return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
+                self.output_obj, [])
+      elif command == 'cc':
+        return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
+                self.output_obj, [])
+    return [], output, implicit
 
 
 vs_version = None
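
Note: with this rework, Windows precompiled headers no longer get a separate
build step; instead GetFlagsModifications rewrites the flags of the one
compile step whose input is the pch source, adding /Yc<header> and pointing
the output at the shared pch object. A hedged sketch of how a generator
might consume the return value (the caller below is illustrative, not the
real ninja generator code):

    def flags_for_source(pch, input, output, implicit, command,
                         cflags_c, cflags_cc):
      # The real generator expands $!-style specials; identity is enough here.
      expand_special = lambda flag: flag
      modifications, output, implicit = pch.GetFlagsModifications(
          input, output, implicit, command, cflags_c, cflags_cc,
          expand_special)
      # modifications is a list of (variable, value) pairs, e.g.
      # [('cflags_cc', [..., '/YcFoo.h'])] for the pch source itself.
      return dict(modifications), output, implicit
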
@@ -711,7 +734,13 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
   of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
   sets up the environment, and then we do not prefix the compiler with
   an absolute path, instead preferring something like "cl.exe" in the rule
-  which will then run whichever the environment setup has put in the path."""
+  which will then run whichever the environment setup has put in the path.
+  When the following procedure to generate environment files does not
+  meet your requirement (e.g. for custom toolchains), you can pass
+  "-G ninja_use_custom_environment_files" to the gyp to suppress file
+  generation and use custom environment files prepared by yourself."""
+  if generator_flags.get('ninja_use_custom_environment_files', 0):
+    return
   vs = GetVSVersion(generator_flags)
   for arch in ('x86', 'x64'):
     args = vs.SetupScript(arch)
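
Note: per the expanded docstring, environment-file generation can now be
skipped for custom toolchains. A sketch of how the documented generator flag
reaches the check above (the command-line spelling is the one the docstring
gives; the surrounding plumbing is assumed):

    # Command line:  gyp -f ninja -G ninja_use_custom_environment_files ...
    generator_flags = {'ninja_use_custom_environment_files': 1}
    if generator_flags.get('ninja_use_custom_environment_files', 0):
      pass  # skip generation; the build uses user-provided files instead
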
@@ -11,13 +11,16 @@ import gyp.common
 import os.path
 import re
 import shlex
+import subprocess
+import sys
+from gyp.common import GypError
 
 class XcodeSettings(object):
   """A class that understands the gyp 'xcode_settings' object."""
 
-  # Computed lazily by _GetSdkBaseDir(). Shared by all XcodeSettings, so cached
+  # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
   # at class-level for efficiency.
-  _sdk_base_dir = None
+  _sdk_path_cache = {}
 
   def __init__(self, spec):
     self.spec = spec
@@ -219,34 +222,34 @@ class XcodeSettings(object):
     else:
       return self._GetStandaloneBinaryPath()
 
-  def _GetSdkBaseDir(self):
-    """Returns the root of the 'Developer' directory. On Xcode 4.2 and prior,
-    this is usually just /Developer. Xcode 4.3 moved that folder into the Xcode
-    bundle."""
-    if not XcodeSettings._sdk_base_dir:
-      import subprocess
-      job = subprocess.Popen(['xcode-select', '-print-path'],
-                             stdout=subprocess.PIPE,
-                             stderr=subprocess.STDOUT)
-      out, err = job.communicate()
-      if job.returncode != 0:
-        print out
-        raise Exception('Error %d running xcode-select' % job.returncode)
-      # The Developer folder moved in Xcode 4.3.
-      xcode43_sdk_path = os.path.join(
-          out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
-      if os.path.isdir(xcode43_sdk_path):
-        XcodeSettings._sdk_base_dir = xcode43_sdk_path
-      else:
-        XcodeSettings._sdk_base_dir = os.path.join(out.rstrip(), 'SDKs')
-    return XcodeSettings._sdk_base_dir
+  def _GetSdkVersionInfoItem(self, sdk, infoitem):
+    job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem],
+                           stdout=subprocess.PIPE,
+                           stderr=subprocess.STDOUT)
+    out = job.communicate()[0]
+    if job.returncode != 0:
+      sys.stderr.write(out + '\n')
+      raise GypError('Error %d running xcodebuild' % job.returncode)
+    return out.rstrip('\n')
 
   def _SdkPath(self):
-    sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx10.5')
-    if sdk_root.startswith('macosx'):
-      return os.path.join(self._GetSdkBaseDir(),
-                          'MacOSX' + sdk_root[len('macosx'):] + '.sdk')
-    return sdk_root
+    sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx')
+    if sdk_root not in XcodeSettings._sdk_path_cache:
+      XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
+          sdk_root, 'Path')
+    return XcodeSettings._sdk_path_cache[sdk_root]
+
+  def _AppendPlatformVersionMinFlags(self, lst):
+    self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
+    if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
+      # TODO: Implement this better?
+      sdk_path_basename = os.path.basename(self._SdkPath())
+      if sdk_path_basename.lower().startswith('iphonesimulator'):
+        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
+                      '-mios-simulator-version-min=%s')
+      else:
+        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
+                      '-miphoneos-version-min=%s')
 
   def GetCflags(self, configname):
     """Returns flags that need to be added to .c, .cc, .m, and .mm
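
Note: SDK discovery above now asks xcodebuild for the SDK path directly
(rather than deriving it from xcode-select output) and memoizes the answer
per SDK name in _sdk_path_cache. The query is equivalent to running
"xcodebuild -version -sdk macosx Path". A self-contained sketch of the same
call (universal_newlines is added here so the sketch also runs on Python 3;
gyp itself targets Python 2):

    import subprocess

    def sdk_path(sdk='macosx'):
      # Same invocation as _GetSdkVersionInfoItem(sdk, 'Path') above.
      job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, 'Path'],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             universal_newlines=True)
      out = job.communicate()[0]
      if job.returncode != 0:
        raise RuntimeError('Error %d running xcodebuild' % job.returncode)
      return out.rstrip('\n')

    # Typically returns something like
    # /Applications/Xcode.app/Contents/Developer/Platforms/.../MacOSX10.8.sdk
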
@@ -261,6 +264,9 @@ class XcodeSettings(object):
     if 'SDKROOT' in self._Settings():
       cflags.append('-isysroot %s' % sdk_root)
 
+    if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
+      cflags.append('-Wconstant-conversion')
+
     if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
       cflags.append('-funsigned-char')
 
@@ -301,7 +307,7 @@ class XcodeSettings(object):
     if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
       cflags.append('-Wnewline-eof')
 
-    self._Appendf(cflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
+    self._AppendPlatformVersionMinFlags(cflags)
 
     # TODO:
     if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
@@ -354,6 +360,16 @@ class XcodeSettings(object):
     """Returns flags that need to be added to .cc, and .mm compilations."""
     self.configname = configname
     cflags_cc = []
+
+    clang_cxx_language_standard = self._Settings().get(
+        'CLANG_CXX_LANGUAGE_STANDARD')
+    # Note: Don't make c++0x to c++11 so that c++0x can be used with older
+    # clangs that don't understand c++11 yet (like Xcode 4.2's).
+    if clang_cxx_language_standard:
+      cflags_cc.append('-std=%s' % clang_cxx_language_standard)
+
+    self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
+
     if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
       cflags_cc.append('-fno-rtti')
     if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
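
Note: the cflags_cc hunk above lets a target's xcode_settings choose the C++
dialect and standard library. An illustrative .gyp fragment (the target name
and values are hypothetical):

    {
      'target_name': 'example',
      'type': 'executable',
      'xcode_settings': {
        'CLANG_CXX_LANGUAGE_STANDARD': 'c++0x',  # emitted as -std=c++0x
        'CLANG_CXX_LIBRARY': 'libc++',           # emitted as -stdlib=libc++
      },
    }
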
@@ -362,6 +378,7 @@ class XcodeSettings(object):
       cflags_cc.append('-fvisibility-inlines-hidden')
     if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
       cflags_cc.append('-fno-threadsafe-statics')
+    # Note: This flag is a no-op for clang, it only has an effect for gcc.
     if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
       cflags_cc.append('-Wno-invalid-offsetof')
 
@@ -524,8 +541,9 @@ class XcodeSettings(object):
         ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
     self._Appendf(
         ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
-    self._Appendf(
-        ldflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
+    self._AppendPlatformVersionMinFlags(ldflags)
 
     if 'SDKROOT' in self._Settings():
       ldflags.append('-isysroot ' + self._SdkPath())
 
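
Note: _AppendPlatformVersionMinFlags (defined in an earlier hunk) is now used
for both compile and link flags, so the deployment-target settings map to
flags as sketched below (a standalone approximation of the helper's logic;
the real code reads the SDK basename from _SdkPath()):

    def version_min_flags(settings, sdk_path_basename='MacOSX10.8.sdk'):
      flags = []
      if 'MACOSX_DEPLOYMENT_TARGET' in settings:
        flags.append('-mmacosx-version-min=%s' %
                     settings['MACOSX_DEPLOYMENT_TARGET'])
      if 'IPHONEOS_DEPLOYMENT_TARGET' in settings:
        if sdk_path_basename.lower().startswith('iphonesimulator'):
          flags.append('-mios-simulator-version-min=%s' %
                       settings['IPHONEOS_DEPLOYMENT_TARGET'])
        else:
          flags.append('-miphoneos-version-min=%s' %
                       settings['IPHONEOS_DEPLOYMENT_TARGET'])
      return flags

    # version_min_flags({'MACOSX_DEPLOYMENT_TARGET': '10.6'})
    # -> ['-mmacosx-version-min=10.6']
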
@@ -1042,7 +1060,7 @@ def _TopologicallySortedEnvVarKeys(env):
     order.reverse()
     return order
   except gyp.common.CycleError, e:
-    raise Exception(
+    raise GypError(
       'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
 
 
@@ -1503,6 +1503,7 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
       'r': 'sourcecode.rez',
       'rez': 'sourcecode.rez',
       's': 'sourcecode.asm',
+      'storyboard': 'file.storyboard',
       'strings': 'text.plist.strings',
       'ttf': 'file',
       'xcconfig': 'text.xcconfig',
@@ -26,11 +26,20 @@
     (insert-file-contents-literally (concat filename ".fontified"))
     (read (current-buffer))))
 
+(defun equivalent-face (face)
+  "For the purposes of face comparison, we're not interested in the
+   differences between certain faces. For example, the difference between
+   font-lock-comment-delimiter and font-lock-comment-face."
+  (case face
+    ((font-lock-comment-delimiter-face) font-lock-comment-face)
+    (t face)))
+
 (defun text-face-properties (s)
   "Extract the text properties from s"
   (let ((result (list t)))
     (dotimes (i (length s))
-      (setq result (cons (get-text-property i 'face s) result)))
+      (setq result (cons (equivalent-face (get-text-property i 'face s))
+                         result)))
     (nreverse result)))
 
 (ert-deftest test-golden-samples ()
@@ -135,7 +135,7 @@
           (setq sections (cdr sections)) ; pop out a level
           (cond ((looking-at-p "['\"]") ; a string
                  (setq string-start (point))
-                 (forward-sexp 1)
+                 (goto-char (scan-sexps (point) 1))
                  (if (gyp-inside-dictionary-p)
                      ;; Look for sections inside a dictionary
                      (let ((section (gyp-section-name