Source release v3.1.0

741  third_party/gyp/generator/analyzer.py (vendored, new file)

@@ -0,0 +1,741 @@
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
This script is intended for use as a GYP_GENERATOR. It takes as input (by way
of the generator flag config_path) the path of a json file that dictates the
files and targets to search for. The following keys are supported:
files: list of paths (relative) of the files to search for.
test_targets: unqualified target names to search for. Any target in this list
that depends upon a file in |files| is output regardless of the type of target
or chain of dependencies.
additional_compile_targets: unqualified targets to search for in addition to
test_targets. Targets in the combined list that depend upon a file in |files|
are not necessarily output. For example, if the target is of type none then the
target is not output (but one of the descendants of the target will be).

The following is output:
error: only supplied if there is an error.
compile_targets: minimal set of targets that directly or indirectly (for
targets of type none) depend on the files in |files| and is one of the
supplied targets or a target that one of the supplied targets depends on.
The expectation is this set of targets is passed into a build step. This list
always contains the output of test_targets as well.
test_targets: set of targets from the supplied |test_targets| that either
directly or indirectly depend upon a file in |files|. This list is useful
if additional processing needs to be done for certain targets after the
build, such as running tests.
status: outputs one of three values: none of the supplied files were found,
one of the include files changed so that it should be assumed everything
changed (in this case test_targets and compile_targets are not output) or at
least one file was found.
invalid_targets: list of supplied targets that were not found.

Example:
Consider a graph like the following:
    A     D
   / \
  B   C
A depends upon both B and C, A is of type none and B and C are executables.
D is an executable, has no dependencies and nothing depends on it.
If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
the following is output:
|compile_targets| = ["B"]: B must be built as it depends upon the changed
file b.cc and the supplied target A depends upon it. A is not output as a
build_target as it is of type none with no rules and actions.
|test_targets| = ["B"]: B directly depends upon the changed file b.cc.

Even though the file d.cc, which D depends upon, has changed, D is not output
as it was not supplied by way of |additional_compile_targets| or
|test_targets|.

If the generator flag analyzer_output_path is specified, output is written
there. Otherwise output is written to stdout.

In Gyp the "all" target is shorthand for the root targets in the files passed
to gyp. For example, if file "a.gyp" contains targets "a1" and
"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
directly supplied to gyp. On the other hand, if both "a.gyp" and "b.gyp" are
supplied to gyp then the "all" target includes "b1" and "b2".
"""

import gyp.common
import gyp.ninja_syntax as ninja_syntax
import json
import os
import posixpath
import sys

debug = False

found_dependency_string = 'Found dependency'
no_dependency_string = 'No dependencies'
# Status when it should be assumed that everything has changed.
all_changed_string = 'Found dependency (all)'

# MatchStatus is used to indicate if and how a target depends upon the supplied
# sources.
# The target's sources contain one of the supplied paths.
MATCH_STATUS_MATCHES = 1
# The target has a dependency on another target that contains one of the
# supplied paths.
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
# The target's sources weren't in the supplied paths and none of the target's
# dependencies depend upon a target that matched.
MATCH_STATUS_DOESNT_MATCH = 3
# The target doesn't contain the source, but the dependent targets have not
# yet been visited to determine a more specific status.
MATCH_STATUS_TBD = 4

generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()

generator_wants_static_library_dependencies_adjusted = False

generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
                'LIB_DIR', 'SHARED_LIB_DIR']:
  generator_default_variables[dirname] = '!!!'

for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'CONFIGURATION_NAME']:
  generator_default_variables[unused] = ''


def _ToGypPath(path):
  """Converts a path to the format used by gyp."""
  if os.sep == '\\' and os.altsep == '/':
    return path.replace('\\', '/')
  return path


def _ResolveParent(path, base_path_components):
  """Resolves |path|, which starts with at least one '../'. Returns an empty
  string if the path shouldn't be considered. See _AddSources() for a
  description of |base_path_components|."""
  depth = 0
  while path.startswith('../'):
    depth += 1
    path = path[3:]
  # Relative includes may go outside the source tree. For example, an action
  # may have inputs in /usr/include, which are not in the source tree.
  if depth > len(base_path_components):
    return ''
  if depth == len(base_path_components):
    return path
  return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
      '/' + path
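
# For example, the behavior follows directly from the code above:
#   _ResolveParent('../b.cc', ['foo', 'bar'])       -> 'foo/b.cc'
#   _ResolveParent('../../b.cc', ['foo', 'bar'])    -> 'b.cc'
#   _ResolveParent('../../../b.cc', ['foo', 'bar']) -> ''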


def _AddSources(sources, base_path, base_path_components, result):
  """Extracts valid sources from |sources| and adds them to |result|. Each
  source file is relative to |base_path|, but may contain '..'. To make
  resolving '..' easier |base_path_components| contains each of the
  directories in |base_path|. Additionally each source may contain variables.
  Such sources are ignored as it is assumed dependencies on them are expressed
  and tracked by some other means."""
  # NOTE: gyp paths are always posix style.
  for source in sources:
    if not len(source) or source.startswith('!!!') or source.startswith('$'):
      continue
    # variable expansion may lead to //.
    org_source = source
    source = source[0] + source[1:].replace('//', '/')
    if source.startswith('../'):
      source = _ResolveParent(source, base_path_components)
      if len(source):
        result.append(source)
      continue
    result.append(base_path + source)
    if debug:
      print 'AddSource', org_source, result[len(result) - 1]


def _ExtractSourcesFromAction(action, base_path, base_path_components,
                              results):
  if 'inputs' in action:
    _AddSources(action['inputs'], base_path, base_path_components, results)


def _ToLocalPath(toplevel_dir, path):
  """Converts |path| to a path relative to |toplevel_dir|."""
  if path == toplevel_dir:
    return ''
  if path.startswith(toplevel_dir + '/'):
    return path[len(toplevel_dir) + len('/'):]
  return path


def _ExtractSources(target, target_dict, toplevel_dir):
  # |target| is either absolute or relative and in the format of the OS. Gyp
  # source paths are always posix. Convert |target| to a posix path relative to
  # |toplevel_dir_|. This is done to make it easy to build source paths.
  base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
  base_path_components = base_path.split('/')

  # Add a trailing '/' so that _AddSources() can easily build paths.
  if len(base_path):
    base_path += '/'

  if debug:
    print 'ExtractSources', target, base_path

  results = []
  if 'sources' in target_dict:
    _AddSources(target_dict['sources'], base_path, base_path_components,
                results)
  # Include the inputs from any actions. Any changes to these affect the
  # resulting output.
  if 'actions' in target_dict:
    for action in target_dict['actions']:
      _ExtractSourcesFromAction(action, base_path, base_path_components,
                                results)
  if 'rules' in target_dict:
    for rule in target_dict['rules']:
      _ExtractSourcesFromAction(rule, base_path, base_path_components, results)

  return results


class Target(object):
  """Holds information about a particular target:
  deps: set of Targets this Target depends upon. This is not recursive, only
    the direct dependent Targets.
  match_status: one of the MatchStatus values.
  back_deps: set of Targets that have a dependency on this Target.
  visited: used during iteration to indicate whether we've visited this target.
    This is used for two iterations, once in building the set of Targets and
    again in _GetBuildTargets().
  name: fully qualified name of the target.
  requires_build: True if the target type is such that it needs to be built.
    See _DoesTargetTypeRequireBuild for details.
  added_to_compile_targets: used when determining if the target was added to
    the set of targets that needs to be built.
  in_roots: true if this target is a descendant of one of the root nodes.
  is_executable: true if the type of target is executable.
  is_static_library: true if the type of target is static_library.
  is_or_has_linked_ancestor: true if the target does a link (eg executable), or
    if there is a target in back_deps that does a link."""
  def __init__(self, name):
    self.deps = set()
    self.match_status = MATCH_STATUS_TBD
    self.back_deps = set()
    self.name = name
    # TODO(sky): I don't like hanging this off Target. This state is specific
    # to certain functions and should be isolated there.
    self.visited = False
    self.requires_build = False
    self.added_to_compile_targets = False
    self.in_roots = False
    self.is_executable = False
    self.is_static_library = False
    self.is_or_has_linked_ancestor = False


class Config(object):
  """Details what we're looking for.
  files: set of files to search for.
  targets: see file description for details."""
  def __init__(self):
    self.files = []
    self.targets = set()
    self.additional_compile_target_names = set()
    self.test_target_names = set()

  def Init(self, params):
    """Initializes Config. This is a separate method as it raises an exception
    if there is a parse error."""
    generator_flags = params.get('generator_flags', {})
    config_path = generator_flags.get('config_path', None)
    if not config_path:
      return
    try:
      f = open(config_path, 'r')
      config = json.load(f)
      f.close()
    except IOError:
      raise Exception('Unable to open file ' + config_path)
    except ValueError as e:
      raise Exception('Unable to parse config file ' + config_path + str(e))
    if not isinstance(config, dict):
      raise Exception('config_path must be a JSON file containing a dictionary')
    self.files = config.get('files', [])
    self.additional_compile_target_names = set(
        config.get('additional_compile_targets', []))
    self.test_target_names = set(config.get('test_targets', []))


def _WasBuildFileModified(build_file, data, files, toplevel_dir):
  """Returns true if the build file |build_file| is either in |files| or
  one of the files included by |build_file| is in |files|. |toplevel_dir| is
  the root of the source tree."""
  if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
    if debug:
      print 'gyp file modified', build_file
    return True

  # First element of included_files is the file itself.
  if len(data[build_file]['included_files']) <= 1:
    return False

  for include_file in data[build_file]['included_files'][1:]:
    # |included_files| are relative to the directory of the |build_file|.
    rel_include_file = \
        _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
    if _ToLocalPath(toplevel_dir, rel_include_file) in files:
      if debug:
        print 'included gyp file modified, gyp_file=', build_file, \
            'included file=', rel_include_file
      return True
  return False


def _GetOrCreateTargetByName(targets, target_name):
  """Creates or returns the Target at targets[target_name]. If there is no
  Target for |target_name| one is created. Returns a tuple of whether a new
  Target was created and the Target."""
  if target_name in targets:
    return False, targets[target_name]
  target = Target(target_name)
  targets[target_name] = target
  return True, target


def _DoesTargetTypeRequireBuild(target_dict):
  """Returns true if the target type is such that it needs to be built."""
  # If a 'none' target has rules or actions we assume it requires a build.
  return bool(target_dict['type'] != 'none' or
              target_dict.get('actions') or target_dict.get('rules'))


def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
                     build_files):
  """Returns a tuple of the following:
  . A dictionary mapping from fully qualified name to Target.
  . A list of the targets that have a source file in |files|.
  . Targets that constitute the 'all' target. See description at top of file
    for details on the 'all' target.
  This sets the |match_status| of the targets that contain any of the source
  files in |files| to MATCH_STATUS_MATCHES.
  |toplevel_dir| is the root of the source tree."""
  # Maps from target name to Target.
  name_to_target = {}

  # Targets that matched.
  matching_targets = []

  # Queue of targets to visit.
  targets_to_visit = target_list[:]

  # Maps from build file to a boolean indicating whether the build file is in
  # |files|.
  build_file_in_files = {}

  # Root targets across all files.
  roots = set()

  # Set of Targets in |build_files|.
  build_file_targets = set()

  while len(targets_to_visit) > 0:
    target_name = targets_to_visit.pop()
    created_target, target = _GetOrCreateTargetByName(name_to_target,
                                                      target_name)
    if created_target:
      roots.add(target)
    elif target.visited:
      continue

    target.visited = True
    target.requires_build = _DoesTargetTypeRequireBuild(
        target_dicts[target_name])
    target_type = target_dicts[target_name]['type']
    target.is_executable = target_type == 'executable'
    target.is_static_library = target_type == 'static_library'
    target.is_or_has_linked_ancestor = (target_type == 'executable' or
                                        target_type == 'shared_library')

    build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
    if not build_file in build_file_in_files:
      build_file_in_files[build_file] = \
          _WasBuildFileModified(build_file, data, files, toplevel_dir)

    if build_file in build_files:
      build_file_targets.add(target)

    # If a build file (or any of its included files) is modified we assume all
    # targets in the file are modified.
    if build_file_in_files[build_file]:
      print 'matching target from modified build file', target_name
      target.match_status = MATCH_STATUS_MATCHES
      matching_targets.append(target)
    else:
      sources = _ExtractSources(target_name, target_dicts[target_name],
                                toplevel_dir)
      for source in sources:
        if _ToGypPath(os.path.normpath(source)) in files:
          print 'target', target_name, 'matches', source
          target.match_status = MATCH_STATUS_MATCHES
          matching_targets.append(target)
          break

    # Add dependencies to visit as well as updating back pointers for deps.
    for dep in target_dicts[target_name].get('dependencies', []):
      targets_to_visit.append(dep)

      created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
                                                                dep)
      if not created_dep_target:
        roots.discard(dep_target)

      target.deps.add(dep_target)
      dep_target.back_deps.add(target)

  return name_to_target, matching_targets, roots & build_file_targets
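
# A fully qualified target name, as used for the dictionary keys above, has
# the form 'path/to/file.gyp:target_name#toolset'; the path below is a
# hypothetical example:
#   gyp.common.ParseQualifiedTarget('foo/a.gyp:a1#target')
#     -> ['foo/a.gyp', 'a1', 'target']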


def _GetUnqualifiedToTargetMapping(all_targets, to_find):
  """Returns a tuple of the following:
  . mapping (dictionary) from unqualified name to Target for all the
    Targets in |to_find|.
  . any target names not found. If this is empty all targets were found."""
  result = {}
  if not to_find:
    return {}, []
  to_find = set(to_find)
  for target_name in all_targets.keys():
    extracted = gyp.common.ParseQualifiedTarget(target_name)
    if len(extracted) > 1 and extracted[1] in to_find:
      to_find.remove(extracted[1])
      result[extracted[1]] = all_targets[target_name]
      if not to_find:
        return result, []
  return result, [x for x in to_find]


def _DoesTargetDependOnMatchingTargets(target):
  """Returns true if |target| or any of its dependencies is one of the
  targets containing the files supplied as input to analyzer. This updates
  |matches| of the Targets as it recurses.
  target: the Target to look for."""
  if target.match_status == MATCH_STATUS_DOESNT_MATCH:
    return False
  if target.match_status == MATCH_STATUS_MATCHES or \
      target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
    return True
  for dep in target.deps:
    if _DoesTargetDependOnMatchingTargets(dep):
      target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
      print '\t', target.name, 'matches by dep', dep.name
      return True
  target.match_status = MATCH_STATUS_DOESNT_MATCH
  return False


def _GetTargetsDependingOnMatchingTargets(possible_targets):
  """Returns the list of Targets in |possible_targets| that depend (either
  directly or indirectly) on at least one of the targets containing the files
  supplied as input to analyzer.
  possible_targets: targets to search from."""
  found = []
  print 'Targets that matched by dependency:'
  for target in possible_targets:
    if _DoesTargetDependOnMatchingTargets(target):
      found.append(target)
  return found


def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
  """Recurses through all targets that depend on |target|, adding all targets
  that need to be built (and are in |roots|) to |result|.
  roots: set of root targets.
  add_if_no_ancestor: If true and there are no ancestors of |target| then add
    |target| to |result|. |target| must still be in |roots|.
  result: targets that need to be built are added here."""
  if target.visited:
    return

  target.visited = True
  target.in_roots = target in roots

  for back_dep_target in target.back_deps:
    _AddCompileTargets(back_dep_target, roots, False, result)
    target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
    target.in_roots |= back_dep_target.in_roots
    target.is_or_has_linked_ancestor |= (
        back_dep_target.is_or_has_linked_ancestor)

  # Always add 'executable' targets. Even though they may be built by other
  # targets that depend upon them it makes detection of what is going to be
  # built easier.
  # And always add static_libraries that have no dependencies on them from
  # linkables. This is necessary as the other dependencies on them may be
  # static libraries themselves, which are not compile time dependencies.
  if target.in_roots and \
     (target.is_executable or
      (not target.added_to_compile_targets and
       (add_if_no_ancestor or target.requires_build)) or
      (target.is_static_library and add_if_no_ancestor and
       not target.is_or_has_linked_ancestor)):
    print '\t\tadding to compile targets', target.name, 'executable', \
        target.is_executable, 'added_to_compile_targets', \
        target.added_to_compile_targets, 'add_if_no_ancestor', \
        add_if_no_ancestor, 'requires_build', target.requires_build, \
        'is_static_library', target.is_static_library, \
        'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
    result.add(target)
    target.added_to_compile_targets = True


def _GetCompileTargets(matching_targets, supplied_targets):
  """Returns the set of Targets that require a build.
  matching_targets: targets that changed and need to be built.
  supplied_targets: set of targets supplied to analyzer to search from."""
  result = set()
  for target in matching_targets:
    print 'finding compile targets for match', target.name
    _AddCompileTargets(target, supplied_targets, True, result)
  return result
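
# Tracing the docstring example through the two functions above (a sketch):
# with A (type none) depending on B and C, and only file b.cc changing, B is
# the sole matching target. _AddCompileTargets() then walks up B's back_deps;
# A is in |roots| but is of type none with no actions or rules, so
# requires_build is False and A is never added. Only B lands in |result|.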


def _WriteOutput(params, **values):
  """Writes the output, either to stdout or to a file if one is specified."""
  if 'error' in values:
    print 'Error:', values['error']
  if 'status' in values:
    print values['status']
  if 'targets' in values:
    values['targets'].sort()
    print 'Supplied targets that depend on changed files:'
    for target in values['targets']:
      print '\t', target
  if 'invalid_targets' in values:
    values['invalid_targets'].sort()
    print 'The following targets were not found:'
    for target in values['invalid_targets']:
      print '\t', target
  if 'build_targets' in values:
    values['build_targets'].sort()
    print 'Targets that require a build:'
    for target in values['build_targets']:
      print '\t', target
  if 'compile_targets' in values:
    values['compile_targets'].sort()
    print 'Targets that need to be built:'
    for target in values['compile_targets']:
      print '\t', target
  if 'test_targets' in values:
    values['test_targets'].sort()
    print 'Test targets:'
    for target in values['test_targets']:
      print '\t', target

  output_path = params.get('generator_flags', {}).get(
      'analyzer_output_path', None)
  if not output_path:
    print json.dumps(values)
    return
  try:
    f = open(output_path, 'w')
    f.write(json.dumps(values) + '\n')
    f.close()
  except IOError as e:
    print 'Error writing to output file', output_path, str(e)
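
# A successful run writes a single JSON dictionary. Using the values from the
# docstring example (formatting is illustrative):
#   {"status": "Found dependency",
#    "test_targets": ["B"],
#    "compile_targets": ["B"]}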


def _WasGypIncludeFileModified(params, files):
  """Returns true if one of the files in |files| is in the set of included
  files."""
  if params['options'].includes:
    for include in params['options'].includes:
      if _ToGypPath(os.path.normpath(include)) in files:
        print 'Include file modified, assuming all changed', include
        return True
  return False


def _NamesNotIn(names, mapping):
  """Returns a list of the values in |names| that are not in |mapping|."""
  return [name for name in names if name not in mapping]


def _LookupTargets(names, mapping):
  """Returns a list of the mapping[name] for each value in |names| that is in
  |mapping|."""
  return [mapping[name] for name in names if name in mapping]


def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp)."""
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
  elif flavor == 'win':
    default_variables.setdefault('OS', 'win')
    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)


class TargetCalculator(object):
  """Calculates the matching test_targets and matching compile_targets."""
  def __init__(self, files, additional_compile_target_names, test_target_names,
               data, target_list, target_dicts, toplevel_dir, build_files):
    self._additional_compile_target_names = set(additional_compile_target_names)
    self._test_target_names = set(test_target_names)
    self._name_to_target, self._changed_targets, self._root_targets = (
        _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
                         frozenset(files), build_files))
    self._unqualified_mapping, self.invalid_targets = (
        _GetUnqualifiedToTargetMapping(self._name_to_target,
                                       self._supplied_target_names_no_all()))

  def _supplied_target_names(self):
    return self._additional_compile_target_names | self._test_target_names

  def _supplied_target_names_no_all(self):
    """Returns the supplied test targets without 'all'."""
    result = self._supplied_target_names()
    result.discard('all')
    return result

  def is_build_impacted(self):
    """Returns true if the supplied files impact the build at all."""
    return self._changed_targets

  def find_matching_test_target_names(self):
    """Returns the set of output test targets."""
    assert self.is_build_impacted()
    # Find the test targets first. 'all' is special cased to mean all the
    # root targets. To deal with 'all' the supplied |test_targets| are expanded
    # to include the root targets during lookup. If any of the root targets
    # match, we remove it and replace it with 'all'.
    test_target_names_no_all = set(self._test_target_names)
    test_target_names_no_all.discard('all')
    test_targets_no_all = _LookupTargets(test_target_names_no_all,
                                         self._unqualified_mapping)
    test_target_names_contains_all = 'all' in self._test_target_names
    if test_target_names_contains_all:
      test_targets = [x for x in (set(test_targets_no_all) |
                                  set(self._root_targets))]
    else:
      test_targets = [x for x in test_targets_no_all]
    print 'supplied test_targets'
    for target_name in self._test_target_names:
      print '\t', target_name
    print 'found test_targets'
    for target in test_targets:
      print '\t', target.name
    print 'searching for matching test targets'
    matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
    matching_test_targets_contains_all = (test_target_names_contains_all and
                                          set(matching_test_targets) &
                                          set(self._root_targets))
    if matching_test_targets_contains_all:
      # Remove any of the targets for all that were not explicitly supplied;
      # 'all' is subsequently added to the matching names below.
      matching_test_targets = [x for x in (set(matching_test_targets) &
                                           set(test_targets_no_all))]
    print 'matched test_targets'
    for target in matching_test_targets:
      print '\t', target.name
    matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
                             for target in matching_test_targets]
    if matching_test_targets_contains_all:
      matching_target_names.append('all')
      print '\tall'
    return matching_target_names

  def find_matching_compile_target_names(self):
    """Returns the set of output compile targets."""
    assert self.is_build_impacted()
    # Compile targets are found by searching up from changed targets.
    # Reset the visited status for _GetBuildTargets.
    for target in self._name_to_target.itervalues():
      target.visited = False

    supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
                                      self._unqualified_mapping)
    if 'all' in self._supplied_target_names():
      supplied_targets = [x for x in (set(supplied_targets) |
                                      set(self._root_targets))]
    print 'Supplied test_targets & compile_targets'
    for target in supplied_targets:
      print '\t', target.name
    print 'Finding compile targets'
    compile_targets = _GetCompileTargets(self._changed_targets,
                                         supplied_targets)
    return [gyp.common.ParseQualifiedTarget(target.name)[1]
            for target in compile_targets]


def GenerateOutput(target_list, target_dicts, data, params):
  """Called by gyp as the final stage. Outputs results."""
  config = Config()
  try:
    config.Init(params)

    if not config.files:
      raise Exception('Must specify files to analyze via config_path generator '
                      'flag')

    toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
    if debug:
      print 'toplevel_dir', toplevel_dir

    if _WasGypIncludeFileModified(params, config.files):
      result_dict = { 'status': all_changed_string,
                      'test_targets': list(config.test_target_names),
                      'compile_targets': list(
                          config.additional_compile_target_names |
                          config.test_target_names) }
      _WriteOutput(params, **result_dict)
      return

    calculator = TargetCalculator(config.files,
                                  config.additional_compile_target_names,
                                  config.test_target_names, data,
                                  target_list, target_dicts, toplevel_dir,
                                  params['build_files'])
    if not calculator.is_build_impacted():
      result_dict = { 'status': no_dependency_string,
                      'test_targets': [],
                      'compile_targets': [] }
      if calculator.invalid_targets:
        result_dict['invalid_targets'] = calculator.invalid_targets
      _WriteOutput(params, **result_dict)
      return

    test_target_names = calculator.find_matching_test_target_names()
    compile_target_names = calculator.find_matching_compile_target_names()
    found_at_least_one_target = compile_target_names or test_target_names
    result_dict = { 'test_targets': test_target_names,
                    'status': found_dependency_string if
                        found_at_least_one_target else no_dependency_string,
                    'compile_targets': list(
                        set(compile_target_names) |
                        set(test_target_names)) }
    if calculator.invalid_targets:
      result_dict['invalid_targets'] = calculator.invalid_targets
    _WriteOutput(params, **result_dict)

  except Exception as e:
    _WriteOutput(params, error=str(e))
1069  third_party/gyp/generator/android.py (vendored): file diff suppressed because it is too large.

423  third_party/gyp/generator/cmake.py (vendored)
@@ -34,6 +34,7 @@ import signal
 import string
 import subprocess
 import gyp.common
+import gyp.xcode_emulation

 generator_default_variables = {
   'EXECUTABLE_PREFIX': '',
@@ -55,7 +56,7 @@ generator_default_variables = {
   'CONFIGURATION_NAME': '${configuration}',
 }

-FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}')
+FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')

 generator_supports_multiple_toolsets = True
 generator_wants_static_library_dependencies_adjusted = True
@@ -103,7 +104,7 @@ def NormjoinPathForceCMakeSource(base_path, rel_path):
   if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
     return rel_path
   # TODO: do we need to check base_path for absolute variables as well?
-  return os.path.join('${CMAKE_SOURCE_DIR}',
+  return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
                       os.path.normpath(os.path.join(base_path, rel_path)))


@@ -150,20 +151,17 @@ def SetFileProperty(output, source_name, property_name, values, sep):
   output.write('")\n')


-def SetFilesProperty(output, source_names, property_name, values, sep):
+def SetFilesProperty(output, variable, property_name, values, sep):
   """Given a set of source files, sets the given property on them."""
-  output.write('set_source_files_properties(\n')
-  for source_name in source_names:
-    output.write(' ')
-    output.write(source_name)
-    output.write('\n')
-  output.write(' PROPERTIES\n ')
+  output.write('set_source_files_properties(')
+  WriteVariable(output, variable)
+  output.write(' PROPERTIES ')
   output.write(property_name)
   output.write(' "')
   for value in values:
     output.write(CMakeStringEscape(value))
     output.write(sep)
-  output.write('"\n)\n')
+  output.write('")\n')


 def SetTargetProperty(output, target_name, property_name, values, sep=''):
@@ -216,7 +214,7 @@ def WriteVariable(output, variable_name, prepend=None):
   output.write('}')


-class CMakeTargetType:
+class CMakeTargetType(object):
   def __init__(self, command, modifier, property_modifier):
     self.command = command
     self.modifier = modifier
@@ -236,11 +234,11 @@ def StringToCMakeTargetName(a):
   """Converts the given string 'a' to a valid CMake target name.

   All invalid characters are replaced by '_'.
-  Invalid for cmake: ' ', '/', '(', ')'
+  Invalid for cmake: ' ', '/', '(', ')', '"'
   Invalid for make: ':'
   Invalid for unknown reasons but cause failures: '.'
   """
-  return a.translate(string.maketrans(' /():.', '______'))
+  return a.translate(string.maketrans(' /():."', '_______'))


 def WriteActions(target_name, actions, extra_sources, extra_deps,
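The widened translation table now also maps the double quote to an underscore.
A quick sketch of the new behavior (the target name here is hypothetical):

    >>> import string
    >>> 'out/Debug (x64):all.stamp'.translate(string.maketrans(' /():."', '_______'))
    'out_Debug__x64__all_stamp'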
@@ -296,7 +294,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps,
     WriteVariable(output, inputs_name)
     output.write('\n')

-    output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+    output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
     output.write(path_to_gyp)
     output.write('\n')

@@ -401,9 +399,9 @@ def WriteRules(target_name, rules, extra_sources, extra_deps,
     output.write(NormjoinPath(path_to_gyp, rule_source))
     output.write('\n')

-    # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives.
+    # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
     # The cwd is the current build directory.
-    output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+    output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
     output.write(path_to_gyp)
     output.write('\n')

@@ -464,7 +462,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
     extra_deps.append(copy_name)
     return

-  class Copy:
+  class Copy(object):
     def __init__(self, ext, command):
       self.cmake_inputs = []
       self.cmake_outputs = []
@@ -488,7 +486,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):

     copy = file_copy if os.path.basename(src) else dir_copy

-    copy.cmake_inputs.append(NormjoinPath(path_to_gyp, src))
+    copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
     copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
     copy.gyp_inputs.append(src)
     copy.gyp_outputs.append(dst)
@@ -525,7 +523,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
     WriteVariable(output, copy.inputs_name, ' ')
     output.write('\n')

-    output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+    output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
     output.write(path_to_gyp)
     output.write('\n')

@@ -611,8 +609,8 @@ class CMakeNamer(object):


 def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
-                options, generator_flags, all_qualified_targets, output):
+                options, generator_flags, all_qualified_targets, flavor,
+                output):
   # The make generator does this always.
   # TODO: It would be nice to be able to tell CMake all dependencies.
   circular_libs = generator_flags.get('circular', True)
@@ -636,10 +634,20 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
   spec = target_dicts.get(qualified_target, {})
   config = spec.get('configurations', {}).get(config_to_use, {})

+  xcode_settings = None
+  if flavor == 'mac':
+    xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+
   target_name = spec.get('target_name', '<missing target name>')
   target_type = spec.get('type', '<missing target type>')
   target_toolset = spec.get('toolset')

+  cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+  if cmake_target_type is None:
+    print ('Target %s has unknown target type %s, skipping.' %
+           ( target_name, target_type ) )
+    return
+
   SetVariable(output, 'TARGET', target_name)
   SetVariable(output, 'TOOLSET', target_toolset)

@@ -667,27 +675,89 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
   srcs = spec.get('sources', [])

   # Gyp separates the sheep from the goats based on file extensions.
-  def partition(l, p):
-    return reduce(lambda x, e: x[not p(e)].append(e) or x, l, ([], []))
-  compilable_srcs, other_srcs = partition(srcs, Compilable)
+  # A full separation is done here because of flag handling (see below).
+  s_sources = []
+  c_sources = []
+  cxx_sources = []
+  linkable_sources = []
+  other_sources = []
+  for src in srcs:
+    _, ext = os.path.splitext(src)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
+
+    if src_type == 's':
+      s_sources.append(src_norm_path)
+    elif src_type == 'cc':
+      c_sources.append(src_norm_path)
+    elif src_type == 'cxx':
+      cxx_sources.append(src_norm_path)
+    elif Linkable(ext):
+      linkable_sources.append(src_norm_path)
+    else:
+      other_sources.append(src_norm_path)
+
+  for extra_source in extra_sources:
+    src, real_source = extra_source
+    _, ext = os.path.splitext(real_source)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+    if src_type == 's':
+      s_sources.append(src)
+    elif src_type == 'cc':
+      c_sources.append(src)
+    elif src_type == 'cxx':
+      cxx_sources.append(src)
+    elif Linkable(ext):
+      linkable_sources.append(src)
+    else:
+      other_sources.append(src)
+
+  s_sources_name = None
+  if s_sources:
+    s_sources_name = cmake_target_name + '__asm_srcs'
+    SetVariableList(output, s_sources_name, s_sources)
+
+  c_sources_name = None
+  if c_sources:
+    c_sources_name = cmake_target_name + '__c_srcs'
+    SetVariableList(output, c_sources_name, c_sources)
+
+  cxx_sources_name = None
+  if cxx_sources:
+    cxx_sources_name = cmake_target_name + '__cxx_srcs'
+    SetVariableList(output, cxx_sources_name, cxx_sources)
+
+  linkable_sources_name = None
+  if linkable_sources:
+    linkable_sources_name = cmake_target_name + '__linkable_srcs'
+    SetVariableList(output, linkable_sources_name, linkable_sources)
+
+  other_sources_name = None
+  if other_sources:
+    other_sources_name = cmake_target_name + '__other_srcs'
+    SetVariableList(output, other_sources_name, other_sources)

   # CMake gets upset when executable targets provide no sources.
-  if target_type == 'executable' and not compilable_srcs and not extra_sources:
-    print ('Executable %s has no complilable sources, treating as "none".' %
-           target_name )
-    target_type = 'none'
+  # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
+  dummy_sources_name = None
+  has_sources = (s_sources_name or
+                 c_sources_name or
+                 cxx_sources_name or
+                 linkable_sources_name or
+                 other_sources_name)
+  if target_type == 'executable' and not has_sources:
+    dummy_sources_name = cmake_target_name + '__dummy_srcs'
+    SetVariable(output, dummy_sources_name,
+                "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
+    output.write('if(NOT EXISTS "')
+    WriteVariable(output, dummy_sources_name)
+    output.write('")\n')
+    output.write(' file(WRITE "')
+    WriteVariable(output, dummy_sources_name)
+    output.write('" "")\n')
+    output.write("endif()\n")

-  cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
-  if cmake_target_type is None:
-    print ('Target %s has unknown target type %s, skipping.' %
-           ( target_name, target_type ) )
-    return
-
-  other_srcs_name = None
-  if other_srcs:
-    other_srcs_name = cmake_target_name + '__other_srcs'
-    SetVariableList(output, other_srcs_name,
-      [NormjoinPath(path_from_cmakelists_to_gyp, src) for src in other_srcs])

   # CMake is opposed to setting linker directories and considers the practice
   # of setting linker directories dangerous. Instead, it favors the use of
@@ -713,37 +783,54 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
     output.write(' ')
     output.write(cmake_target_type.modifier)

-  if other_srcs_name:
-    WriteVariable(output, other_srcs_name, ' ')
-
   output.write('\n')

-  for src in compilable_srcs:
-    output.write(' ')
-    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
-    output.write('\n')
-  for extra_source in extra_sources:
-    output.write(' ')
-    src, _ = extra_source
-    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
-    output.write('\n')
+  if s_sources_name:
+    WriteVariable(output, s_sources_name, ' ')
+  if c_sources_name:
+    WriteVariable(output, c_sources_name, ' ')
+  if cxx_sources_name:
+    WriteVariable(output, cxx_sources_name, ' ')
+  if linkable_sources_name:
+    WriteVariable(output, linkable_sources_name, ' ')
+  if other_sources_name:
+    WriteVariable(output, other_sources_name, ' ')
+  if dummy_sources_name:
+    WriteVariable(output, dummy_sources_name, ' ')

   output.write(')\n')

+  # Let CMake know if the 'all' target should depend on this target.
+  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
+                      else 'FALSE')
+  SetTargetProperty(output, cmake_target_name,
+                    'EXCLUDE_FROM_ALL', exclude_from_all)
+  for extra_target_name in extra_deps:
+    SetTargetProperty(output, extra_target_name,
+                      'EXCLUDE_FROM_ALL', exclude_from_all)
+
   # Output name and location.
   if target_type != 'none':
+    # Link as 'C' if there are no other files
+    if not c_sources and not cxx_sources:
+      SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
+
     # Mark uncompiled sources as uncompiled.
-    if other_srcs_name:
+    if other_sources_name:
       output.write('set_source_files_properties(')
-      WriteVariable(output, other_srcs_name, '')
+      WriteVariable(output, other_sources_name, '')
       output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')

+    # Mark object sources as linkable.
+    if linkable_sources_name:
+      output.write('set_source_files_properties(')
+      WriteVariable(output, linkable_sources_name, '')
+      output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
+
     # Output directory
     target_output_directory = spec.get('product_dir')
     if target_output_directory is None:
       if target_type in ('executable', 'loadable_module'):
         target_output_directory = generator_default_variables['PRODUCT_DIR']
-      elif target_type in ('shared_library'):
+      elif target_type == 'shared_library':
         target_output_directory = '${builddir}/lib.${TOOLSET}'
       elif spec.get('standalone_static_library', False):
         target_output_directory = generator_default_variables['PRODUCT_DIR']
@@ -804,122 +891,98 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
                     cmake_target_output_basename)
     SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')

-  # Let CMake know if the 'all' target should depend on this target.
-  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
-                      else 'FALSE')
-  SetTargetProperty(output, cmake_target_name,
-                    'EXCLUDE_FROM_ALL', exclude_from_all)
-  for extra_target_name in extra_deps:
-    SetTargetProperty(output, extra_target_name,
-                      'EXCLUDE_FROM_ALL', exclude_from_all)
-
   # Includes
   includes = config.get('include_dirs')
   if includes:
     # This (target include directories) is what requires CMake 2.8.8
     includes_name = cmake_target_name + '__include_dirs'
     SetVariableList(output, includes_name,
        [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
         for include in includes])
     output.write('set_property(TARGET ')
     output.write(cmake_target_name)
     output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
     WriteVariable(output, includes_name, '')
     output.write(')\n')

   # Defines
   defines = config.get('defines')
   if defines is not None:
     SetTargetProperty(output,
                       cmake_target_name,
                       'COMPILE_DEFINITIONS',
                       defines,
                       ';')

   # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
   # CMake currently does not have target C and CXX flags.
   # So, instead of doing...

   # cflags_c = config.get('cflags_c')
   # if cflags_c is not None:
   #   SetTargetProperty(output, cmake_target_name,
   #                     'C_COMPILE_FLAGS', cflags_c, ' ')

   # cflags_cc = config.get('cflags_cc')
   # if cflags_cc is not None:
   #   SetTargetProperty(output, cmake_target_name,
   #                     'CXX_COMPILE_FLAGS', cflags_cc, ' ')

   # Instead we must...
-  s_sources = []
-  c_sources = []
-  cxx_sources = []
-  for src in srcs:
-    _, ext = os.path.splitext(src)
-    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-
-    if src_type == 's':
-      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
-    if src_type == 'cc':
-      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
-    if src_type == 'cxx':
-      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
-  for extra_source in extra_sources:
-    src, real_source = extra_source
-    _, ext = os.path.splitext(real_source)
-    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-
-    if src_type == 's':
-      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
-    if src_type == 'cc':
-      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
-    if src_type == 'cxx':
-      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
-
-  cflags = config.get('cflags', [])
-  cflags_c = config.get('cflags_c', [])
-  cflags_cxx = config.get('cflags_cc', [])
-  if c_sources and not (s_sources or cxx_sources):
-    flags = []
-    flags.extend(cflags)
-    flags.extend(cflags_c)
-    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
-  elif cxx_sources and not (s_sources or c_sources):
-    flags = []
-    flags.extend(cflags)
-    flags.extend(cflags_cxx)
-    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
-  else:
-    if s_sources and cflags:
-      SetFilesProperty(output, s_sources, 'COMPILE_FLAGS', cflags, ' ')
-
-    if c_sources and (cflags or cflags_c):
-      flags = []
-      flags.extend(cflags)
-      flags.extend(cflags_c)
-      SetFilesProperty(output, c_sources, 'COMPILE_FLAGS', flags, ' ')
-
-    if cxx_sources and (cflags or cflags_cxx):
-      flags = []
-      flags.extend(cflags)
-      flags.extend(cflags_cxx)
-      SetFilesProperty(output, cxx_sources, 'COMPILE_FLAGS', flags, ' ')
-
-  # Have assembly link as c if there are no other files
-  if not c_sources and not cxx_sources and s_sources:
-    SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
-
-  # Linker flags
-  ldflags = config.get('ldflags')
-  if ldflags is not None:
-    SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+  cflags = config.get('cflags', [])
+  cflags_c = config.get('cflags_c', [])
+  cflags_cxx = config.get('cflags_cc', [])
+  if xcode_settings:
+    cflags = xcode_settings.GetCflags(config_to_use)
+    cflags_c = xcode_settings.GetCflagsC(config_to_use)
+    cflags_cxx = xcode_settings.GetCflagsCC(config_to_use)
+    #cflags_objc = xcode_settings.GetCflagsObjC(config_to_use)
+    #cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use)
+
+  if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
+
+  elif c_sources and not (s_sources or cxx_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_c)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  elif cxx_sources and not (s_sources or c_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_cxx)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  else:
+    # TODO: This is broken, one cannot generally set properties on files,
+    # as other targets may require different properties on the same files.
+    if s_sources and cflags:
+      SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
+
+    if c_sources and (cflags or cflags_c):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_c)
+      SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+    if cxx_sources and (cflags or cflags_cxx):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_cxx)
+      SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+  # Linker flags
+  ldflags = config.get('ldflags')
+  if ldflags is not None:
+    SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+
+  # XCode settings
+  xcode_settings = config.get('xcode_settings', {})
+  for xcode_setting, xcode_value in xcode_settings.viewitems():
+    SetTargetProperty(output, cmake_target_name,
+                      "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value,
+                      '' if isinstance(xcode_value, str) else ' ')

   # Note on Dependencies and Libraries:
   # CMake wants to handle link order, resolving the link line up front.
@@ -985,7 +1048,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
|
||||
output.write(cmake_target_name)
|
||||
output.write('\n')
|
||||
if static_deps:
|
||||
write_group = circular_libs and len(static_deps) > 1
|
||||
write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac'
|
||||
if write_group:
|
||||
output.write('-Wl,--start-group\n')
|
||||
for dep in gyp.common.uniquer(static_deps):
|
||||
@@ -1001,9 +1064,9 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
|
||||
output.write('\n')
|
||||
if external_libs:
|
||||
for lib in gyp.common.uniquer(external_libs):
|
||||
output.write(' ')
|
||||
output.write(lib)
|
||||
output.write('\n')
|
||||
output.write(' "')
|
||||
output.write(RemovePrefix(lib, "$(SDKROOT)"))
|
||||
output.write('"\n')
|
||||
|
||||
output.write(')\n')
|
||||
|
||||
@@ -1015,6 +1078,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
|
||||
params, config_to_use):
|
||||
options = params['options']
|
||||
generator_flags = params['generator_flags']
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
|
||||
# generator_dir: relative path from pwd to where make puts build files.
|
||||
# Makes migrating from make to cmake easier, cmake doesn't put anything here.
|
||||
@@ -1040,20 +1104,49 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
|
||||
output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
|
||||
output.write('cmake_policy(VERSION 2.8.8)\n')
|
||||
|
||||
_, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
|
||||
gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
|
||||
output.write('project(')
|
||||
output.write(project_target)
|
||||
output.write(')\n')
|
||||
|
||||
SetVariable(output, 'configuration', config_to_use)
|
||||
|
||||
ar = None
|
||||
cc = None
|
||||
cxx = None
|
||||
|
||||
make_global_settings = data[gyp_file].get('make_global_settings', [])
|
||||
build_to_top = gyp.common.InvertRelativePath(build_dir,
|
||||
options.toplevel_dir)
|
||||
for key, value in make_global_settings:
|
||||
if key == 'AR':
|
||||
ar = os.path.join(build_to_top, value)
|
||||
if key == 'CC':
|
||||
cc = os.path.join(build_to_top, value)
|
||||
if key == 'CXX':
|
||||
cxx = os.path.join(build_to_top, value)
|
||||
|
||||
ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
|
||||
cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
|
||||
cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
|
||||
|
||||
if ar:
|
||||
SetVariable(output, 'CMAKE_AR', ar)
|
||||
if cc:
|
||||
SetVariable(output, 'CMAKE_C_COMPILER', cc)
|
||||
if cxx:
|
||||
SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
|
||||
|
||||
# The following appears to be as-yet undocumented.
|
||||
# http://public.kitware.com/Bug/view.php?id=8392
|
||||
output.write('enable_language(ASM)\n')
|
||||
# ASM-ATT does not support .S files.
|
||||
# output.write('enable_language(ASM-ATT)\n')
|
||||
|
||||
SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}')
|
||||
if cc:
|
||||
SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
|
||||
|
||||
SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
|
||||
SetVariable(output, 'obj', '${builddir}/obj')
|
||||
output.write('\n')
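
For illustration, a minimal standalone sketch of the mapping this hunk introduces: make_global_settings (key, value) pairs become CMake compiler/archiver variables, with tool paths re-rooted from the build directory to the top level. The settings list and relative path below are invented, not taken from the change itself:

import os

# Invented example input; gyp stores make_global_settings as (key, value)
# pairs, with tool paths relative to the top-level directory.
make_global_settings = [('CC', 'third_party/llvm-build/bin/clang'),
                        ('AR', 'third_party/llvm-build/bin/llvm-ar')]
build_to_top = '../..'  # stand-in for InvertRelativePath(build_dir, toplevel)

tools = {}
for key, value in make_global_settings:
    tools[key] = os.path.join(build_to_top, value)

# Each resolved tool is then emitted as a CMake variable, e.g.
# set(CMAKE_C_COMPILER "../../third_party/llvm-build/bin/clang")
print('set(CMAKE_C_COMPILER "%s")' % tools['CC'])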
@@ -1066,6 +1159,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
  output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
  output.write('\n')

  # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
  # resulting in 'Argument list too long' errors.
  # However, rsp files don't work correctly on Mac.
  if flavor != 'mac':
    output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
  output.write('\n')

  namer = CMakeNamer(target_list)

  # The list of targets upon which the 'all' target should depend.
@@ -1078,8 +1178,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
      all_qualified_targets.add(qualified_target)

  for qualified_target in target_list:
    if flavor == 'mac':
      gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
      spec = target_dicts[qualified_target]
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec)

    WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
                options, generator_flags, all_qualified_targets, output)
                options, generator_flags, all_qualified_targets, flavor, output)

  output.close()
third_party/gyp/generator/dump_dependency_json.py
vendored
@@ -14,6 +14,9 @@ generator_supports_multiple_toolsets = True

generator_wants_static_library_dependencies_adjusted = False

generator_filelist_paths = {
}

generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
@@ -56,6 +59,17 @@ def CalculateGeneratorInputInfo(params):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True

  toplevel = params['options'].toplevel_dir
  generator_dir = os.path.relpath(params['options'].generator_output or '.')
  # output_dir: relative path from generator_dir to the build directory.
  output_dir = generator_flags.get('output_dir', 'out')
  qualified_out_dir = os.path.normpath(os.path.join(
      toplevel, generator_dir, output_dir, 'gypfiles'))
  global generator_filelist_paths
  generator_filelist_paths = {
      'toplevel': toplevel,
      'qualified_out_dir': qualified_out_dir,
  }

def GenerateOutput(target_list, target_dicts, data, params):
  # Map of target -> list of targets it depends on.
@@ -74,7 +88,11 @@ def GenerateOutput(target_list, target_dicts, data, params):
      edges[target].append(dep)
      targets_to_visit.append(dep)

  filename = 'dump.json'
  try:
    filepath = params['generator_flags']['output_dir']
  except KeyError:
    filepath = '.'
  filename = os.path.join(filepath, 'dump.json')
  f = open(filename, 'w')
  json.dump(edges, f)
  f.close()
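
The dump this block writes is nothing more than the dependency adjacency map serialized as JSON; a minimal sketch of the file's shape (target names invented):

import json

# Each qualified target maps to the list of targets it depends on.
edges = {
    'base/base.gyp:base': [],
    'net/net.gyp:net': ['base/base.gyp:base'],
}

with open('dump.json', 'w') as f:
    json.dump(edges, f)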
134
third_party/gyp/generator/eclipse.py
vendored
@@ -24,6 +24,7 @@ import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
import xml.etree.cElementTree as ET

generator_wants_static_library_dependencies_adjusted = False

@@ -31,8 +32,8 @@ generator_default_variables = {
}

for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
  # Some gyp steps fail if these are empty(!).
  generator_default_variables[dirname] = 'dir'
  # Some gyp steps fail if these are empty(!), so we convert them to variables
  generator_default_variables[dirname] = '$' + dirname

for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
@@ -165,7 +166,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
  return all_includes_list


def GetCompilerPath(target_list, data):
def GetCompilerPath(target_list, data, options):
  """Determine a command that can be used to invoke the compiler.

  Returns:
@@ -173,13 +174,12 @@
    the compiler from that. Otherwise, see if a compiler was specified via the
    CC_target environment variable.
  """
  # First, see if the compiler is configured in make's settings.
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_dict = data[build_file].get('make_global_settings', {})
  for key, value in make_global_settings_dict:
    if key in ['CC', 'CXX']:
      return value
      return os.path.join(options.toplevel_dir, value)

  # Check to see if the compiler was specified as an environment variable.
  for key in ['CC_target', 'CC', 'CXX']:
@@ -295,33 +295,123 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                              os.path.join(toplevel_build, 'gen')]

  out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
  GenerateCdtSettingsFile(target_list,
                          target_dicts,
                          data,
                          params,
                          config_name,
                          os.path.join(toplevel_build,
                                       'eclipse-cdt-settings.xml'),
                          options,
                          shared_intermediate_dirs)
  GenerateClasspathFile(target_list,
                        target_dicts,
                        options.toplevel_dir,
                        toplevel_build,
                        os.path.join(toplevel_build,
                                     'eclipse-classpath.xml'))


def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
                            config_name, out_name, options,
                            shared_intermediate_dirs):
  gyp.common.EnsureDirExists(out_name)
  out = open(out_name, 'w')
  with open(out_name, 'w') as out:
    out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
    out.write('<cdtprojectproperties>\n')

  out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
  out.write('<cdtprojectproperties>\n')
    eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                     'GNU C++', 'GNU C', 'Assembly']
    compiler_path = GetCompilerPath(target_list, data, options)
    include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                            shared_intermediate_dirs,
                                            config_name, params, compiler_path)
    WriteIncludePaths(out, eclipse_langs, include_dirs)
    defines = GetAllDefines(target_list, target_dicts, data, config_name,
                            params, compiler_path)
    WriteMacros(out, eclipse_langs, defines)

  eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                   'GNU C++', 'GNU C', 'Assembly']
  compiler_path = GetCompilerPath(target_list, data)
  include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                          shared_intermediate_dirs, config_name,
                                          params, compiler_path)
  WriteIncludePaths(out, eclipse_langs, include_dirs)
  defines = GetAllDefines(target_list, target_dicts, data, config_name, params,
                          compiler_path)
  WriteMacros(out, eclipse_langs, defines)
    out.write('</cdtprojectproperties>\n')

  out.write('</cdtprojectproperties>\n')
  out.close()

def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
                          toplevel_build, out_name):
  '''Generates a classpath file suitable for symbol navigation and code
  completion of Java code (such as in Android projects) by finding all
  .java and .jar files used as action inputs.'''
  gyp.common.EnsureDirExists(out_name)
  result = ET.Element('classpath')

  def AddElements(kind, paths):
    # First, we need to normalize the paths so they are all relative to the
    # toplevel dir.
    rel_paths = set()
    for path in paths:
      if os.path.isabs(path):
        rel_paths.add(os.path.relpath(path, toplevel_dir))
      else:
        rel_paths.add(path)

    for path in sorted(rel_paths):
      entry_element = ET.SubElement(result, 'classpathentry')
      entry_element.set('kind', kind)
      entry_element.set('path', path)

  AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
  AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
  # Include the standard JRE container and a dummy out folder
  AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
  # Include a dummy out folder so that Eclipse doesn't use the default /bin
  # folder in the root of the project.
  AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])

  ET.ElementTree(result).write(out_name)
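
As a rough sketch of what GenerateClasspathFile produces (entry paths invented), the ElementTree calls above serialize to a flat list of classpathentry nodes:

import xml.etree.ElementTree as ET

result = ET.Element('classpath')
for kind, path in [('lib', 'third_party/foo/foo.jar'),
                   ('src', 'chrome/android/java/src'),
                   ('con', 'org.eclipse.jdt.launching.JRE_CONTAINER'),
                   ('output', 'out/Debug/.eclipse-java-build')]:
    entry = ET.SubElement(result, 'classpathentry')
    entry.set('kind', kind)
    entry.set('path', path)

# <classpath><classpathentry kind="lib" path="third_party/foo/foo.jar" />...
print(ET.tostring(result))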


def GetJavaJars(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all .jars used as inputs.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
          if os.path.isabs(input_):
            yield input_
          else:
            yield os.path.join(os.path.dirname(target_name), input_)


def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
  '''Generates a sequence of all likely java package root directories.'''
  for target_name in target_list:
    target = target_dicts[target_name]
    for action in target.get('actions', []):
      for input_ in action['inputs']:
        if (os.path.splitext(input_)[1] == '.java' and
            not input_.startswith('$')):
          dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
                                              input_))
          # If there is a parent 'src' or 'java' folder, navigate up to it -
          # these are canonical package root names in Chromium. This will
          # break if 'src' or 'java' exists in the package structure. This
          # could be further improved by inspecting the java file for the
          # package name if this proves to be too fragile in practice.
          parent_search = dir_
          while os.path.basename(parent_search) not in ['src', 'java']:
            parent_search, _ = os.path.split(parent_search)
            if not parent_search or parent_search == toplevel_dir:
              # Didn't find a known root, just return the original path
              yield dir_
              break
          else:
            yield parent_search
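
To make the walk-up heuristic concrete, here is the same loop as a standalone function, applied to an invented path:

import os.path

def find_package_root(dir_, toplevel_dir):
    # Walk up until a 'src' or 'java' path component is found, exactly as
    # GetJavaSourceDirs does; fall back to the original directory.
    parent_search = dir_
    while os.path.basename(parent_search) not in ['src', 'java']:
        parent_search, _ = os.path.split(parent_search)
        if not parent_search or parent_search == toplevel_dir:
            return dir_
    return parent_search

# -> 'chrome/android/java'
print(find_package_root('chrome/android/java/org/chromium/foo', ''))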


def GenerateOutput(target_list, target_dicts, data, params):
  """Generate an XML settings file that can be imported into a CDT project."""

  if params['options'].generator_output:
    raise NotImplementedError, "--generator_output not implemented for eclipse"
    raise NotImplementedError("--generator_output not implemented for eclipse")

  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
7
third_party/gyp/generator/gypd.py
vendored
@@ -39,9 +39,11 @@ import pprint

# These variables should just be spit back out as variable references.
_generator_identity_variables = [
  'CONFIGURATION_NAME',
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'LIB_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
@@ -49,6 +51,11 @@ _generator_identity_variables = [
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
  'SHARED_LIB_DIR',
  'SHARED_LIB_PREFIX',
  'SHARED_LIB_SUFFIX',
  'STATIC_LIB_PREFIX',
  'STATIC_LIB_SUFFIX',
]

# gypd doesn't define a default value for OS like many other generator
153
third_party/gyp/generator/make.py
vendored
@@ -29,6 +29,7 @@ import gyp
import gyp.common
import gyp.xcode_emulation
from gyp.common import GetEnvironFallback
from gyp.common import GypError

generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
@@ -210,10 +211,10 @@ cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(T

LINK_COMMANDS_AIX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)

quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)

quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
@@ -272,30 +273,22 @@ all_deps :=
%(make_global_settings)s

CC.target ?= %(CC.target)s
CFLAGS.target ?= $(CFLAGS)
CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
CXX.target ?= %(CXX.target)s
CXXFLAGS.target ?= $(CXXFLAGS)
CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
LINK.target ?= %(LINK.target)s
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)

# C++ apps need to be linked with g++.
#
# Note: flock is used to serialize linking. Linking is a memory-intensive
# process so running parallel links can often lead to thrashing. To disable
# the serialization, override LINK via an environment variable as follows:
#
#   export LINK=g++
#
# This will allow make to invoke N linker processes as specified in -jN.
LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)
LINK ?= $(CXX.target)

# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= %(CC.host)s
CFLAGS.host ?=
CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
CXX.host ?= %(CXX.host)s
CXXFLAGS.host ?=
CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
LINK.host ?= %(LINK.host)s
LDFLAGS.host ?=
AR.host ?= %(AR.host)s
@@ -372,7 +365,7 @@ cmd_touch = touch $@

quiet_cmd_copy = COPY $@
# send stderr to /dev/null to ignore messages when linking directories.
cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp -af "$<" "$@")
cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")

%(link_commands)s
"""
@@ -631,6 +624,38 @@ def QuoteSpaces(s, quote=r'\ '):
  return s.replace(' ', quote)


# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
def _ValidateSourcesForOSX(spec, all_sources):
  """Makes sure that duplicate basenames are not specified in the source list.

  Arguments:
    spec: The target dictionary containing the properties of the target.
  """
  if spec.get('type', None) != 'static_library':
    return

  basenames = {}
  for source in all_sources:
    name, ext = os.path.splitext(source)
    is_compiled_file = ext in [
        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
    if not is_compiled_file:
      continue
    basename = os.path.basename(name)  # Don't include extension.
    basenames.setdefault(basename, []).append(source)

  error = ''
  for basename, files in basenames.iteritems():
    if len(files) > 1:
      error += '  %s: %s\n' % (basename, ' '.join(files))

  if error:
    print('static library %s has several files with the same basename:\n' %
          spec['target_name'] + error + 'libtool on OS X will generate' +
          ' warnings for them.')
    raise GypError('Duplicate basenames in sources section, see list above')
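
A quick, self-contained demonstration of the duplicate-basename check above (source paths invented):

import os.path

sources = ['foo/util.cc', 'bar/util.cc', 'foo/main.cc']
basenames = {}
for source in sources:
    name, ext = os.path.splitext(source)
    if ext not in ['.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']:
        continue
    basenames.setdefault(os.path.basename(name), []).append(source)

# Only 'util' appears twice, so it is the one that would be reported.
duplicates = dict((b, f) for b, f in basenames.items() if len(f) > 1)
print(duplicates)  # {'util': ['foo/util.cc', 'bar/util.cc']}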


# Map from qualified target to path to output.
target_outputs = {}
# Map from qualified target to any linkable output. A subset
@@ -640,7 +665,7 @@ target_outputs = {}
target_link_deps = {}


class MakefileWriter:
class MakefileWriter(object):
  """MakefileWriter packages up the writing of one target-specific foobar.mk.

  Its only real entry point is Write(), and is mostly used for namespacing.
@@ -758,6 +783,10 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
    # Sources.
    all_sources = spec.get('sources', []) + extra_sources
    if all_sources:
      if self.flavor == 'mac':
        # libtool on OS X generates warnings for duplicate basenames in the
        # same target.
        _ValidateSourcesForOSX(spec, all_sources)
      self.WriteSources(
          configs, deps, all_sources, extra_outputs,
          extra_link_deps, part_of_all,
@@ -990,7 +1019,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
    # accidentally writing duplicate dummy rules for those outputs.
    self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
    self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
    self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
    self.WriteMakeRule(outputs, inputs, actions,
                       command="%s_%d" % (name, count))
    # Spaces in rule filenames are not supported, but rule variables have
    # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
    # The spaces within the variables are valid, so remove the variables
@@ -1101,9 +1131,12 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
    for output, res in gyp.xcode_emulation.GetMacBundleResources(
        generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
        map(Sourceify, map(self.Absolutify, resources))):
      self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
                      part_of_all=True)
      bundle_deps.append(output)
      _, ext = os.path.splitext(output)
      if ext != '.xcassets':
        # Make does not support '.xcassets' emulation.
        self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
                        part_of_all=True)
        bundle_deps.append(output)


  def WriteMacInfoPlist(self, bundle_deps):
@@ -1546,7 +1579,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
    for link_dep in link_deps:
      assert ' ' not in link_dep, (
          "Spaces in alink input filenames not supported (%s)" % link_dep)
    if (self.flavor not in ('mac', 'openbsd', 'win') and not
    if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
        self.is_standalone_static_library):
      self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
                      part_of_all, postbuilds=postbuilds)
@@ -1656,6 +1689,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
    self.WriteMakeRule(outputs, inputs,
                       actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
                       comment = comment,
                       command = command,
                       force = True)
    # Add our outputs to the list of targets we read depfiles from.
    # all_deps is only used for deps file reading, and for deps files we replace
@@ -1666,7 +1700,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD


  def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
                    order_only=False, force=False, phony=False):
                    order_only=False, force=False, phony=False, command=None):
    """Write a Makefile rule, with some extra tricks.

    outputs: a list of outputs for the rule (note: this is not directly
@@ -1679,6 +1713,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
      force: if true, include FORCE_DO_CMD as an order-only dep
      phony: if true, the rule does not actually generate the named output, the
             output is just a name to run the rule
      command: (optional) command name to generate unambiguous labels
    """
    outputs = map(QuoteSpaces, outputs)
    inputs = map(QuoteSpaces, inputs)
@@ -1687,44 +1722,38 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
      self.WriteLn('# ' + comment)
    if phony:
      self.WriteLn('.PHONY: ' + ' '.join(outputs))
    # TODO(evanm): just make order_only a list of deps instead of these hacks.
    if order_only:
      order_insert = '| '
      pick_output = ' '.join(outputs)
    else:
      order_insert = ''
      pick_output = outputs[0]
    if force:
      force_append = ' FORCE_DO_CMD'
    else:
      force_append = ''
    if actions:
      self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
    self.WriteLn('%s: %s%s%s' % (pick_output, order_insert, ' '.join(inputs),
                                 force_append))
    force_append = ' FORCE_DO_CMD' if force else ''

    if order_only:
      # Order only rule: Just write a simple rule.
      # TODO(evanm): just make order_only a list of deps instead of this hack.
      self.WriteLn('%s: | %s%s' %
                   (' '.join(outputs), ' '.join(inputs), force_append))
    elif len(outputs) == 1:
      # Regular rule, one output: Just write a simple rule.
      self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
    else:
      # Regular rule, more than one output: Multiple outputs are tricky in
      # make. We will write three rules:
      # - All outputs depend on an intermediate file.
      # - Make .INTERMEDIATE depend on the intermediate.
      # - The intermediate file depends on the inputs and executes the
      #   actual command.
      # - The intermediate recipe will 'touch' the intermediate file.
      # - The multi-output rule will have a do-nothing recipe.
      intermediate = "%s.intermediate" % (command if command else self.target)
      self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
      self.WriteLn('\t%s' % '@:');
      self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
      self.WriteLn('%s: %s%s' %
                   (intermediate, ' '.join(inputs), force_append))
      actions.insert(0, '$(call do_cmd,touch)')

    if actions:
      for action in actions:
        self.WriteLn('\t%s' % action)
    if not order_only and len(outputs) > 1:
      # If we have more than one output, a rule like
      #   foo bar: baz
      # means that for *each* output we must run the action, potentially
      # in parallel. That is not what we're trying to write -- what
      # we want is that we run the action once and it generates all
      # the files.
      # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html
      # discusses this problem and has this solution:
      # 1) Write the naive rule that would produce parallel runs of
      #    the action.
      # 2) Make the outputs serialized on each other, so we won't start
      #    a parallel run until the first run finishes, at which point
      #    we'll have generated all the outputs and we're done.
      self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0]))
      # Add a dummy command to the "extra outputs" rule, otherwise make seems to
      # think these outputs haven't (couldn't have?) changed, and thus doesn't
      # flag them as changed (i.e. include in '$?') when evaluating dependent
      # rules, which in turn causes do_cmd() to skip running dependent commands.
      self.WriteLn('%s: ;' % (' '.join(outputs[1:])))
    self.WriteLn()
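
For a concrete picture of the three-rule scheme described in the comments above, this sketch prints what the multi-output branch would emit for an invented rule with outputs foo and bar, input baz, and command name gen:

# The multi-output case: everything hangs off one .intermediate file.
intermediate = 'gen.intermediate'
rule_lines = [
    'foo bar: %s' % intermediate,        # all outputs depend on it
    '\t@:',                              # do-nothing recipe for the outputs
    '.INTERMEDIATE: %s' % intermediate,  # a missing file won't force rebuilds
    '%s: baz FORCE_DO_CMD' % intermediate,
    '\t$(call do_cmd,touch)',            # touch the intermediate first
    '\t$(call do_cmd,gen)',              # then run the real action once
]
print('\n'.join(rule_lines))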
@@ -1981,6 +2010,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
    srcdir_prefix = '$(srcdir)/'

  flock_command= 'flock'
  copy_archive_arguments = '-af'
  header_params = {
      'default_target': default_target,
      'builddir': builddir_name,
@@ -1990,6 +2020,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
      'link_commands': LINK_COMMANDS_LINUX,
      'extra_commands': '',
      'srcdir': srcdir,
      'copy_archive_args': copy_archive_arguments,
  }
  if flavor == 'mac':
    flock_command = './gyp-mac-tool flock'
@@ -2013,8 +2044,15 @@ def GenerateOutput(target_list, target_dicts, data, params):
    header_params.update({
        'flock': 'lockf',
    })
  elif flavor == 'aix':
  elif flavor == 'openbsd':
    copy_archive_arguments = '-pPRf'
    header_params.update({
        'copy_archive_args': copy_archive_arguments,
    })
  elif flavor == 'aix':
    copy_archive_arguments = '-pPRf'
    header_params.update({
        'copy_archive_args': copy_archive_arguments,
        'link_commands': LINK_COMMANDS_AIX,
        'flock': './gyp-flock-tool flock',
        'flock_index': 2,
@@ -2034,7 +2072,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_array = data[build_file].get('make_global_settings', [])
  wrappers = {}
  wrappers['LINK'] = '%s $(builddir)/linker.lock' % flock_command
  for key, value in make_global_settings_array:
    if key.endswith('_wrapper'):
      wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
362
third_party/gyp/generator/msvs.py
vendored
@@ -2,7 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import copy
import ntpath
import os
@@ -13,6 +12,7 @@ import sys

import gyp.common
import gyp.easy_xml as easy_xml
import gyp.generator.ninja as ninja_generator
import gyp.MSVSNew as MSVSNew
import gyp.MSVSProject as MSVSProject
import gyp.MSVSSettings as MSVSSettings
@@ -21,6 +21,7 @@ import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
from gyp.common import OrderedSet

# TODO: Remove once bots are on 2.7, http://crbug.com/241769
def _import_OrderedDict():
@@ -41,7 +42,7 @@ OrderedDict = _import_OrderedDict()
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters.
VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')
VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')


generator_default_variables = {
@@ -81,8 +82,16 @@ generator_additional_non_configuration_keys = [
    'msvs_external_builder_out_dir',
    'msvs_external_builder_build_cmd',
    'msvs_external_builder_clean_cmd',
    'msvs_external_builder_clcompile_cmd',
    'msvs_enable_winrt',
    'msvs_requires_importlibrary',
    'msvs_enable_winphone',
    'msvs_application_type_revision',
    'msvs_target_platform_version',
    'msvs_target_platform_minversion',
]

generator_filelist_paths = None

# List of precompiled header related keys.
precomp_keys = [
@@ -97,46 +106,6 @@ cached_username = None
cached_domain = None


# Based on http://code.activestate.com/recipes/576694/.
class OrderedSet(collections.MutableSet):
  def __init__(self, iterable=None):
    self.end = end = []
    end += [None, end, end]  # sentinel node for doubly linked list
    self.map = {}            # key --> [key, prev, next]
    if iterable is not None:
      self |= iterable

  def __len__(self):
    return len(self.map)

  def discard(self, key):
    if key in self.map:
      key, prev, next = self.map.pop(key)
      prev[2] = next
      next[1] = prev

  def __contains__(self, key):
    return key in self.map

  def add(self, key):
    if key not in self.map:
      end = self.end
      curr = end[1]
      curr[2] = end[1] = self.map[key] = [key, curr, end]

  def update(self, iterable):
    for i in iterable:
      if i not in self:
        self.add(i)

  def __iter__(self):
    end = self.end
    curr = end[2]
    while curr is not end:
      yield curr[0]
      curr = curr[2]


# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
@@ -153,11 +122,11 @@ def _GetDomainAndUserName():
    call = subprocess.Popen(['net', 'config', 'Workstation'],
                            stdout=subprocess.PIPE)
    config = call.communicate()[0]
    username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
    username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
    username_match = username_re.search(config)
    if username_match:
      username = username_match.group(1)
    domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
    domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
    domain_match = domain_re.search(config)
    if domain_match:
      domain = domain_match.group(1)
@@ -288,6 +257,8 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
  if not tools.get(tool_name):
    tools[tool_name] = dict()
  tool = tools[tool_name]
  if 'CompileAsWinRT' == setting:
    return
  if tool.get(setting):
    if only_if_unset: return
    if type(tool[setting]) == list and type(value) == list:
@@ -317,13 +288,28 @@ def _ConfigFullName(config_name, config_data):
  return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)


def _ConfigWindowsTargetPlatformVersion(config_data):
  ver = config_data.get('msvs_windows_sdk_version')

  for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
              r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
    sdk_dir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
    if not sdk_dir:
      continue
    version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
    # Find a matching entry in sdk_dir\include.
    names = sorted([x for x in os.listdir(r'%s\include' % sdk_dir)
                    if x.startswith(version)], reverse=True)
    return names[0]
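
The final sorted/startswith step simply selects the highest SDK include directory whose name begins with the registry's ProductVersion; a standalone sketch with invented directory names:

# ProductVersion from the registry might be '10.0.10240' while the
# include directory carries a fourth component such as '10.0.10240.0'.
version = '10.0.10240'
listing = ['10.0.10150.0', '10.0.10240.0', 'shared']  # invented names

names = sorted([x for x in listing if x.startswith(version)], reverse=True)
print(names[0])  # -> '10.0.10240.0'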


def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
                                quote_cmd, do_setup_env):

  if [x for x in cmd if '$(InputDir)' in x]:
    input_dir_preamble = (
        'set INPUTDIR=$(InputDir)\n'
        'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n'
        'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
        'set INPUTDIR=%INPUTDIR:~0,-1%\n'
        )
  else:
@@ -852,23 +838,27 @@ def _GenerateRulesForMSVS(p, output_dir, options, spec,
  if rules_external:
    _GenerateExternalRules(rules_external, output_dir, spec,
                           sources, options, actions_to_add)
  _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
  _AdjustSourcesForRules(rules, sources, excluded_sources, False)


def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
  # Add outputs generated by each rule (if applicable).
  for rule in rules:
    # Done if not processing outputs as sources.
    if int(rule.get('process_outputs_as_sources', False)):
      # Add in the outputs from this rule.
      trigger_files = _FindRuleTriggerFiles(rule, sources)
      for trigger_file in trigger_files:
    # Add in the outputs from this rule.
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for trigger_file in trigger_files:
      # Remove trigger_file from excluded_sources to let the rule be triggered
      # (e.g. rule trigger ax_enums.idl is added to excluded_sources
      # because it's also in an action's inputs in the same project)
      excluded_sources.discard(_FixPath(trigger_file))
      # Done if not processing outputs as sources.
      if int(rule.get('process_outputs_as_sources', False)):
        inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
        inputs = OrderedSet(_FixPaths(inputs))
        outputs = OrderedSet(_FixPaths(outputs))
        inputs.remove(_FixPath(trigger_file))
        sources.update(inputs)
        if not spec.get('msvs_external_builder'):
        if not is_msbuild:
          excluded_sources.update(inputs)
        sources.update(outputs)

@@ -955,6 +945,42 @@ def _GenerateProject(project, options, version, generator_flags):
  return _GenerateMSVSProject(project, options, version, generator_flags)


# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
def _ValidateSourcesForMSVSProject(spec, version):
  """Makes sure that duplicate basenames are not specified in the source list.

  Arguments:
    spec: The target dictionary containing the properties of the target.
    version: The VisualStudioVersion object.
  """
  # This validation should not be applied to MSVC2010 and later.
  assert not version.UsesVcxproj()

  # TODO: Check if MSVC allows this for loadable_module targets.
  if spec.get('type', None) not in ('static_library', 'shared_library'):
    return
  sources = spec.get('sources', [])
  basenames = {}
  for source in sources:
    name, ext = os.path.splitext(source)
    is_compiled_file = ext in [
        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
    if not is_compiled_file:
      continue
    basename = os.path.basename(name)  # Don't include extension.
    basenames.setdefault(basename, []).append(source)

  error = ''
  for basename, files in basenames.iteritems():
    if len(files) > 1:
      error += '  %s: %s\n' % (basename, ' '.join(files))

  if error:
    print('static library %s has several files with the same basename:\n' %
          spec['target_name'] + error + 'MSVC08 cannot handle that.')
    raise GypError('Duplicate basenames in sources section, see list above')


def _GenerateMSVSProject(project, options, version, generator_flags):
  """Generates a .vcproj file. It may create .rules and .user files too.

@@ -980,6 +1006,11 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
  for config_name, config in spec['configurations'].iteritems():
    _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)

  # MSVC08 and prior versions cannot handle duplicate basenames in the same
  # target.
  # TODO: Take excluded sources into consideration if possible.
  _ValidateSourcesForMSVSProject(spec, version)

  # Prepare list of sources and excluded sources.
  gyp_file = os.path.split(project.build_file)[1]
  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
@@ -1099,7 +1130,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
      for this configuration.
  """
  # Get the information for this configuration
  include_dirs, resource_include_dirs = _GetIncludeDirs(config)
  include_dirs, midl_include_dirs, resource_include_dirs = \
      _GetIncludeDirs(config)
  libraries = _GetLibraries(spec)
  library_dirs = _GetLibraryDirs(config)
  out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
@@ -1127,6 +1159,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
  # Add the information to the appropriate tool
  _ToolAppend(tools, 'VCCLCompilerTool',
              'AdditionalIncludeDirectories', include_dirs)
  _ToolAppend(tools, 'VCMIDLTool',
              'AdditionalIncludeDirectories', midl_include_dirs)
  _ToolAppend(tools, 'VCResourceCompilerTool',
              'AdditionalIncludeDirectories', resource_include_dirs)
  # Add in libraries.
@@ -1182,10 +1216,14 @@ def _GetIncludeDirs(config):
  include_dirs = (
      config.get('include_dirs', []) +
      config.get('msvs_system_include_dirs', []))
  midl_include_dirs = (
      config.get('midl_include_dirs', []) +
      config.get('msvs_system_include_dirs', []))
  resource_include_dirs = config.get('resource_include_dirs', include_dirs)
  include_dirs = _FixPaths(include_dirs)
  midl_include_dirs = _FixPaths(midl_include_dirs)
  resource_include_dirs = _FixPaths(resource_include_dirs)
  return include_dirs, resource_include_dirs
  return include_dirs, midl_include_dirs, resource_include_dirs


def _GetLibraryDirs(config):
@@ -1219,7 +1257,7 @@ def _GetLibraries(spec):
  found = OrderedSet()
  unique_libraries_list = []
  for entry in reversed(libraries):
    library = re.sub('^\-l', '', entry)
    library = re.sub(r'^\-l', '', entry)
    if not os.path.splitext(library)[1]:
      library += '.lib'
    if library not in found:
@@ -1479,8 +1517,14 @@ def _AdjustSourcesAndConvertToFilterHierarchy(

  # Prune filters with a single child to flatten ugly directory structures
  # such as ../../src/modules/module1 etc.
  while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
    sources = sources[0].contents
  if version.UsesVcxproj():
    while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
        and len(set([s.name for s in sources])) == 1:
      assert all([len(s.contents) == 1 for s in sources])
      sources = [s.contents[0] for s in sources]
  else:
    while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
      sources = sources[0].contents

  return sources, excluded_sources, excluded_idl

@@ -1816,7 +1860,7 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
  return projects


def _InitNinjaFlavor(options, target_list, target_dicts):
def _InitNinjaFlavor(params, target_list, target_dicts):
  """Initialize targets for the ninja flavor.

  This sets up the necessary variables in the targets to generate msvs projects
@@ -1824,7 +1868,7 @@ def _InitNinjaFlavor(options, target_list, target_dicts):
  if they have not been set. This allows individual specs to override the
  default values initialized here.
  Arguments:
    options: Options provided to the generator.
    params: Params provided to the generator.
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
  """
@@ -1838,8 +1882,15 @@

    spec['msvs_external_builder'] = 'ninja'
    if not spec.get('msvs_external_builder_out_dir'):
      spec['msvs_external_builder_out_dir'] = \
          options.depth + '/out/$(Configuration)'
      gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
      gyp_dir = os.path.dirname(gyp_file)
      configuration = '$(Configuration)'
      if params.get('target_arch') == 'x64':
        configuration += '_x64'
      spec['msvs_external_builder_out_dir'] = os.path.join(
          gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
          ninja_generator.ComputeOutputDir(params),
          configuration)
    if not spec.get('msvs_external_builder_build_cmd'):
      spec['msvs_external_builder_build_cmd'] = [
          path_to_ninja,
@@ -1852,8 +1903,7 @@
        path_to_ninja,
        '-C',
        '$(OutDir)',
        '-t',
        'clean',
        '-tclean',
        '$(ProjectName)',
      ]

@@ -1905,6 +1955,19 @@ def PerformBuild(data, configurations, params):
    rtn = subprocess.check_call(arguments)


def CalculateGeneratorInputInfo(params):
  if params.get('flavor') == 'ninja':
    toplevel = params['options'].toplevel_dir
    qualified_out_dir = os.path.normpath(os.path.join(
        toplevel, ninja_generator.ComputeOutputDir(params),
        'gypfiles-msvs-ninja'))

    global generator_filelist_paths
    generator_filelist_paths = {
        'toplevel': toplevel,
        'qualified_out_dir': qualified_out_dir,
    }

def GenerateOutput(target_list, target_dicts, data, params):
  """Generate .sln and .vcproj files.

@@ -1934,7 +1997,7 @@ def GenerateOutput(target_list, target_dicts, data, params):

  # Optionally configure each spec to use ninja as the external builder.
  if params.get('flavor') == 'ninja':
    _InitNinjaFlavor(options, target_list, target_dicts)
    _InitNinjaFlavor(params, target_list, target_dicts)

  # Prepare the set of configurations.
  configs = set()
@@ -1987,7 +2050,7 @@ def GenerateOutput(target_list, target_dicts, data, params):


def _GenerateMSBuildFiltersFile(filters_path, source_files,
                                extension_to_rule_name):
                                rule_dependencies, extension_to_rule_name):
  """Generate the filters file.

  This file is used by Visual Studio to organize the presentation of source
@@ -2000,8 +2063,8 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
  """
  filter_group = []
  source_group = []
  _AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
                           filter_group, source_group)
  _AppendFiltersForMSBuild('', source_files, rule_dependencies,
                           extension_to_rule_name, filter_group, source_group)
  if filter_group:
    content = ['Project',
               {'ToolsVersion': '4.0',
@@ -2016,7 +2079,7 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
    os.unlink(filters_path)


def _AppendFiltersForMSBuild(parent_filter_name, sources,
def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
                             extension_to_rule_name,
                             filter_group, source_group):
  """Creates the list of filters and sources to be added in the filter file.
@@ -2042,11 +2105,12 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources,
                           ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
      # Recurse and add its dependents.
      _AppendFiltersForMSBuild(filter_name, source.contents,
                               extension_to_rule_name,
                               rule_dependencies, extension_to_rule_name,
                               filter_group, source_group)
    else:
      # It's a source. Create a source entry.
      _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name)
      _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
                                               extension_to_rule_name)
      source_entry = [element, {'Include': source}]
      # Specify the filter it is part of, if any.
      if parent_filter_name:
@@ -2054,7 +2118,8 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources,
        source_group.append(source_entry)


def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
def _MapFileToMsBuildSourceType(source, rule_dependencies,
                                extension_to_rule_name):
  """Returns the group and element type of the source file.

  Arguments:
@@ -2077,9 +2142,15 @@ def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
  elif ext == '.rc':
    group = 'resource'
    element = 'ResourceCompile'
  elif ext == '.asm':
    group = 'masm'
    element = 'MASM'
  elif ext == '.idl':
    group = 'midl'
    element = 'Midl'
  elif source in rule_dependencies:
    group = 'rule_dependency'
    element = 'CustomBuild'
  else:
    group = 'none'
    element = 'None'
@@ -2089,7 +2160,8 @@ def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
def _GenerateRulesForMSBuild(output_dir, options, spec,
                             sources, excluded_sources,
                             props_files_of_rules, targets_files_of_rules,
                             actions_to_add, extension_to_rule_name):
                             actions_to_add, rule_dependencies,
                             extension_to_rule_name):
  # MSBuild rules are implemented using three files: an XML file, a .targets
  # file and a .props file.
  # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
@@ -2105,6 +2177,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
      continue
    msbuild_rule = MSBuildRule(rule, spec)
    msbuild_rules.append(msbuild_rule)
    rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
    extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
  if msbuild_rules:
    base = spec['target_name'] + options.suffix
@@ -2126,7 +2199,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
  if rules_external:
    _GenerateExternalRules(rules_external, output_dir, spec,
                           sources, options, actions_to_add)
  _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
  _AdjustSourcesForRules(rules, sources, excluded_sources, True)


class MSBuildRule(object):
@@ -2305,6 +2378,9 @@ def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
         rule_name,
         {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
                       "'true'" % (rule_name, rule_name),
          'EchoOff': 'true',
          'StandardOutputImportance': 'High',
          'StandardErrorImportance': 'High',
          'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
          'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
          'Inputs': rule_inputs
|
||||
@@ -2579,14 +2655,60 @@ def _GetMSBuildProjectConfigurations(configurations):
|
||||
|
||||
def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
|
||||
namespace = os.path.splitext(gyp_file_name)[0]
|
||||
return [
|
||||
properties = [
|
||||
['PropertyGroup', {'Label': 'Globals'},
|
||||
['ProjectGuid', guid],
|
||||
['Keyword', 'Win32Proj'],
|
||||
['RootNamespace', namespace],
|
||||
['ProjectGuid', guid],
|
||||
['Keyword', 'Win32Proj'],
|
||||
['RootNamespace', namespace],
|
||||
['IgnoreWarnCompileDuplicatedFilename', 'true'],
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
|
||||
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
|
||||
properties[0].append(['PreferredToolArchitecture', 'x64'])
|
||||
|
||||
if spec.get('msvs_enable_winrt'):
|
||||
properties[0].append(['DefaultLanguage', 'en-US'])
|
||||
properties[0].append(['AppContainerApplication', 'true'])
|
||||
if spec.get('msvs_application_type_revision'):
|
||||
app_type_revision = spec.get('msvs_application_type_revision')
|
||||
properties[0].append(['ApplicationTypeRevision', app_type_revision])
|
||||
else:
|
||||
properties[0].append(['ApplicationTypeRevision', '8.1'])
|
||||
|
||||
if spec.get('msvs_target_platform_version'):
|
||||
target_platform_version = spec.get('msvs_target_platform_version')
|
||||
properties[0].append(['WindowsTargetPlatformVersion',
|
||||
target_platform_version])
|
||||
if spec.get('msvs_target_platform_minversion'):
|
||||
target_platform_minversion = spec.get('msvs_target_platform_minversion')
|
||||
properties[0].append(['WindowsTargetPlatformMinVersion',
|
||||
target_platform_minversion])
|
||||
else:
|
||||
properties[0].append(['WindowsTargetPlatformMinVersion',
|
||||
target_platform_version])
|
||||
if spec.get('msvs_enable_winphone'):
|
||||
properties[0].append(['ApplicationType', 'Windows Phone'])
|
||||
else:
|
||||
properties[0].append(['ApplicationType', 'Windows Store'])
|
||||
|
||||
platform_name = None
|
||||
msvs_windows_sdk_version = None
|
||||
for configuration in spec['configurations'].itervalues():
|
||||
platform_name = platform_name or _ConfigPlatform(configuration)
|
||||
msvs_windows_sdk_version = (msvs_windows_sdk_version or
|
||||
_ConfigWindowsTargetPlatformVersion(configuration))
|
||||
if platform_name and msvs_windows_sdk_version:
|
||||
break
|
||||
|
||||
if platform_name == 'ARM':
|
||||
properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
|
||||
if msvs_windows_sdk_version:
|
||||
properties[0].append(['WindowsTargetPlatformVersion',
|
||||
str(msvs_windows_sdk_version)])
|
||||
|
||||
return properties
|
||||
|
||||
def _GetMSBuildConfigurationDetails(spec, build_file):
|
||||
properties = {}
|
||||
@@ -2597,8 +2719,9 @@ def _GetMSBuildConfigurationDetails(spec, build_file):
|
||||
_AddConditionalProperty(properties, condition, 'ConfigurationType',
|
||||
msbuild_attributes['ConfigurationType'])
|
||||
if character_set:
|
||||
_AddConditionalProperty(properties, condition, 'CharacterSet',
|
||||
character_set)
|
||||
if 'msvs_enable_winrt' not in spec :
|
||||
_AddConditionalProperty(properties, condition, 'CharacterSet',
|
||||
character_set)
|
||||
return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
|
||||
|
||||
|
||||
@@ -2813,7 +2936,7 @@ def _AddConditionalProperty(properties, condition, name, value):
|
||||
|
||||
|
||||
# Regex for msvs variable references ( i.e. $(FOO) ).
|
||||
MSVS_VARIABLE_REFERENCE = re.compile('\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
|
||||
MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
|
||||
|
||||
|
||||
def _GetMSBuildPropertyGroup(spec, label, properties):
|
||||
@@ -2897,7 +3020,8 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||
converted = True
|
||||
msvs_settings = configuration.get('msvs_settings', {})
|
||||
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
|
||||
include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
|
||||
include_dirs, midl_include_dirs, resource_include_dirs = \
|
||||
_GetIncludeDirs(configuration)
|
||||
libraries = _GetLibraries(spec)
|
||||
library_dirs = _GetLibraryDirs(configuration)
|
||||
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
|
||||
@@ -2927,6 +3051,8 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||
# if you don't have any resources.
|
||||
_ToolAppend(msbuild_settings, 'ClCompile',
|
||||
'AdditionalIncludeDirectories', include_dirs)
|
||||
_ToolAppend(msbuild_settings, 'Midl',
|
||||
'AdditionalIncludeDirectories', midl_include_dirs)
|
||||
_ToolAppend(msbuild_settings, 'ResourceCompile',
|
||||
'AdditionalIncludeDirectories', resource_include_dirs)
|
||||
# Add in libraries, note that even for empty libraries, we want this
|
||||
@@ -2957,6 +3083,13 @@ def _FinalizeMSBuildSettings(spec, configuration):
|
||||
'PrecompiledHeaderFile', precompiled_header)
|
||||
_ToolAppend(msbuild_settings, 'ClCompile',
|
||||
'ForcedIncludeFiles', [precompiled_header])
|
||||
else:
|
||||
_ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
|
||||
# Turn off WinRT compilation
|
||||
_ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
|
||||
# Turn on import libraries if appropriate
|
||||
if spec.get('msvs_requires_importlibrary'):
|
||||
_ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
|
||||
# Loadable modules don't generate import libraries;
|
||||
# tell dependent projects to not expect one.
|
||||
if spec['type'] == 'loadable_module':
|
||||
@@ -3024,15 +3157,18 @@ def _VerifySourcesExist(sources, root_dir):
|
||||
return missing_sources
|
||||
|
||||
|
||||
def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
|
||||
actions_spec, sources_handled_by_action, list_excluded):
|
||||
groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
|
||||
def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
|
||||
extension_to_rule_name, actions_spec,
|
||||
sources_handled_by_action, list_excluded):
|
||||
groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
|
||||
'rule_dependency']
|
||||
grouped_sources = {}
|
||||
for g in groups:
|
||||
grouped_sources[g] = []
|
||||
|
||||
_AddSources2(spec, sources, exclusions, grouped_sources,
|
||||
extension_to_rule_name, sources_handled_by_action, list_excluded)
|
||||
rule_dependencies, extension_to_rule_name,
|
||||
sources_handled_by_action, list_excluded)
|
||||
sources = []
|
||||
for g in groups:
|
||||
if grouped_sources[g]:
|
||||
@@ -3043,13 +3179,15 @@ def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
|
||||
|
||||
|
||||
def _AddSources2(spec, sources, exclusions, grouped_sources,
|
||||
extension_to_rule_name, sources_handled_by_action,
|
||||
rule_dependencies, extension_to_rule_name,
|
||||
sources_handled_by_action,
|
||||
list_excluded):
|
||||
extensions_excluded_from_precompile = []
|
||||
for source in sources:
|
||||
if isinstance(source, MSVSProject.Filter):
|
||||
_AddSources2(spec, source.contents, exclusions, grouped_sources,
|
||||
extension_to_rule_name, sources_handled_by_action,
|
||||
rule_dependencies, extension_to_rule_name,
|
||||
sources_handled_by_action,
|
||||
list_excluded)
|
||||
else:
|
||||
if not source in sources_handled_by_action:
|
||||
@@ -3092,7 +3230,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources,
|
||||
detail.append(['PrecompiledHeader', ''])
|
||||
detail.append(['ForcedIncludeFiles', ''])
|
||||
|
||||
group, element = _MapFileToMsBuildSourceType(source,
|
||||
group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
|
||||
extension_to_rule_name)
|
||||
grouped_sources[group].append([element, {'Include': source}] + detail)
|
||||
|
||||
@@ -3136,6 +3274,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||
actions_to_add = {}
|
||||
props_files_of_rules = set()
|
||||
targets_files_of_rules = set()
|
||||
rule_dependencies = set()
|
||||
extension_to_rule_name = {}
|
||||
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
|
||||
|
||||
@@ -3144,10 +3283,11 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||
_GenerateRulesForMSBuild(project_dir, options, spec,
|
||||
sources, excluded_sources,
|
||||
props_files_of_rules, targets_files_of_rules,
|
||||
actions_to_add, extension_to_rule_name)
|
||||
actions_to_add, rule_dependencies,
|
||||
extension_to_rule_name)
|
||||
else:
|
||||
rules = spec.get('rules', [])
|
||||
_AdjustSourcesForRules(spec, rules, sources, excluded_sources)
|
||||
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
|
||||
|
||||
sources, excluded_sources, excluded_idl = (
|
||||
_AdjustSourcesAndConvertToFilterHierarchy(spec, options,
|
||||
@@ -3170,6 +3310,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
|
||||
spec, actions_to_add)
|
||||
|
||||
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
|
||||
rule_dependencies,
|
||||
extension_to_rule_name)
|
||||
missing_sources = _VerifySourcesExist(sources, project_dir)
|
||||
|
||||
@@ -3184,6 +3325,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
       ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
   import_cpp_targets_section = [
       ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
+  import_masm_props_section = [
+      ['Import',
+        {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
+  import_masm_targets_section = [
+      ['Import',
+        {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
   macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]

   content = [
@@ -3197,8 +3344,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
   content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
   content += import_default_section
   content += _GetMSBuildConfigurationDetails(spec, project.build_file)
-  content += _GetMSBuildLocalProperties(project.msbuild_toolset)
+  if spec.get('msvs_enable_winphone'):
+    content += _GetMSBuildLocalProperties('v120_wp81')
+  else:
+    content += _GetMSBuildLocalProperties(project.msbuild_toolset)
   content += import_cpp_props_section
+  content += import_masm_props_section
   content += _GetMSBuildExtensions(props_files_of_rules)
   content += _GetMSBuildPropertySheets(configurations)
   content += macro_section
@@ -3206,10 +3357,11 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
                                                 project.build_file)
   content += _GetMSBuildToolSettingsSections(spec, configurations)
   content += _GetMSBuildSources(
-      spec, sources, exclusions, extension_to_rule_name, actions_spec,
-      sources_handled_by_action, list_excluded)
+      spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
+      actions_spec, sources_handled_by_action, list_excluded)
   content += _GetMSBuildProjectReferences(project)
   content += import_cpp_targets_section
+  content += import_masm_targets_section
   content += _GetMSBuildExtensionTargets(targets_files_of_rules)

   if spec.get('msvs_external_builder'):
@@ -3226,7 +3378,9 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
 def _GetMSBuildExternalBuilderTargets(spec):
   """Return a list of MSBuild targets for external builders.

-  Right now, only "Build" and "Clean" targets are generated.
+  The "Build" and "Clean" targets are always generated. If the spec contains
+  'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+  be generated, to support building selected C/C++ files.

   Arguments:
     spec: The gyp target spec.
@@ -3245,7 +3399,17 @@ def _GetMSBuildExternalBuilderTargets(spec):
   clean_target = ['Target', {'Name': 'Clean'}]
   clean_target.append(['Exec', {'Command': clean_cmd}])

-  return [build_target, clean_target]
+  targets = [build_target, clean_target]
+
+  if spec.get('msvs_external_builder_clcompile_cmd'):
+    clcompile_cmd = _BuildCommandLineForRuleRaw(
+        spec, spec['msvs_external_builder_clcompile_cmd'],
+        False, False, False, False)
+    clcompile_target = ['Target', {'Name': 'ClCompile'}]
+    clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
+    targets.append(clcompile_target)
+
+  return targets


 def _GetMSBuildExtensions(props_files_of_rules):
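Note: the ['Target', ...] values above are gyp's nested-list XML notation, [tag, optional-attribute-dict, *children]. The real serialization is done by gyp's easy_xml module; the stand-in below and its placeholder command are illustrative assumptions, sketching what the new ClCompile target serializes to.

    def to_xml(node, indent=0):
      # node = [tag, optional attribute dict, *child nodes]
      tag, rest = node[0], list(node[1:])
      attrs = ''
      if rest and isinstance(rest[0], dict):
        attrs = ''.join(' %s="%s"' % kv for kv in rest[0].items())
        rest = rest[1:]
      pad = ' ' * indent
      if not rest:
        return '%s<%s%s />' % (pad, tag, attrs)
      children = '\n'.join(to_xml(c, indent + 2) for c in rest)
      return '%s<%s%s>\n%s\n%s</%s>' % (pad, tag, attrs, children, pad, tag)

    clcompile_target = ['Target', {'Name': 'ClCompile'},
                        ['Exec', {'Command': 'external-clcompile.cmd'}]]
    print(to_xml(clcompile_target))
    # <Target Name="ClCompile">
    #   <Exec Command="external-clcompile.cmd" />
    # </Target>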
@@ -3299,8 +3463,8 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
       # get too long. See also _AddActions: cygwin's setup_env mustn't be called
       # for every invocation or the command that sets the PATH will grow too
      # long.
-      command = (
-          '\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands))
+      command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
+                             for c in commands])
    _AddMSBuildAction(spec,
                      primary_input,
                      inputs,
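Note on why the join changed: the old form inserted the errorlevel check between commands, so a failure of the last command in the sequence was never checked; the new form appends the check after every command, including the last. With two hypothetical command names:

    commands = ['first.exe', 'second.exe']  # hypothetical commands

    old = '\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands)
    # first.exe
    # if %errorlevel% neq 0 exit /b %errorlevel%
    # second.exe                      <- no check after the final command

    new = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
                       for c in commands])
    # first.exe
    # if %errorlevel% neq 0 exit /b %errorlevel%
    # second.exe
    # if %errorlevel% neq 0 exit /b %errorlevel%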
580	third_party/gyp/generator/ninja.py	vendored
(file diff suppressed because it is too large)
21	third_party/gyp/generator/ninja_test.py	vendored
@@ -15,15 +15,18 @@ import TestCommon

 class TestPrefixesAndSuffixes(unittest.TestCase):
   def test_BinaryNamesWindows(self):
-    writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
-                               'build.ninja', 'win')
-    spec = { 'target_name': 'wee' }
-    self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
-        endswith('.exe'))
-    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
-        endswith('.dll'))
-    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
-        endswith('.lib'))
+    # These cannot run on non-Windows as they require a VS installation to
+    # correctly handle variable expansion.
+    if sys.platform.startswith('win'):
+      writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+                                 'build.ninja', 'win')
+      spec = { 'target_name': 'wee' }
+      self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
+          endswith('.exe'))
+      self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+          endswith('.dll'))
+      self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+          endswith('.lib'))

   def test_BinaryNamesLinux(self):
     writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
125	third_party/gyp/generator/xcode.py	vendored
@@ -5,6 +5,7 @@
 import filecmp
 import gyp.common
 import gyp.xcodeproj_file
+import gyp.xcode_ninja
 import errno
 import os
 import sys
@@ -68,11 +69,15 @@ generator_additional_path_sections = [
 # The Xcode-specific keys that exist on targets and aren't moved down to
 # configurations.
 generator_additional_non_configuration_keys = [
+  'ios_app_extension',
+  'ios_watch_app',
+  'ios_watchkit_extension',
   'mac_bundle',
   'mac_bundle_resources',
   'mac_framework_headers',
   'mac_framework_private_headers',
   'mac_xctest_bundle',
+  'mac_xcuitest_bundle',
   'xcode_create_dependents_test_runner',
 ]

@@ -83,6 +88,8 @@ generator_extra_sources_for_rules = [
   'mac_framework_private_headers',
 ]

+generator_filelist_paths = None
+
 # Xcode's standard set of library directories, which don't need to be duplicated
 # in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
 xcode_standard_library_dirs = frozenset([
@@ -484,7 +491,7 @@ sys.exit(subprocess.call(sys.argv[1:]))" """
 def AddSourceToTarget(source, type, pbxp, xct):
   # TODO(mark): Perhaps source_extensions and library_extensions can be made a
   # little bit fancier.
-  source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's']
+  source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']

   # .o is conceptually more of a "source" than a "library," but Xcode thinks
   # of "sources" as things to compile and "libraries" (or "frameworks") as
@@ -520,7 +527,7 @@ def AddHeaderToTarget(header, pbxp, xct, is_public):
   xct.HeadersPhase().AddFile(header, settings)


-_xcode_variable_re = re.compile('(\$\((.*?)\))')
+_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
 def ExpandXcodeVariables(string, expansions):
   """Expands Xcode-style $(VARIABLES) in string per the expansions dict.

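Note: the r-prefix only silences Python's invalid-escape warnings; both spellings compile to the same pattern. Group 1 captures the whole $(VAR) token and group 2 the variable name:

    import re
    _xcode_variable_re = re.compile(r'(\$\((.*?)\))')
    m = _xcode_variable_re.search('$(BUILT_PRODUCTS_DIR)/foo')
    print(m.group(1))  # $(BUILT_PRODUCTS_DIR)
    print(m.group(2))  # BUILT_PRODUCTS_DIR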
@@ -574,13 +581,47 @@ def PerformBuild(data, configurations, params):
   subprocess.check_call(arguments)


+def CalculateGeneratorInputInfo(params):
+  toplevel = params['options'].toplevel_dir
+  if params.get('flavor') == 'ninja':
+    generator_dir = os.path.relpath(params['options'].generator_output or '.')
+    output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+    output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
+    qualified_out_dir = os.path.normpath(os.path.join(
+        toplevel, output_dir, 'gypfiles-xcode-ninja'))
+  else:
+    output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
+    qualified_out_dir = os.path.normpath(os.path.join(
+        toplevel, output_dir, 'gypfiles'))
+
+  global generator_filelist_paths
+  generator_filelist_paths = {
+    'toplevel': toplevel,
+    'qualified_out_dir': qualified_out_dir,
+  }
+
+
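Worked example of the two layouts computed above, assuming a hypothetical toplevel_dir of '/src' and default generator flags:

    import os
    toplevel = '/src'  # hypothetical toplevel_dir
    # ninja flavor, default flags:
    print(os.path.normpath(os.path.join(toplevel, 'out',
                                        'gypfiles-xcode-ninja')))
    # -> /src/out/gypfiles-xcode-ninja
    # plain xcode: output_dir is already absolute, so os.path.join discards
    # the leading toplevel component:
    output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
    print(os.path.normpath(os.path.join(toplevel, output_dir, 'gypfiles')))
    # -> /src/xcodebuild/gypfiles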
 def GenerateOutput(target_list, target_dicts, data, params):
+  # Optionally configure each spec to use ninja as the external builder.
+  ninja_wrapper = params.get('flavor') == 'ninja'
+  if ninja_wrapper:
+    (target_list, target_dicts, data) = \
+        gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
+
   options = params['options']
   generator_flags = params.get('generator_flags', {})
   parallel_builds = generator_flags.get('xcode_parallel_builds', True)
   serialize_all_tests = \
       generator_flags.get('xcode_serialize_all_test_runs', True)
-  project_version = generator_flags.get('xcode_project_version', None)
+  upgrade_check_project_version = \
+      generator_flags.get('xcode_upgrade_check_project_version', None)
+
+  # Format upgrade_check_project_version with leading zeros as needed.
+  if upgrade_check_project_version:
+    upgrade_check_project_version = str(upgrade_check_project_version)
+    while len(upgrade_check_project_version) < 4:
+      upgrade_check_project_version = '0' + upgrade_check_project_version
+
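The padding loop is a left zero-fill to four digits, matching the form Xcode writes for attributes like LastUpgradeCheck (e.g. 0510 for Xcode 5.1):

    v = str(510)        # hypothetical flag value
    while len(v) < 4:
      v = '0' + v
    assert v == '0510'  # equivalent to str(510).zfill(4)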
   skip_excluded_files = \
       not generator_flags.get('xcode_list_excluded_files', True)
   xcode_projects = {}
@@ -595,11 +636,17 @@ def GenerateOutput(target_list, target_dicts, data, params):
     xcode_projects[build_file] = xcp
     pbxp = xcp.project

+    # Set project-level attributes from multiple options
+    project_attributes = {};
     if parallel_builds:
-      pbxp.SetProperty('attributes',
-                       {'BuildIndependentTargetsInParallel': 'YES'})
-    if project_version:
-      xcp.project_file.SetXcodeVersion(project_version)
+      project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
+    if upgrade_check_project_version:
+      project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
+      project_attributes['LastTestingUpgradeCheck'] = \
+          upgrade_check_project_version
+      project_attributes['LastSwiftUpdateCheck'] = \
+          upgrade_check_project_version
+    pbxp.SetProperty('attributes', project_attributes)

     # Add gyp/gypi files to project
     if not generator_flags.get('standalone'):
@@ -637,14 +684,22 @@ def GenerateOutput(target_list, target_dicts, data, params):
   # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
   # to create a single-file mh_bundle.
   _types = {
-    'executable':                  'com.apple.product-type.tool',
-    'loadable_module':             'com.googlecode.gyp.xcode.bundle',
-    'shared_library':              'com.apple.product-type.library.dynamic',
-    'static_library':              'com.apple.product-type.library.static',
-    'executable+bundle':           'com.apple.product-type.application',
-    'loadable_module+bundle':      'com.apple.product-type.bundle',
-    'loadable_module+xctest':      'com.apple.product-type.bundle.unit-test',
-    'shared_library+bundle':       'com.apple.product-type.framework',
+    'executable':                  'com.apple.product-type.tool',
+    'loadable_module':             'com.googlecode.gyp.xcode.bundle',
+    'shared_library':              'com.apple.product-type.library.dynamic',
+    'static_library':              'com.apple.product-type.library.static',
+    'mac_kernel_extension':        'com.apple.product-type.kernel-extension',
+    'executable+bundle':           'com.apple.product-type.application',
+    'loadable_module+bundle':      'com.apple.product-type.bundle',
+    'loadable_module+xctest':      'com.apple.product-type.bundle.unit-test',
+    'loadable_module+xcuitest':    'com.apple.product-type.bundle.ui-testing',
+    'shared_library+bundle':       'com.apple.product-type.framework',
+    'executable+extension+bundle': 'com.apple.product-type.app-extension',
+    'executable+watch+extension+bundle':
+        'com.apple.product-type.watchkit-extension',
+    'executable+watch+bundle':
+        'com.apple.product-type.application.watchapp',
+    'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
   }

   target_properties = {
@@ -654,14 +709,35 @@ def GenerateOutput(target_list, target_dicts, data, params):

     type = spec['type']
     is_xctest = int(spec.get('mac_xctest_bundle', 0))
+    is_xcuitest = int(spec.get('mac_xcuitest_bundle', 0))
     is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
+    is_app_extension = int(spec.get('ios_app_extension', 0))
+    is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
+    is_watch_app = int(spec.get('ios_watch_app', 0))
     if type != 'none':
       type_bundle_key = type
-      if is_xctest:
+      if is_xcuitest:
+        type_bundle_key += '+xcuitest'
+        assert type == 'loadable_module', (
+            'mac_xcuitest_bundle targets must have type loadable_module '
+            '(target %s)' % target_name)
+      elif is_xctest:
         type_bundle_key += '+xctest'
         assert type == 'loadable_module', (
             'mac_xctest_bundle targets must have type loadable_module '
             '(target %s)' % target_name)
+      elif is_app_extension:
+        assert is_bundle, ('ios_app_extension flag requires mac_bundle '
+            '(target %s)' % target_name)
+        type_bundle_key += '+extension+bundle'
+      elif is_watchkit_extension:
+        assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
+            '(target %s)' % target_name)
+        type_bundle_key += '+watch+extension+bundle'
+      elif is_watch_app:
+        assert is_bundle, ('ios_watch_app flag requires mac_bundle '
+            '(target %s)' % target_name)
+        type_bundle_key += '+watch+bundle'
       elif is_bundle:
         type_bundle_key += '+bundle'

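Worked example of the dispatch above, with a hypothetical spec: a watchkit-extension target of type 'executable' with mac_bundle and ios_watchkit_extension set yields the key 'executable+watch+extension+bundle', which the _types table added earlier maps to the watchkit-extension product type.

    spec = {'type': 'executable', 'mac_bundle': 1,
            'ios_watchkit_extension': 1}          # hypothetical target spec
    type_bundle_key = spec['type']                # 'executable'
    type_bundle_key += '+watch+extension+bundle'  # is_watchkit_extension path
    # _types[type_bundle_key] == 'com.apple.product-type.watchkit-extension'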
@@ -677,6 +753,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
       assert not is_bundle, (
           'mac_bundle targets cannot have type none (target "%s")' %
           target_name)
+      assert not is_xcuitest, (
+          'mac_xcuitest_bundle targets cannot have type none (target "%s")' %
+          target_name)
       assert not is_xctest, (
           'mac_xctest_bundle targets cannot have type none (target "%s")' %
           target_name)
@@ -703,11 +782,16 @@ def GenerateOutput(target_list, target_dicts, data, params):
     # and is made a dependency of this target. This way the work is done
     # before the dependency checks for what should be recompiled.
     support_xct = None
-    if type != 'none' and (spec_actions or spec_rules):
+    # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
+    # logic all happens in ninja. Don't bother creating the extra targets in
+    # that case.
+    if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
       support_xccl = CreateXCConfigurationList(configuration_names);
+      support_target_suffix = generator_flags.get(
+          'support_target_suffix', ' Support')
       support_target_properties = {
         'buildConfigurationList': support_xccl,
-        'name': target_name + ' Support',
+        'name': target_name + support_target_suffix,
       }
       if target_product_name:
         support_target_properties['productName'] = \
@@ -1096,6 +1180,9 @@ exit 1
         # Relative paths are relative to $(SRCROOT).
         dest = '$(SRCROOT)/' + dest

+      code_sign = int(copy_group.get('xcode_code_sign', 0))
+      settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign];
+
       # Coalesce multiple "copies" sections in the same target with the same
       # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
       # they'll wind up with ID collisions.
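Note: the (None, '...')[code_sign] expression is an old-style conditional, indexing a two-element tuple with a 0/1 flag, so settings is None unless xcode_code_sign is set:

    code_sign = 1   # from copy_group.get('xcode_code_sign', 0)
    settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
    assert settings == '{ATTRIBUTES = (CodeSignOnCopy, ); }'
    # with code_sign == 0 the same expression picks None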
@@ -1114,7 +1201,7 @@ exit 1
         pbxcp_dict[dest] = pbxcp

       for file in copy_group['files']:
-        pbxcp.AddFile(file)
+        pbxcp.AddFile(file, settings)

       # Excluded files can also go into the project file.
       if not skip_excluded_files: