Source release v3.0.0-0-g8d3792b-ce + third_party
Change-Id: I399e71ddfffcd436171d1c60283c63ab4658e0b1
third_party/gyp/MSVSNew.py (vendored, new file)
@@ -0,0 +1,340 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""New implementation of Visual Studio project generation."""

import os
import random

import gyp.common

# hashlib is supplied as of Python 2.5 as the replacement interface for md5
# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
# preserving 2.4 compatibility.
try:
  import hashlib
  _new_md5 = hashlib.md5
except ImportError:
  import md5
  _new_md5 = md5.new


# Initialize random number generator
random.seed()

# GUIDs for project types
ENTRY_TYPE_GUIDS = {
    'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
    'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
}

#------------------------------------------------------------------------------
# Helper functions


def MakeGuid(name, seed='msvs_new'):
  """Returns a GUID for the specified target name.

  Args:
    name: Target name.
    seed: Seed for MD5 hash.
  Returns:
    A GUID-like string calculated from the name and seed.

  This generates something which looks like a GUID, but depends only on the
  name and seed. This means the same name/seed will always generate the same
  GUID, so that projects and solutions which refer to each other can determine
  the GUID to refer to explicitly. It also means that the GUID will not change
  when the project for a target is rebuilt.
  """
  # Calculate a MD5 signature for the seed and name.
  d = _new_md5(str(seed) + str(name)).hexdigest().upper()
  # Convert most of the signature to GUID form (discard the rest)
  guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
          + '-' + d[20:32] + '}')
  return guid

#------------------------------------------------------------------------------


class MSVSSolutionEntry(object):
  def __cmp__(self, other):
    # Sort by name then guid (so things are in order on vs2008).
    return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))


class MSVSFolder(MSVSSolutionEntry):
  """Folder in a Visual Studio project or solution."""

  def __init__(self, path, name = None, entries = None,
               guid = None, items = None):
    """Initializes the folder.

    Args:
      path: Full path to the folder.
      name: Name of the folder.
      entries: List of folder entries to nest inside this folder. May contain
          Folder or Project objects. May be None, if the folder is empty.
      guid: GUID to use for folder, if not None.
      items: List of solution items to include in the folder project. May be
          None, if the folder does not directly contain items.
    """
    if name:
      self.name = name
    else:
      # Use last layer.
      self.name = os.path.basename(path)

    self.path = path
    self.guid = guid

    # Copy passed lists (or set to empty lists)
    self.entries = sorted(list(entries or []))
    self.items = list(items or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']

  def get_guid(self):
    if self.guid is None:
      # Use consistent guids for folders (so things don't regenerate).
      self.guid = MakeGuid(self.path, seed='msvs_folder')
    return self.guid


#------------------------------------------------------------------------------


class MSVSProject(MSVSSolutionEntry):
  """Visual Studio project."""

  def __init__(self, path, name = None, dependencies = None, guid = None,
               spec = None, build_file = None, config_platform_overrides = None,
               fixpath_prefix = None):
    """Initializes the project.

    Args:
      path: Absolute path to the project file.
      name: Name of project. If None, the name will be the same as the base
          name of the project file.
      dependencies: List of other Project objects this project is dependent
          upon, if not None.
      guid: GUID to use for project, if not None.
      spec: Dictionary specifying how to build this project.
      build_file: Filename of the .gyp file that the vcproj file comes from.
      config_platform_overrides: optional dict of configuration platforms to
          use in place of the default for this target.
      fixpath_prefix: the path used to adjust the behavior of _fixpath
    """
    self.path = path
    self.guid = guid
    self.spec = spec
    self.build_file = build_file
    # Use project filename if name not specified
    self.name = name or os.path.splitext(os.path.basename(path))[0]

    # Copy passed lists (or set to empty lists)
    self.dependencies = list(dependencies or [])

    self.entry_type_guid = ENTRY_TYPE_GUIDS['project']

    if config_platform_overrides:
      self.config_platform_overrides = config_platform_overrides
    else:
      self.config_platform_overrides = {}
    self.fixpath_prefix = fixpath_prefix
    self.msbuild_toolset = None

  def set_dependencies(self, dependencies):
    self.dependencies = list(dependencies or [])

  def get_guid(self):
    if self.guid is None:
      # Set GUID from path
      # TODO(rspangler): This is fragile.
      # 1. We can't just use the project filename sans path, since there could
      #    be multiple projects with the same base name (for example,
      #    foo/unittest.vcproj and bar/unittest.vcproj).
      # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
      #    GUID is the same whether it's included from base/base.sln or
      #    foo/bar/baz/baz.sln.
      # 3. The GUID needs to be the same each time this builder is invoked, so
      #    that we don't need to rebuild the solution when the project changes.
      # 4. We should be able to handle pre-built project files by reading the
      #    GUID from the files.
      self.guid = MakeGuid(self.name)
    return self.guid

  def set_msbuild_toolset(self, msbuild_toolset):
    self.msbuild_toolset = msbuild_toolset

#------------------------------------------------------------------------------


class MSVSSolution:
  """Visual Studio solution."""

  def __init__(self, path, version, entries=None, variants=None,
               websiteProperties=True):
    """Initializes the solution.

    Args:
      path: Path to solution file.
      version: Format version to emit.
      entries: List of entries in solution. May contain Folder or Project
          objects. May be None, if the folder is empty.
      variants: List of build variant strings. If none, a default list will
          be used.
      websiteProperties: Flag to decide if the website properties section
          is generated.
    """
    self.path = path
    self.websiteProperties = websiteProperties
    self.version = version

    # Copy passed lists (or set to empty lists)
    self.entries = list(entries or [])

    if variants:
      # Copy passed list
      self.variants = variants[:]
    else:
      # Use default
      self.variants = ['Debug|Win32', 'Release|Win32']
    # TODO(rspangler): Need to be able to handle a mapping of solution config
    # to project config. Should we be able to handle variants being a dict,
    # or add a separate variant_map variable? If it's a dict, we can't
    # guarantee the order of variants since dict keys aren't ordered.


    # TODO(rspangler): Automatically write to disk for now; should delay until
    # node-evaluation time.
    self.Write()


  def Write(self, writer=gyp.common.WriteOnDiff):
    """Writes the solution file to disk.

    Raises:
      IndexError: An entry appears multiple times.
    """
    # Walk the entry tree and collect all the folders and projects.
    all_entries = set()
    entries_to_check = self.entries[:]
    while entries_to_check:
      e = entries_to_check.pop(0)

      # If this entry has been visited, nothing to do.
      if e in all_entries:
        continue

      all_entries.add(e)

      # If this is a folder, check its entries too.
      if isinstance(e, MSVSFolder):
        entries_to_check += e.entries

    all_entries = sorted(all_entries)

    # Open file and print header
    f = writer(self.path)
    f.write('Microsoft Visual Studio Solution File, '
            'Format Version %s\r\n' % self.version.SolutionVersion())
    f.write('# %s\r\n' % self.version.Description())

    # Project entries
    sln_root = os.path.split(self.path)[0]
    for e in all_entries:
      relative_path = gyp.common.RelativePath(e.path, sln_root)
      # msbuild does not accept an empty folder_name.
      # use '.' in case relative_path is empty.
      folder_name = relative_path.replace('/', '\\') or '.'
      f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
          e.entry_type_guid,          # Entry type GUID
          e.name,                     # Folder name
          folder_name,                # Folder name (again)
          e.get_guid(),               # Entry GUID
      ))

      # TODO(rspangler): Need a way to configure this stuff
      if self.websiteProperties:
        f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
                '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
                '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
                '\tEndProjectSection\r\n')

      if isinstance(e, MSVSFolder):
        if e.items:
          f.write('\tProjectSection(SolutionItems) = preProject\r\n')
          for i in e.items:
            f.write('\t\t%s = %s\r\n' % (i, i))
          f.write('\tEndProjectSection\r\n')

      if isinstance(e, MSVSProject):
        if e.dependencies:
          f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
          for d in e.dependencies:
            f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
          f.write('\tEndProjectSection\r\n')

      f.write('EndProject\r\n')

    # Global section
    f.write('Global\r\n')

    # Configurations (variants)
    f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
    for v in self.variants:
      f.write('\t\t%s = %s\r\n' % (v, v))
    f.write('\tEndGlobalSection\r\n')

    # Sort config guids for easier diffing of solution changes.
    config_guids = []
    config_guids_overrides = {}
    for e in all_entries:
      if isinstance(e, MSVSProject):
        config_guids.append(e.get_guid())
        config_guids_overrides[e.get_guid()] = e.config_platform_overrides
    config_guids.sort()

    f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
    for g in config_guids:
      for v in self.variants:
        nv = config_guids_overrides[g].get(v, v)
        # Pick which project configuration to build for this solution
        # configuration.
        f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))

        # Enable project in this solution configuration.
        f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
            g,              # Project GUID
            v,              # Solution build configuration
            nv,             # Project build config for that solution config
        ))
    f.write('\tEndGlobalSection\r\n')

    # TODO(rspangler): Should be able to configure this stuff too (though I've
    # never seen this be any different)
    f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
    f.write('\t\tHideSolutionNode = FALSE\r\n')
    f.write('\tEndGlobalSection\r\n')

    # Folder mappings
    # Omit this section if there are no folders
    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
      f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
      for e in all_entries:
        if not isinstance(e, MSVSFolder):
          continue        # Does not apply to projects, only folders
        for subentry in e.entries:
          f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
      f.write('\tEndGlobalSection\r\n')

    f.write('EndGlobal\r\n')

    f.close()
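A minimal usage sketch of the module above (an editorial addition, not part of the vendored diff; it assumes the full gyp package, including gyp.common, is importable, and the paths and target names are hypothetical):

    import gyp.MSVSNew as MSVSNew
    import gyp.MSVSVersion as MSVSVersion

    version = MSVSVersion.SelectVisualStudioVersion('2008')
    # MakeGuid is deterministic, so regenerating a project never churns GUIDs.
    assert MSVSNew.MakeGuid('foo') == MSVSNew.MakeGuid('foo')
    project = MSVSNew.MSVSProject('C:\\src\\foo\\foo.vcproj', name='foo')
    # Per the TODO above, MSVSSolution.Write() currently runs from __init__,
    # so constructing the solution already writes foo.sln.
    solution = MSVSNew.MSVSSolution('C:\\src\\foo\\foo.sln', version,
                                    entries=[project])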
third_party/gyp/MSVSProject.py (vendored, new file)
@@ -0,0 +1,208 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.common
import gyp.easy_xml as easy_xml

#------------------------------------------------------------------------------


class Tool(object):
  """Visual Studio tool."""

  def __init__(self, name, attrs=None):
    """Initializes the tool.

    Args:
      name: Tool name.
      attrs: Dict of tool attributes; may be None.
    """
    self._attrs = attrs or {}
    self._attrs['Name'] = name

  def _GetSpecification(self):
    """Creates an element for the tool.

    Returns:
      A new xml.dom.Element for the tool.
    """
    return ['Tool', self._attrs]

class Filter(object):
  """Visual Studio filter - that is, a virtual folder."""

  def __init__(self, name, contents=None):
    """Initializes the folder.

    Args:
      name: Filter (folder) name.
      contents: List of filenames and/or Filter objects contained.
    """
    self.name = name
    self.contents = list(contents or [])


#------------------------------------------------------------------------------


class Writer(object):
  """Visual Studio XML project writer."""

  def __init__(self, project_path, version, name, guid=None, platforms=None):
    """Initializes the project.

    Args:
      project_path: Path to the project file.
      version: Format version to emit.
      name: Name of the project.
      guid: GUID to use for project, if not None.
      platforms: Array of strings, the supported platforms. If None, ['Win32'].
    """
    self.project_path = project_path
    self.version = version
    self.name = name
    self.guid = guid

    # Default to Win32 for platforms.
    if not platforms:
      platforms = ['Win32']

    # Initialize the specifications of the various sections.
    self.platform_section = ['Platforms']
    for platform in platforms:
      self.platform_section.append(['Platform', {'Name': platform}])
    self.tool_files_section = ['ToolFiles']
    self.configurations_section = ['Configurations']
    self.files_section = ['Files']

    # Keep a dict keyed on filename to speed up access.
    self.files_dict = dict()

  def AddToolFile(self, path):
    """Adds a tool file to the project.

    Args:
      path: Relative path from project to tool file.
    """
    self.tool_files_section.append(['ToolFile', {'RelativePath': path}])

  def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
    """Returns the specification for a configuration.

    Args:
      config_type: Type of configuration node.
      config_name: Configuration name.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.
    Returns:
      A list-form XML specification for the configuration node.
    """
    # Handle defaults
    if not attrs:
      attrs = {}
    if not tools:
      tools = []

    # Add configuration node and its attributes
    node_attrs = attrs.copy()
    node_attrs['Name'] = config_name
    specification = [config_type, node_attrs]

    # Add tool nodes and their attributes
    if tools:
      for t in tools:
        if isinstance(t, Tool):
          specification.append(t._GetSpecification())
        else:
          specification.append(Tool(t)._GetSpecification())
    return specification


  def AddConfig(self, name, attrs=None, tools=None):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.
    """
    spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
    self.configurations_section.append(spec)

  def _AddFilesToNode(self, parent, files):
    """Adds files and/or filters to the parent node.

    Args:
      parent: Destination node
      files: A list of Filter objects and/or relative paths to files.

    Will call itself recursively, if the files list contains Filter objects.
    """
    for f in files:
      if isinstance(f, Filter):
        node = ['Filter', {'Name': f.name}]
        self._AddFilesToNode(node, f.contents)
      else:
        node = ['File', {'RelativePath': f}]
        self.files_dict[f] = node
      parent.append(node)

  def AddFiles(self, files):
    """Adds files to the project.

    Args:
      files: A list of Filter objects and/or relative paths to files.

    This makes a copy of the file/filter tree at the time of this call. If you
    later add files to a Filter object which was passed into a previous call
    to AddFiles(), it will not be reflected in this project.
    """
    self._AddFilesToNode(self.files_section, files)
    # TODO(rspangler) This also doesn't handle adding files to an existing
    # filter. That is, it doesn't merge the trees.

  def AddFileConfig(self, path, config, attrs=None, tools=None):
    """Adds a configuration to a file.

    Args:
      path: Relative path to the file.
      config: Name of configuration to add.
      attrs: Dict of configuration attributes; may be None.
      tools: List of tools (strings or Tool objects); may be None.

    Raises:
      ValueError: Relative path does not match any file added via AddFiles().
    """
    # Find the file node with the right relative path
    parent = self.files_dict.get(path)
    if not parent:
      raise ValueError('AddFileConfig: file "%s" not in project.' % path)

    # Add the config to the file node
    spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
                                         tools)
    parent.append(spec)

  def WriteIfChanged(self):
    """Writes the project file."""
    # First create XML content definition
    content = [
        'VisualStudioProject',
        {'ProjectType': 'Visual C++',
         'Version': self.version.ProjectVersion(),
         'Name': self.name,
         'ProjectGUID': self.guid,
         'RootNamespace': self.name,
         'Keyword': 'Win32Proj'
        },
        self.platform_section,
        self.tool_files_section,
        self.configurations_section,
        ['References'],  # empty section
        self.files_section,
        ['Globals']  # empty section
    ]
    easy_xml.WriteXmlIfChanged(content, self.project_path,
                               encoding="Windows-1252")
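A usage sketch for the vcproj writer above (editorial, not part of the diff; 'version' is assumed to be a VisualStudioVersion object from MSVSVersion, and the attribute values are illustrative only):

    import gyp.MSVSNew as MSVSNew
    import gyp.MSVSProject as MSVSProject

    writer = MSVSProject.Writer('foo.vcproj', version, 'foo',
                                guid=MSVSNew.MakeGuid('foo'))
    writer.AddConfig('Debug|Win32',
                     attrs={'ConfigurationType': '1'},
                     tools=[MSVSProject.Tool('VCCLCompilerTool',
                                             {'Optimization': '0'})])
    writer.AddFiles([MSVSProject.Filter('src', contents=['src\\main.cc'])])
    # Per-file settings must name a path previously registered via AddFiles().
    writer.AddFileConfig('src\\main.cc', 'Debug|Win32',
                         tools=['VCCLCompilerTool'])
    writer.WriteIfChanged()

Nothing is serialized until WriteIfChanged(): the Add* calls only build up list-form XML sections, and easy_xml rewrites the file only when the rendered content changes.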
third_party/gyp/MSVSSettings.py (vendored, new file, 1063 lines)
File diff suppressed because it is too large.

third_party/gyp/MSVSSettings_test.py (vendored, new executable file, 1483 lines)
File diff suppressed because it is too large.
third_party/gyp/MSVSToolFile.py (vendored, new file)
@@ -0,0 +1,58 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.common
import gyp.easy_xml as easy_xml


class Writer(object):
  """Visual Studio XML tool file writer."""

  def __init__(self, tool_file_path, name):
    """Initializes the tool file.

    Args:
      tool_file_path: Path to the tool file.
      name: Name of the tool file.
    """
    self.tool_file_path = tool_file_path
    self.name = name
    self.rules_section = ['Rules']

  def AddCustomBuildRule(self, name, cmd, description,
                         additional_dependencies,
                         outputs, extensions):
    """Adds a rule to the tool file.

    Args:
      name: Name of the rule.
      description: Description of the rule.
      cmd: Command line of the rule.
      additional_dependencies: other files which may trigger the rule.
      outputs: outputs of the rule.
      extensions: extensions handled by the rule.
    """
    rule = ['CustomBuildRule',
            {'Name': name,
             'ExecutionDescription': description,
             'CommandLine': cmd,
             'Outputs': ';'.join(outputs),
             'FileExtensions': ';'.join(extensions),
             'AdditionalDependencies':
                 ';'.join(additional_dependencies)
            }]
    self.rules_section.append(rule)

  def WriteIfChanged(self):
    """Writes the tool file."""
    content = ['VisualStudioToolFile',
               {'Version': '8.00',
                'Name': self.name
               },
               self.rules_section
               ]
    easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
                               encoding="Windows-1252")
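A short sketch of driving the tool-file writer (editorial, not part of the diff; the rule values are hypothetical MSVS macros used purely for illustration):

    import gyp.MSVSToolFile as MSVSToolFile

    tool_file = MSVSToolFile.Writer('foo.rules', 'foo')
    tool_file.AddCustomBuildRule(
        name='Assemble',
        cmd='ml /c /Fo"$(IntDir)\\$(InputName).obj" "$(InputPath)"',
        description='Assembling $(InputFileName)...',
        additional_dependencies=[],
        outputs=['$(IntDir)\\$(InputName).obj'],
        extensions=['asm'])
    tool_file.WriteIfChanged()

Each list-valued field (outputs, extensions, additional_dependencies) is joined with ';' to match the attribute format Visual Studio expects.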
third_party/gyp/MSVSUserFile.py (vendored, new file)
@@ -0,0 +1,147 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio user preferences file writer."""

import os
import re
import socket # for gethostname

import gyp.common
import gyp.easy_xml as easy_xml


#------------------------------------------------------------------------------

def _FindCommandInPath(command):
  """If there are no slashes in the command given, this function
     searches the PATH env to find the given command, and converts it
     to an absolute path. We have to do this because MSVS is looking
     for an actual file to launch a debugger on, not just a command
     line. Note that this happens at GYP time, so anything needing to
     be built needs to have a full path."""
  if '/' in command or '\\' in command:
    # If the command already has path elements (either relative or
    # absolute), then assume it is constructed properly.
    return command
  else:
    # Search through the path list and find an existing file that
    # we can access.
    paths = os.environ.get('PATH','').split(os.pathsep)
    for path in paths:
      item = os.path.join(path, command)
      if os.path.isfile(item) and os.access(item, os.X_OK):
        return item
  return command

def _QuoteWin32CommandLineArgs(args):
  new_args = []
  for arg in args:
    # Replace all double-quotes with double-double-quotes to escape
    # them for cmd shell, and then quote the whole thing if there
    # are any.
    if arg.find('"') != -1:
      arg = '""'.join(arg.split('"'))
      arg = '"%s"' % arg

    # Otherwise, if there are any spaces, quote the whole arg.
    elif re.search(r'[ \t\n]', arg):
      arg = '"%s"' % arg
    new_args.append(arg)
  return new_args

class Writer(object):
  """Visual Studio XML user file writer."""

  def __init__(self, user_file_path, version, name):
    """Initializes the user file.

    Args:
      user_file_path: Path to the user file.
      version: Version info.
      name: Name of the user file.
    """
    self.user_file_path = user_file_path
    self.version = version
    self.name = name
    self.configurations = {}

  def AddConfig(self, name):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
    """
    self.configurations[name] = ['Configuration', {'Name': name}]

  def AddDebugSettings(self, config_name, command, environment = {},
                       working_directory=""):
    """Adds a DebugSettings node to the user file for a particular config.

    Args:
      config_name: Name of the configuration to add the settings to.
      command: command line to run. First element in the list is the
        executable. All elements of the command will be quoted if
        necessary.
      working_directory: directory in which to run the command. (optional)
    """
    command = _QuoteWin32CommandLineArgs(command)

    abs_command = _FindCommandInPath(command[0])

    if environment and isinstance(environment, dict):
      env_list = ['%s="%s"' % (key, val)
                  for (key,val) in environment.iteritems()]
      environment = ' '.join(env_list)
    else:
      environment = ''

    n_cmd = ['DebugSettings',
             {'Command': abs_command,
              'WorkingDirectory': working_directory,
              'CommandArguments': " ".join(command[1:]),
              'RemoteMachine': socket.gethostname(),
              'Environment': environment,
              'EnvironmentMerge': 'true',
              # Currently these are all "dummy" values that we're just setting
              # in the default manner that MSVS does it. We could use some of
              # these to add additional capabilities, I suppose, but they might
              # not have parity with other platforms then.
              'Attach': 'false',
              'DebuggerType': '3',  # 'auto' debugger
              'Remote': '1',
              'RemoteCommand': '',
              'HttpUrl': '',
              'PDBPath': '',
              'SQLDebugging': '',
              'DebuggerFlavor': '0',
              'MPIRunCommand': '',
              'MPIRunArguments': '',
              'MPIRunWorkingDirectory': '',
              'ApplicationCommand': '',
              'ApplicationArguments': '',
              'ShimCommand': '',
              'MPIAcceptMode': '',
              'MPIAcceptFilter': ''
             }]

    # Find the config, and add it if it doesn't exist.
    if config_name not in self.configurations:
      self.AddConfig(config_name)

    # Add the DebugSettings onto the appropriate config.
    self.configurations[config_name].append(n_cmd)

  def WriteIfChanged(self):
    """Writes the user file."""
    configs = ['Configurations']
    for config, spec in sorted(self.configurations.iteritems()):
      configs.append(spec)

    content = ['VisualStudioUserFile',
               {'Version': self.version.ProjectVersion(),
                'Name': self.name
               },
               configs]
    easy_xml.WriteXmlIfChanged(content, self.user_file_path,
                               encoding="Windows-1252")
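A sketch of emitting debugger settings with the writer above (editorial; 'version' is again a VisualStudioVersion object, and the command, environment, and file names are hypothetical):

    import gyp.MSVSUserFile as MSVSUserFile

    user_file = MSVSUserFile.Writer('foo.vcproj.user', version, 'foo')
    # The command list is quoted for cmd.exe and the executable is resolved
    # against PATH, since MSVS needs an actual file to attach a debugger to.
    user_file.AddDebugSettings('Debug|Win32',
                               ['foo_tests.exe', '--gtest_filter=Foo.*'],
                               environment={'FOO_LOG': '1'},
                               working_directory='$(OutDir)')
    user_file.WriteIfChanged()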
third_party/gyp/MSVSUtil.py (vendored, new file)
@@ -0,0 +1,267 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions shared amongst the Windows generators."""

import copy
import os


_TARGET_TYPE_EXT = {
    'executable': '.exe',
    'loadable_module': '.dll',
    'shared_library': '.dll',
}


def _GetLargePdbShimCcPath():
  """Returns the path of the large_pdb_shim.cc file."""
  this_dir = os.path.abspath(os.path.dirname(__file__))
  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
  win_data_dir = os.path.join(src_dir, 'data', 'win')
  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
  return large_pdb_shim_cc


def _DeepCopySomeKeys(in_dict, keys):
  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.

  Arguments:
    in_dict: The dictionary to copy.
    keys: The keys to be copied. If a key is in this list and doesn't exist in
        |in_dict| this is not an error.
  Returns:
    The partially deep-copied dictionary.
  """
  d = {}
  for key in keys:
    if key not in in_dict:
      continue
    d[key] = copy.deepcopy(in_dict[key])
  return d


def _SuffixName(name, suffix):
  """Add a suffix to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    suffix: the suffix to be added
  Returns:
    Target name with suffix added (foo_suffix#target)
  """
  parts = name.rsplit('#', 1)
  parts[0] = '%s_%s' % (parts[0], suffix)
  return '#'.join(parts)


def _ShardName(name, number):
  """Add a shard number to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    number: shard number
  Returns:
    Target name with shard added (foo_1#target)
  """
  return _SuffixName(name, str(number))


def ShardTargets(target_list, target_dicts):
  """Shard some targets apart to work around the linker's limits.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
  Returns:
    Tuple of the new sharded versions of the inputs.
  """
  # Gather the targets to shard, and how many pieces.
  targets_to_shard = {}
  for t in target_dicts:
    shards = int(target_dicts[t].get('msvs_shard', 0))
    if shards:
      targets_to_shard[t] = shards
  # Shard target_list.
  new_target_list = []
  for t in target_list:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        new_target_list.append(_ShardName(t, i))
    else:
      new_target_list.append(t)
  # Shard target_dict.
  new_target_dicts = {}
  for t in target_dicts:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        name = _ShardName(t, i)
        new_target_dicts[name] = copy.copy(target_dicts[t])
        new_target_dicts[name]['target_name'] = _ShardName(
             new_target_dicts[name]['target_name'], i)
        sources = new_target_dicts[name].get('sources', [])
        new_sources = []
        for pos in range(i, len(sources), targets_to_shard[t]):
          new_sources.append(sources[pos])
        new_target_dicts[name]['sources'] = new_sources
    else:
      new_target_dicts[t] = target_dicts[t]
  # Shard dependencies.
  for t in new_target_dicts:
    dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
    new_dependencies = []
    for d in dependencies:
      if d in targets_to_shard:
        for i in range(targets_to_shard[d]):
          new_dependencies.append(_ShardName(d, i))
      else:
        new_dependencies.append(d)
    new_target_dicts[t]['dependencies'] = new_dependencies

  return (new_target_list, new_target_dicts)


def _GetPdbPath(target_dict, config_name, vars):
  """Returns the path to the PDB file that will be generated by a given
  configuration.

  The lookup proceeds as follows:
    - Look for an explicit path in the VCLinkerTool configuration block.
    - Look for an 'msvs_large_pdb_path' variable.
    - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
      specified.
    - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.

  Arguments:
    target_dict: The target dictionary to be searched.
    config_name: The name of the configuration of interest.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    The path of the corresponding PDB file.
  """
  config = target_dict['configurations'][config_name]
  msvs = config.setdefault('msvs_settings', {})

  linker = msvs.get('VCLinkerTool', {})

  pdb_path = linker.get('ProgramDatabaseFile')
  if pdb_path:
    return pdb_path

  variables = target_dict.get('variables', {})
  pdb_path = variables.get('msvs_large_pdb_path', None)
  if pdb_path:
    return pdb_path


  pdb_base = target_dict.get('product_name', target_dict['target_name'])
  pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
  pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base

  return pdb_path


def InsertLargePdbShims(target_list, target_dicts, vars):
  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

  This is a workaround for targets with PDBs greater than 1GB in size, the
  limit for the 1KB pagesize PDBs created by the linker by default.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    Tuple of the shimmed version of the inputs.
  """
  # Determine which targets need shimming.
  targets_to_shim = []
  for t in target_dicts:
    target_dict = target_dicts[t]

    # We only want to shim targets that have msvs_large_pdb enabled.
    if not int(target_dict.get('msvs_large_pdb', 0)):
      continue
    # This is intended for executable, shared_library and loadable_module
    # targets where every configuration is set up to produce a PDB output.
    # If any of these conditions is not true then the shim logic will fail
    # below.
    targets_to_shim.append(t)

  large_pdb_shim_cc = _GetLargePdbShimCcPath()

  for t in targets_to_shim:
    target_dict = target_dicts[t]
    target_name = target_dict.get('target_name')

    base_dict = _DeepCopySomeKeys(target_dict,
          ['configurations', 'default_configuration', 'toolset'])

    # This is the dict for copying the source file (part of the GYP tree)
    # to the intermediate directory of the project. This is necessary because
    # we can't always build a relative path to the shim source file (on Windows
    # GYP and the project may be on different drives), and Ninja hates absolute
    # paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYP's tree).
    copy_suffix = 'large_pdb_copy'
    copy_target_name = target_name + '_' + copy_suffix
    full_copy_target_name = _SuffixName(t, copy_suffix)
    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
    copy_dict = copy.deepcopy(base_dict)
    copy_dict['target_name'] = copy_target_name
    copy_dict['type'] = 'none'
    copy_dict['sources'] = [ large_pdb_shim_cc ]
    copy_dict['copies'] = [{
      'destination': shim_cc_dir,
      'files': [ large_pdb_shim_cc ]
    }]

    # This is the dict for the PDB generating shim target. It depends on the
    # copy target.
    shim_suffix = 'large_pdb_shim'
    shim_target_name = target_name + '_' + shim_suffix
    full_shim_target_name = _SuffixName(t, shim_suffix)
    shim_dict = copy.deepcopy(base_dict)
    shim_dict['target_name'] = shim_target_name
    shim_dict['type'] = 'static_library'
    shim_dict['sources'] = [ shim_cc_path ]
    shim_dict['dependencies'] = [ full_copy_target_name ]

    # Set up the shim to output its PDB to the same location as the final
    # linker target.
    for config_name, config in shim_dict.get('configurations').iteritems():
      pdb_path = _GetPdbPath(target_dict, config_name, vars)

      # A few keys that we don't want to propagate.
      for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
        config.pop(key, None)

      msvs = config.setdefault('msvs_settings', {})

      # Update the compiler directives in the shim target.
      compiler = msvs.setdefault('VCCLCompilerTool', {})
      compiler['DebugInformationFormat'] = '3'
      compiler['ProgramDataBaseFileName'] = pdb_path

      # Set the explicit PDB path in the appropriate configuration of the
      # original target.
      config = target_dict['configurations'][config_name]
      msvs = config.setdefault('msvs_settings', {})
      linker = msvs.setdefault('VCLinkerTool', {})
      linker['GenerateDebugInformation'] = 'true'
      linker['ProgramDatabaseFile'] = pdb_path

    # Add the new targets. They must go to the beginning of the list so that
    # the dependency generation works as expected in ninja.
    target_list.insert(0, full_copy_target_name)
    target_list.insert(0, full_shim_target_name)
    target_dicts[full_copy_target_name] = copy_dict
    target_dicts[full_shim_target_name] = shim_dict

    # Update the original target to depend on the shim target.
    target_dict.setdefault('dependencies', []).append(full_shim_target_name)

  return (target_list, target_dicts)
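To make the sharding behavior concrete, here is a small worked example (editorial, not part of the vendored file): a target that sets 'msvs_shard': 2 is split into foo_0 and foo_1, its sources are dealt round-robin between the shards, and dependency edges from other targets are rewritten to point at every shard.

    import gyp.MSVSUtil as MSVSUtil

    target_list = ['base/base.gyp:foo#target']
    target_dicts = {
        'base/base.gyp:foo#target': {
            'target_name': 'foo',
            'msvs_shard': 2,
            'sources': ['a.cc', 'b.cc', 'c.cc', 'd.cc'],
        },
    }
    new_list, new_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
    # new_list: ['base/base.gyp:foo_0#target', 'base/base.gyp:foo_1#target']
    # shard 0 gets sources ['a.cc', 'c.cc']; shard 1 gets ['b.cc', 'd.cc']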
third_party/gyp/MSVSVersion.py (vendored, new file)
@@ -0,0 +1,409 @@
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Handle version information related to Visual Studio."""

import errno
import os
import re
import subprocess
import sys
import gyp
import glob


class VisualStudioVersion(object):
  """Information regarding a version of Visual Studio."""

  def __init__(self, short_name, description,
               solution_version, project_version, flat_sln, uses_vcxproj,
               path, sdk_based, default_toolset=None):
    self.short_name = short_name
    self.description = description
    self.solution_version = solution_version
    self.project_version = project_version
    self.flat_sln = flat_sln
    self.uses_vcxproj = uses_vcxproj
    self.path = path
    self.sdk_based = sdk_based
    self.default_toolset = default_toolset

  def ShortName(self):
    return self.short_name

  def Description(self):
    """Get the full description of the version."""
    return self.description

  def SolutionVersion(self):
    """Get the version number of the sln files."""
    return self.solution_version

  def ProjectVersion(self):
    """Get the version number of the vcproj or vcxproj files."""
    return self.project_version

  def FlatSolution(self):
    return self.flat_sln

  def UsesVcxproj(self):
    """Returns true if this version uses a vcxproj file."""
    return self.uses_vcxproj

  def ProjectExtension(self):
    """Returns the file extension for the project."""
    return self.uses_vcxproj and '.vcxproj' or '.vcproj'

  def Path(self):
    """Returns the path to Visual Studio installation."""
    return self.path

  def ToolPath(self, tool):
    """Returns the path to a given compiler tool."""
    return os.path.normpath(os.path.join(self.path, "VC/bin", tool))

  def DefaultToolset(self):
    """Returns the msbuild toolset version that will be used in the absence
    of a user override."""
    return self.default_toolset

  def SetupScript(self, target_arch):
    """Returns a command (with arguments) to be used to set up the
    environment."""
    # Check if we are running in the SDK command line environment and use
    # the setup script from the SDK if so. |target_arch| should be either
    # 'x86' or 'x64'.
    assert target_arch in ('x86', 'x64')
    sdk_dir = os.environ.get('WindowsSDKDir')
    if self.sdk_based and sdk_dir:
      return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
              '/' + target_arch]
    else:
      # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
      # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
      # isn't always.
      if target_arch == 'x86':
        if self.short_name == '2013' and (
            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
          # VS2013 non-Express has a x64-x86 cross that we want to prefer.
          return [os.path.normpath(
             os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
        # Otherwise, the standard x86 compiler.
        return [os.path.normpath(
          os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
      else:
        assert target_arch == 'x64'
        arg = 'x86_amd64'
        # Use the 64-on-64 compiler if we're not using an express
        # edition and we're running on a 64bit OS.
        if self.short_name[-1] != 'e' and (
            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
          arg = 'amd64'
        return [os.path.normpath(
            os.path.join(self.path, 'VC/vcvarsall.bat')), arg]


def _RegistryQueryBase(sysdir, key, value):
  """Use reg.exe to read a particular key.

  While ideally we might use the win32 module, we would like gyp to be
  python neutral, so for instance cygwin python lacks this module.

  Arguments:
    sysdir: The system subdirectory to attempt to launch reg.exe from.
    key: The registry key to read from.
    value: The particular value to read.
  Return:
    stdout from reg.exe, or None for failure.
  """
  # Skip if not on Windows or Python Win32 setup issue
  if sys.platform not in ('win32', 'cygwin'):
    return None
  # Setup params to pass to and attempt to launch reg.exe
  cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
         'query', key]
  if value:
    cmd.extend(['/v', value])
  p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
  # Note that the error text may be in [1] in some cases
  text = p.communicate()[0]
  # Check return code from reg.exe; officially 0==success and 1==error
  if p.returncode:
    return None
  return text


def _RegistryQuery(key, value=None):
  """Use reg.exe to read a particular key through _RegistryQueryBase.

  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
  that fails, it falls back to System32. Sysnative is available on Vista and
  up and available on Windows Server 2003 and XP through KB patch 942589. Note
  that Sysnative will always fail if using 64-bit python due to it being a
  virtual directory and System32 will work correctly in the first place.

  KB 942589 - http://support.microsoft.com/kb/942589/en-us.

  Arguments:
    key: The registry key.
    value: The particular registry value to read (optional).
  Return:
    stdout from reg.exe, or None for failure.
  """
  text = None
  try:
    text = _RegistryQueryBase('Sysnative', key, value)
  except OSError, e:
    if e.errno == errno.ENOENT:
      text = _RegistryQueryBase('System32', key, value)
    else:
      raise
  return text


def _RegistryGetValue(key, value):
  """Use reg.exe to obtain the value of a registry key.

  Args:
    key: The registry key.
    value: The particular registry value to read.
  Return:
    contents of the registry key's value, or None on failure.
  """
  text = _RegistryQuery(key, value)
  if not text:
    return None
  # Extract value.
  match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
  if not match:
    return None
  return match.group(1)


def _RegistryKeyExists(key):
  """Use reg.exe to see if a key exists.

  Args:
    key: The registry key to check.
  Return:
    True if the key exists
  """
  if not _RegistryQuery(key):
    return False
  return True


def _CreateVersion(name, path, sdk_based=False):
  """Sets up MSVS project generation.

  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version
  is passed in that doesn't match a value in versions, python will throw an
  error.
  """
  if path:
    path = os.path.normpath(path)
  versions = {
      '2013': VisualStudioVersion('2013',
                                  'Visual Studio 2013',
                                  solution_version='13.00',
                                  project_version='12.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v120'),
      '2013e': VisualStudioVersion('2013e',
                                   'Visual Studio 2013',
                                   solution_version='13.00',
                                   project_version='12.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v120'),
      '2012': VisualStudioVersion('2012',
                                  'Visual Studio 2012',
                                  solution_version='12.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based,
                                  default_toolset='v110'),
      '2012e': VisualStudioVersion('2012e',
                                   'Visual Studio 2012',
                                   solution_version='12.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based,
                                   default_toolset='v110'),
      '2010': VisualStudioVersion('2010',
                                  'Visual Studio 2010',
                                  solution_version='11.00',
                                  project_version='4.0',
                                  flat_sln=False,
                                  uses_vcxproj=True,
                                  path=path,
                                  sdk_based=sdk_based),
      '2010e': VisualStudioVersion('2010e',
                                   'Visual C++ Express 2010',
                                   solution_version='11.00',
                                   project_version='4.0',
                                   flat_sln=True,
                                   uses_vcxproj=True,
                                   path=path,
                                   sdk_based=sdk_based),
      '2008': VisualStudioVersion('2008',
                                  'Visual Studio 2008',
                                  solution_version='10.00',
                                  project_version='9.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2008e': VisualStudioVersion('2008e',
                                   'Visual Studio 2008',
                                   solution_version='10.00',
                                   project_version='9.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
      '2005': VisualStudioVersion('2005',
                                  'Visual Studio 2005',
                                  solution_version='9.00',
                                  project_version='8.00',
                                  flat_sln=False,
                                  uses_vcxproj=False,
                                  path=path,
                                  sdk_based=sdk_based),
      '2005e': VisualStudioVersion('2005e',
                                   'Visual Studio 2005',
                                   solution_version='9.00',
                                   project_version='8.00',
                                   flat_sln=True,
                                   uses_vcxproj=False,
                                   path=path,
                                   sdk_based=sdk_based),
  }
  return versions[str(name)]


def _ConvertToCygpath(path):
  """Convert to cygwin path if we are using cygwin."""
  if sys.platform == 'cygwin':
    p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
    path = p.communicate()[0].strip()
  return path


def _DetectVisualStudioVersions(versions_to_check, force_express):
  """Collect the list of installed visual studio versions.

  Returns:
    A list of visual studio versions installed in descending order of
    usage preference.
    Base this on the registry and a quick check if devenv.exe exists.
    Only versions 8-12 are considered.
    Possibilities are:
      2005(e) - Visual Studio 2005 (8)
      2008(e) - Visual Studio 2008 (9)
      2010(e) - Visual Studio 2010 (10)
      2012(e) - Visual Studio 2012 (11)
      2013(e) - Visual Studio 2013 (12)
    Where (e) is e for express editions of MSVS and blank otherwise.
  """
  version_to_year = {
      '8.0': '2005',
      '9.0': '2008',
      '10.0': '2010',
      '11.0': '2012',
      '12.0': '2013',
  }
  versions = []
  for version in versions_to_check:
    # Old method of searching for which VS version is installed
    # We don't use the 2010-encouraged-way because we also want to get the
    # path to the binaries, which it doesn't offer.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
            r'HKLM\Software\Microsoft\VCExpress\%s' % version,
            r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], 'InstallDir')
      if not path:
        continue
      path = _ConvertToCygpath(path)
      # Check for full.
      full_path = os.path.join(path, 'devenv.exe')
      express_path = os.path.join(path, '*express.exe')
      if not force_express and os.path.exists(full_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version],
            os.path.join(path, '..', '..')))
      # Check for express.
      elif glob.glob(express_path):
        # Add this one.
        versions.append(_CreateVersion(version_to_year[version] + 'e',
            os.path.join(path, '..', '..')))

    # The old method above does not work when only SDK is installed.
    keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
            r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
    for index in range(len(keys)):
      path = _RegistryGetValue(keys[index], version)
      if not path:
        continue
      path = _ConvertToCygpath(path)
      versions.append(_CreateVersion(version_to_year[version] + 'e',
          os.path.join(path, '..'), sdk_based=True))

  return versions


def SelectVisualStudioVersion(version='auto'):
  """Select which version of Visual Studio projects to generate.

  Arguments:
    version: Hook to allow caller to force a particular version (vs auto).
  Returns:
    An object representing a visual studio project format version.
  """
  # In auto mode, check environment variable for override.
  if version == 'auto':
    version = os.environ.get('GYP_MSVS_VERSION', 'auto')
  version_map = {
    'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
    '2005': ('8.0',),
    '2005e': ('8.0',),
    '2008': ('9.0',),
    '2008e': ('9.0',),
    '2010': ('10.0',),
    '2010e': ('10.0',),
    '2012': ('11.0',),
    '2012e': ('11.0',),
    '2013': ('12.0',),
    '2013e': ('12.0',),
  }
  override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
  if override_path:
    msvs_version = os.environ.get('GYP_MSVS_VERSION')
    if not msvs_version:
      raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
                       'set to a particular version (e.g. 2010e).')
    return _CreateVersion(msvs_version, override_path, sdk_based=True)
  version = str(version)
  versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
  if not versions:
    if version == 'auto':
      # Default to 2005 if we couldn't find anything
      return _CreateVersion('2005', None)
    else:
      return _CreateVersion(version, None)
  return versions[0]
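A sketch of the selection entry point above (editorial; results depend on what is installed, so the comments describe the fallback path on a machine with no Visual Studio and no GYP_MSVS_OVERRIDE_PATH set):

    import gyp.MSVSVersion as MSVSVersion

    # Registry probing only happens on win32/cygwin; elsewhere the helpers
    # return None and selection falls back to _CreateVersion(version, None).
    version = MSVSVersion.SelectVisualStudioVersion('2010e')
    version.ShortName()         # '2010e'
    version.ProjectExtension()  # '.vcxproj' (2010 uses MSBuild projects)
    version.FlatSolution()      # True (express editions use flat solutions)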
third_party/gyp/__init__.py (vendored, new executable file)
@@ -0,0 +1,537 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import copy
import gyp.input
import optparse
import os.path
import re
import shlex
import sys
import traceback
from gyp.common import GypError

# Default debug modes for GYP
debug = {}

# List of "official" debug modes, but you can use anything you like.
DEBUG_GENERAL = 'general'
DEBUG_VARIABLES = 'variables'
DEBUG_INCLUDES = 'includes'


def DebugOutput(mode, message, *args):
  if 'all' in gyp.debug or mode in gyp.debug:
    ctx = ('unknown', 0, 'unknown')
    try:
      f = traceback.extract_stack(limit=2)
      if f:
        ctx = f[0][:3]
    except:
      pass
    if args:
      message %= args
    print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
                              ctx[1], ctx[2], message)

def FindBuildFiles():
  extension = '.gyp'
  files = os.listdir(os.getcwd())
  build_files = []
  for file in files:
    if file.endswith(extension):
      build_files.append(file)
  return build_files


def Load(build_files, format, default_variables={},
         includes=[], depth='.', params=None, check=False,
         circular_check=True):
  """
  Loads one or more specified build files.
  default_variables and includes will be copied before use.
  Returns the generator for the specified format and the
  data returned by loading the specified build files.
  """
  if params is None:
    params = {}

  flavor = None
  if '-' in format:
    format, params['flavor'] = format.split('-', 1)

  default_variables = copy.copy(default_variables)

  # Default variables provided by this program and its modules should be
  # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
  # avoiding collisions with user and automatic variables.
  default_variables['GENERATOR'] = format

  # Format can be a custom python file, or by default the name of a module
  # within gyp.generator.
  if format.endswith('.py'):
    generator_name = os.path.splitext(format)[0]
    path, generator_name = os.path.split(generator_name)

    # Make sure the path to the custom generator is in sys.path
    # Don't worry about removing it once we are done.  Keeping the path
    # to each generator that is used in sys.path is likely harmless and
    # arguably a good idea.
    path = os.path.abspath(path)
    if path not in sys.path:
      sys.path.insert(0, path)
  else:
    generator_name = 'gyp.generator.' + format

  # These parameters are passed in order (as opposed to by key)
  # because ActivePython cannot handle key parameters to __import__.
  generator = __import__(generator_name, globals(), locals(), generator_name)
  for (key, val) in generator.generator_default_variables.items():
    default_variables.setdefault(key, val)

  # Give the generator the opportunity to set additional variables based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateVariables', None):
    generator.CalculateVariables(default_variables, params)

  # Give the generator the opportunity to set generator_input_info based on
  # the params it will receive in the output phase.
  if getattr(generator, 'CalculateGeneratorInputInfo', None):
    generator.CalculateGeneratorInputInfo(params)

  # Fetch the generator-specific info that gets fed to input.  We use getattr
  # so we can default things and the generators only have to provide what
  # they need.
  generator_input_info = {
    'non_configuration_keys':
        getattr(generator, 'generator_additional_non_configuration_keys', []),
    'path_sections':
        getattr(generator, 'generator_additional_path_sections', []),
    'extra_sources_for_rules':
        getattr(generator, 'generator_extra_sources_for_rules', []),
    'generator_supports_multiple_toolsets':
        getattr(generator, 'generator_supports_multiple_toolsets', False),
    'generator_wants_static_library_dependencies_adjusted':
        getattr(generator,
                'generator_wants_static_library_dependencies_adjusted', True),
    'generator_wants_sorted_dependencies':
        getattr(generator, 'generator_wants_sorted_dependencies', False),
    'generator_filelist_paths':
        getattr(generator, 'generator_filelist_paths', None),
  }

  # Process the input specific to this generator.
  result = gyp.input.Load(build_files, default_variables, includes[:],
                          depth, generator_input_info, check, circular_check,
                          params['parallel'], params['root_targets'])
  return [generator] + result
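
# For example, Load(build_files, 'ninja-linux', ...) splits the format into
# 'ninja' with params['flavor'] set to 'linux', then imports
# gyp.generator.ninja as the generator module.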

def NameValueListToDict(name_value_list):
  """
  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
  of the pairs.  If a string is simply NAME, then the value in the dictionary
  is set to True.  If VALUE can be converted to an integer, it is.
  """
  result = { }
  for item in name_value_list:
    tokens = item.split('=', 1)
    if len(tokens) == 2:
      # If we can make it an int, use that, otherwise, use the string.
      try:
        token_value = int(tokens[1])
      except ValueError:
        token_value = tokens[1]
      # Set the variable to the supplied value.
      result[tokens[0]] = token_value
    else:
      # No value supplied, treat it as a boolean and set it.
      result[tokens[0]] = True
  return result
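
# For example (per the docstring above), NameValueListToDict(
#     ['OS=linux', 'jobs=8', 'clang']) returns
# {'OS': 'linux', 'jobs': 8, 'clang': True}.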

def ShlexEnv(env_name):
  flags = os.environ.get(env_name, [])
  if flags:
    flags = shlex.split(flags)
  return flags
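
# For example, with GYP_DEFINES='OS=linux component=shared_library' set,
# ShlexEnv('GYP_DEFINES') returns ['OS=linux', 'component=shared_library'];
# when the variable is unset it returns [].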

def FormatOpt(opt, value):
  if opt.startswith('--'):
    return '%s=%s' % (opt, value)
  return opt + value
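
# For example, FormatOpt('--depth', 'src') returns '--depth=src', while
# FormatOpt('-D', 'OS=linux') returns '-DOS=linux'.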

def RegenerateAppendFlag(flag, values, predicate, env_name, options):
  """Regenerate a list of command line flags, for an option of action='append'.

  The |env_name|, if given, is checked in the environment and used to generate
  an initial list of options, then the options that were specified on the
  command line (given in |values|) are appended.  This matches the handling of
  environment variables and command line flags where command line flags
  override the environment, while not requiring the environment to be set when
  the flags are used again.
  """
  flags = []
  if options.use_environment and env_name:
    for flag_value in ShlexEnv(env_name):
      value = FormatOpt(flag, predicate(flag_value))
      if value in flags:
        flags.remove(value)
      flags.append(value)
  if values:
    for flag_value in values:
      flags.append(FormatOpt(flag, predicate(flag_value)))
  return flags


def RegenerateFlags(options):
  """Given a parsed options object, and taking the environment variables into
  account, returns a list of flags that should regenerate an equivalent options
  object (even in the absence of the environment variables).

  Any path options will be normalized relative to depth.

  The format flag is not included, as it is assumed the calling generator will
  set that as appropriate.
  """
  def FixPath(path):
    path = gyp.common.FixIfRelativePath(path, options.depth)
    if not path:
      return os.path.curdir
    return path

  def Noop(value):
    return value

  # We always want to ignore the environment when regenerating, to avoid
  # duplicate or changed flags in the environment at the time of regeneration.
  flags = ['--ignore-environment']
  for name, metadata in options._regeneration_metadata.iteritems():
    opt = metadata['opt']
    value = getattr(options, name)
    value_predicate = metadata['type'] == 'path' and FixPath or Noop
    action = metadata['action']
    env_name = metadata['env_name']
    if action == 'append':
      flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
                                        env_name, options))
    elif action in ('store', None):  # None is a synonym for 'store'.
      if value:
        flags.append(FormatOpt(opt, value_predicate(value)))
      elif options.use_environment and env_name and os.environ.get(env_name):
        flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
    elif action in ('store_true', 'store_false'):
      if ((action == 'store_true' and value) or
          (action == 'store_false' and not value)):
        flags.append(opt)
      elif options.use_environment and env_name:
        print >>sys.stderr, ('Warning: environment regeneration unimplemented '
                             'for %s flag %r env_name %r' % (action, opt,
                                                             env_name))
    else:
      print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
                           'flag %r' % (action, opt))

  return flags


class RegeneratableOptionParser(optparse.OptionParser):
  def __init__(self):
    self.__regeneratable_options = {}
    optparse.OptionParser.__init__(self)

  def add_option(self, *args, **kw):
    """Add an option to the parser.

    This accepts the same arguments as OptionParser.add_option, plus the
    following:
      regenerate: can be set to False to prevent this option from being
          included in regeneration.
      env_name: name of environment variable that additional values for this
          option come from.
      type: adds type='path', to tell the regenerator that the values of
          this option need to be made relative to options.depth
    """
    env_name = kw.pop('env_name', None)
    if 'dest' in kw and kw.pop('regenerate', True):
      dest = kw['dest']

      # The path type is needed for regenerating, for optparse we can just
      # treat it as a string.
      type = kw.get('type')
      if type == 'path':
        kw['type'] = 'string'

      self.__regeneratable_options[dest] = {
          'action': kw.get('action'),
          'type': type,
          'env_name': env_name,
          'opt': args[0],
        }

    optparse.OptionParser.add_option(self, *args, **kw)

  def parse_args(self, *args):
    values, args = optparse.OptionParser.parse_args(self, *args)
    values._regeneration_metadata = self.__regeneratable_options
    return values, args
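
# A minimal usage sketch (hypothetical option and variable names; the real
# options are registered in gyp_main below):
#   parser = RegeneratableOptionParser()
#   parser.add_option('--depth', dest='depth', type='path',
#                     env_name='GYP_DEPTH', help='example option')
#   options, args = parser.parse_args(['--depth', 'src'])
#   RegenerateFlags(options)  # e.g. ['--ignore-environment', '--depth=.']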

def gyp_main(args):
  my_name = os.path.basename(sys.argv[0])

  parser = RegeneratableOptionParser()
  usage = 'usage: %s [options ...] [build_file ...]'
  parser.set_usage(usage.replace('%s', '%prog'))
  parser.add_option('--build', dest='configs', action='append',
                    help='configuration for build after project generation')
  parser.add_option('--check', dest='check', action='store_true',
                    help='check format of gyp files')
  parser.add_option('--config-dir', dest='config_dir', action='store',
                    env_name='GYP_CONFIG_DIR', default=None,
                    help='The location for configuration files like '
                    'include.gypi.')
  parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
                    action='append', default=[], help='turn on a debugging '
                    'mode for debugging GYP.  Supported modes are "variables", '
                    '"includes" and "general" or "all" for all of them.')
  parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
                    env_name='GYP_DEFINES',
                    help='sets variable VAR to value VAL')
  parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
                    help='set DEPTH gyp variable to a relative path to PATH')
  parser.add_option('-f', '--format', dest='formats', action='append',
                    env_name='GYP_GENERATORS', regenerate=False,
                    help='output formats to generate')
  parser.add_option('-G', dest='generator_flags', action='append', default=[],
                    metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
                    help='sets generator flag FLAG to VAL')
  parser.add_option('--generator-output', dest='generator_output',
                    action='store', default=None, metavar='DIR', type='path',
                    env_name='GYP_GENERATOR_OUTPUT',
                    help='puts generated build files under DIR')
  parser.add_option('--ignore-environment', dest='use_environment',
                    action='store_false', default=True, regenerate=False,
                    help='do not read options from environment variables')
  parser.add_option('-I', '--include', dest='includes', action='append',
                    metavar='INCLUDE', type='path',
                    help='files to include in all loaded .gyp files')
  # --no-circular-check disables the check for circular relationships between
  # .gyp files.  These relationships should not exist, but they've only been
  # observed to be harmful with the Xcode generator.  Chromium's .gyp files
  # currently have some circular relationships on non-Mac platforms, so this
  # option allows the strict behavior to be used on Macs and the lenient
  # behavior to be used elsewhere.
  # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
  parser.add_option('--no-circular-check', dest='circular_check',
                    action='store_false', default=True, regenerate=False,
                    help="don't check for circular relationships between files")
  parser.add_option('--no-parallel', action='store_true', default=False,
                    help='Disable multiprocessing')
  parser.add_option('-S', '--suffix', dest='suffix', default='',
                    help='suffix to add to generated files')
  parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
                    default=None, metavar='DIR', type='path',
                    help='directory to use as the root of the source tree')
  parser.add_option('-R', '--root-target', dest='root_targets',
                    action='append', metavar='TARGET',
                    help='include only TARGET and its deep dependencies')

  options, build_files_arg = parser.parse_args(args)
  build_files = build_files_arg

  # Set up the configuration directory (defaults to ~/.gyp)
  if not options.config_dir:
    home = None
    home_dot_gyp = None
    if options.use_environment:
      home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
      if home_dot_gyp:
        home_dot_gyp = os.path.expanduser(home_dot_gyp)

    if not home_dot_gyp:
      home_vars = ['HOME']
      if sys.platform in ('cygwin', 'win32'):
        home_vars.append('USERPROFILE')
      for home_var in home_vars:
        home = os.getenv(home_var)
        if home != None:
          home_dot_gyp = os.path.join(home, '.gyp')
          if not os.path.exists(home_dot_gyp):
            home_dot_gyp = None
          else:
            break
  else:
    home_dot_gyp = os.path.expanduser(options.config_dir)

  if home_dot_gyp and not os.path.exists(home_dot_gyp):
    home_dot_gyp = None

  if not options.formats:
    # If no format was given on the command line, then check the env variable.
    generate_formats = []
    if options.use_environment:
      generate_formats = os.environ.get('GYP_GENERATORS', [])
    if generate_formats:
      generate_formats = re.split('[\s,]', generate_formats)
    if generate_formats:
      options.formats = generate_formats
    else:
      # Nothing in the variable, default based on platform.
      if sys.platform == 'darwin':
        options.formats = ['xcode']
      elif sys.platform in ('win32', 'cygwin'):
        options.formats = ['msvs']
      else:
        options.formats = ['make']

  if not options.generator_output and options.use_environment:
    g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
    if g_o:
      options.generator_output = g_o

  options.parallel = not options.no_parallel

  for mode in options.debug:
    gyp.debug[mode] = 1

  # Do an extra check to avoid work when we're not debugging.
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL, 'running with these options:')
    for option, value in sorted(options.__dict__.items()):
      if option[0] == '_':
        continue
      if isinstance(value, basestring):
        DebugOutput(DEBUG_GENERAL, "  %s: '%s'", option, value)
      else:
        DebugOutput(DEBUG_GENERAL, "  %s: %s", option, value)

  if not build_files:
    build_files = FindBuildFiles()
  if not build_files:
    raise GypError((usage + '\n\n%s: error: no build_file') %
                   (my_name, my_name))

  # TODO(mark): Chromium-specific hack!
  # For Chromium, the gyp "depth" variable should always be a relative path
  # to Chromium's top-level "src" directory.  If no depth variable was set
  # on the command line, try to find a "src" directory by looking at the
  # absolute path to each build file's directory.  The first "src" component
  # found will be treated as though it were the path used for --depth.
  if not options.depth:
    for build_file in build_files:
      build_file_dir = os.path.abspath(os.path.dirname(build_file))
      build_file_dir_components = build_file_dir.split(os.path.sep)
      components_len = len(build_file_dir_components)
      for index in xrange(components_len - 1, -1, -1):
        if build_file_dir_components[index] == 'src':
          options.depth = os.path.sep.join(build_file_dir_components)
          break
        del build_file_dir_components[index]

      # If the inner loop found something, break without advancing to another
      # build file.
      if options.depth:
        break

    if not options.depth:
      raise GypError('Could not automatically locate src directory.  This is '
                     'a temporary Chromium feature that will be removed.  Use '
                     '--depth as a workaround.')

  # If toplevel-dir is not set, we assume that depth is the root of our source
  # tree.
  if not options.toplevel_dir:
    options.toplevel_dir = options.depth

  # -D on the command line sets variable defaults - D isn't just for define,
  # it's for default.  Perhaps there should be a way to force (-F?) a
  # variable's value so that it can't be overridden by anything else.
  cmdline_default_variables = {}
  defines = []
  if options.use_environment:
    defines += ShlexEnv('GYP_DEFINES')
  if options.defines:
    defines += options.defines
  cmdline_default_variables = NameValueListToDict(defines)
  if DEBUG_GENERAL in gyp.debug:
    DebugOutput(DEBUG_GENERAL,
                "cmdline_default_variables: %s", cmdline_default_variables)

  # Set up includes.
  includes = []

  # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
  # .gyp file that's loaded, before anything else is included.
  if home_dot_gyp != None:
    default_include = os.path.join(home_dot_gyp, 'include.gypi')
    if os.path.exists(default_include):
      print 'Using overrides found in ' + default_include
      includes.append(default_include)

  # Command-line --include files come after the default include.
  if options.includes:
    includes.extend(options.includes)

  # Generator flags should be prefixed with the target generator since they
  # are global across all generator runs.
  gen_flags = []
  if options.use_environment:
    gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
  if options.generator_flags:
    gen_flags += options.generator_flags
  generator_flags = NameValueListToDict(gen_flags)
  if DEBUG_GENERAL in gyp.debug.keys():
    DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)

  # Generate all requested formats (use a set in case we got one format request
  # twice)
  for format in set(options.formats):
    params = {'options': options,
              'build_files': build_files,
              'generator_flags': generator_flags,
              'cwd': os.getcwd(),
              'build_files_arg': build_files_arg,
              'gyp_binary': sys.argv[0],
              'home_dot_gyp': home_dot_gyp,
              'parallel': options.parallel,
              'root_targets': options.root_targets}

    # Start with the default variables from the command line.
    [generator, flat_list, targets, data] = Load(build_files, format,
                                                 cmdline_default_variables,
                                                 includes, options.depth,
                                                 params, options.check,
                                                 options.circular_check)

    # TODO(mark): Pass |data| for now because the generator needs a list of
    # build files that came in.  In the future, maybe it should just accept
    # a list, and not the whole data dict.
    # NOTE: flat_list is the flattened dependency graph specifying the order
    # that targets may be built.  Build systems that operate serially or that
    # need to have dependencies defined before dependents reference them should
    # generate targets in the order specified in flat_list.
    generator.GenerateOutput(flat_list, targets, data, params)

    if options.configs:
      valid_configs = targets[flat_list[0]]['configurations'].keys()
      for conf in options.configs:
        if conf not in valid_configs:
          raise GypError('Invalid config specified via --build: %s' % conf)
      generator.PerformBuild(data, options.configs, params)

  # Done
  return 0


def main(args):
  try:
    return gyp_main(args)
  except GypError, e:
    sys.stderr.write("gyp: %s\n" % e)
    return 1


# NOTE: setuptools generated console_scripts calls function with no arguments
def script_main():
  return main(sys.argv[1:])


if __name__ == '__main__':
  sys.exit(script_main())
521
third_party/gyp/common.py
vendored
Normal file
@@ -0,0 +1,521 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import with_statement

import errno
import filecmp
import os.path
import re
import tempfile
import sys


# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
# among other "problems".
class memoize(object):
  def __init__(self, func):
    self.func = func
    self.cache = {}
  def __call__(self, *args):
    try:
      return self.cache[args]
    except KeyError:
      result = self.func(*args)
      self.cache[args] = result
      return result


class GypError(Exception):
  """Error class representing an error, which is to be presented
  to the user.  The main entry point will catch and display this.
  """
  pass


def ExceptionAppend(e, msg):
  """Append a message to the given exception's message."""
  if not e.args:
    e.args = (msg,)
  elif len(e.args) == 1:
    e.args = (str(e.args[0]) + ' ' + msg,)
  else:
    e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]


def FindQualifiedTargets(target, qualified_list):
  """
  Given a list of qualified targets, return the qualified targets for the
  specified |target|.
  """
  return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]


def ParseQualifiedTarget(target):
  # Splits a qualified target into a build file, target name and toolset.

  # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
  target_split = target.rsplit(':', 1)
  if len(target_split) == 2:
    [build_file, target] = target_split
  else:
    build_file = None

  target_split = target.rsplit('#', 1)
  if len(target_split) == 2:
    [target, toolset] = target_split
  else:
    toolset = None

  return [build_file, target, toolset]
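
# For example, ParseQualifiedTarget('/path/to/chrome.gyp:base#host') returns
# ['/path/to/chrome.gyp', 'base', 'host']; pieces that are absent come back
# as None (see the format description in QualifiedTarget below).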

def ResolveTarget(build_file, target, toolset):
  # This function resolves a target into a canonical form:
  # - a fully defined build file, either absolute or relative to the current
  #   directory
  # - a target name
  # - a toolset
  #
  # build_file is the file relative to which 'target' is defined.
  # target is the qualified target.
  # toolset is the default toolset for that target.
  [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)

  if parsed_build_file:
    if build_file:
      # If a relative path, parsed_build_file is relative to the directory
      # containing build_file.  If build_file is not in the current directory,
      # parsed_build_file is not a usable path as-is.  Resolve it by
      # interpreting it as relative to build_file.  If parsed_build_file is
      # absolute, it is usable as a path regardless of the current directory,
      # and os.path.join will return it as-is.
      build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
                                                 parsed_build_file))
      # Further (to handle cases like ../cwd), make it relative to cwd.
      if not os.path.isabs(build_file):
        build_file = RelativePath(build_file, '.')
    else:
      build_file = parsed_build_file

  if parsed_toolset:
    toolset = parsed_toolset

  return [build_file, target, toolset]


def BuildFile(fully_qualified_target):
  # Extracts the build file from the fully qualified target.
  return ParseQualifiedTarget(fully_qualified_target)[0]


def GetEnvironFallback(var_list, default):
  """Look up a key in the environment, with fallback to secondary keys
  and finally falling back to a default value."""
  for var in var_list:
    if var in os.environ:
      return os.environ[var]
  return default
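
# For example, GetEnvironFallback(('CC_target', 'CC'), 'cc') returns the
# value of the first of those environment variables that is set, falling
# back to 'cc' if neither is.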

def QualifiedTarget(build_file, target, toolset):
  # "Qualified" means the file that a target was defined in and the target
  # name, separated by a colon, suffixed by a # and the toolset name:
  # /path/to/file.gyp:target_name#toolset
  fully_qualified = build_file + ':' + target
  if toolset:
    fully_qualified = fully_qualified + '#' + toolset
  return fully_qualified


@memoize
def RelativePath(path, relative_to):
  # Assuming both |path| and |relative_to| are relative to the current
  # directory, returns a relative path that identifies path relative to
  # relative_to.

  # Convert to normalized (and therefore absolute paths).
  path = os.path.realpath(path)
  relative_to = os.path.realpath(relative_to)

  # On Windows, we can't create a relative path to a different drive, so just
  # use the absolute path.
  if sys.platform == 'win32':
    if (os.path.splitdrive(path)[0].lower() !=
        os.path.splitdrive(relative_to)[0].lower()):
      return path

  # Split the paths into components.
  path_split = path.split(os.path.sep)
  relative_to_split = relative_to.split(os.path.sep)

  # Determine how much of the prefix the two paths share.
  prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))

  # Put enough ".." components to back up out of relative_to to the common
  # prefix, and then append the part of path_split after the common prefix.
  relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
                   path_split[prefix_len:]

  if len(relative_split) == 0:
    # The paths were the same.
    return ''

  # Turn it back into a string and we're done.
  return os.path.join(*relative_split)
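
# For example, on POSIX, RelativePath('out/Debug', 'src') returns
# '../out/Debug' when both paths live under the current directory.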

@memoize
def InvertRelativePath(path, toplevel_dir=None):
  """Given a path like foo/bar that is relative to toplevel_dir, return
  the inverse relative path back to the toplevel_dir.

  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
  should always produce the empty string, unless the path contains symlinks.
  """
  if not path:
    return path
  toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
  return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))


def FixIfRelativePath(path, relative_to):
  # Like RelativePath but returns |path| unchanged if it is absolute.
  if os.path.isabs(path):
    return path
  return RelativePath(path, relative_to)


def UnrelativePath(path, relative_to):
  # Assuming that |relative_to| is relative to the current directory, and
  # |path| is a path relative to the dirname of |relative_to|, returns a path
  # that identifies |path| relative to the current directory.
  rel_dir = os.path.dirname(relative_to)
  return os.path.normpath(os.path.join(rel_dir, path))


# re objects used by EncodePOSIXShellArgument.  See IEEE 1003.1 XCU.2.2 at
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
# and the documentation for various shells.

# _quote is a pattern that should match any argument that needs to be quoted
# with double-quotes by EncodePOSIXShellArgument.  It matches the following
# characters appearing anywhere in an argument:
#   \t, \n, space  parameter separators
#   #              comments
#   $              expansions (quoted to always expand within one argument)
#   %              called out by IEEE 1003.1 XCU.2.2
#   &              job control
#   '              quoting
#   (, )           subshell execution
#   *, ?, [        pathname expansion
#   ;              command delimiter
#   <, >, |        redirection
#   =              assignment
#   {, }           brace expansion (bash)
#   ~              tilde expansion
# It also matches the empty string, because "" (or '') is the only way to
# represent an empty string literal argument to a POSIX shell.
#
# This does not match the characters in _escape, because those need to be
# backslash-escaped regardless of whether they appear in a double-quoted
# string.
_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')

# _escape is a pattern that should match any character that needs to be
# escaped with a backslash, whether or not the argument matched the _quote
# pattern.  _escape is used with re.sub to backslash anything in _escape's
# first match group, hence the (parentheses) in the regular expression.
#
# _escape matches the following characters appearing anywhere in an argument:
#   "  to prevent POSIX shells from interpreting this character for quoting
#   \  to prevent POSIX shells from interpreting this character for escaping
#   `  to prevent POSIX shells from interpreting this character for command
#      substitution
# Missing from this list is $, because the desired behavior of
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
#
# Also missing from this list is !, which bash will interpret as the history
# expansion character when history is enabled.  bash does not enable history
# by default in non-interactive shells, so this is not thought to be a problem.
# ! was omitted from this list because bash interprets "\!" as a literal string
# including the backslash character (avoiding history expansion but retaining
# the backslash), which would not be correct for argument encoding.  Handling
# this case properly would also be problematic because bash allows the history
# character to be changed with the histchars shell variable.  Fortunately,
# as history is not enabled in non-interactive shells and
# EncodePOSIXShellArgument is only expected to encode for non-interactive
# shells, there is no room for error here by ignoring !.
_escape = re.compile(r'(["\\`])')


def EncodePOSIXShellArgument(argument):
  """Encodes |argument| suitably for consumption by POSIX shells.

  argument may be quoted and escaped as necessary to ensure that POSIX shells
  treat the returned value as a literal representing the argument passed to
  this function.  Parameter (variable) expansions beginning with $ are allowed
  to remain intact without escaping the $, to allow the argument to contain
  references to variables to be expanded by the shell.
  """

  if not isinstance(argument, str):
    argument = str(argument)

  if _quote.search(argument):
    quote = '"'
  else:
    quote = ''

  encoded = quote + re.sub(_escape, r'\\\1', argument) + quote

  return encoded
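
# For example, EncodePOSIXShellArgument('two words') returns '"two words"',
# EncodePOSIXShellArgument('safe') returns it unquoted, and the empty string
# encodes as '""'.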

def EncodePOSIXShellList(list):
  """Encodes |list| suitably for consumption by POSIX shells.

  Returns EncodePOSIXShellArgument for each item in list, and joins them
  together using the space character as an argument separator.
  """

  encoded_arguments = []
  for argument in list:
    encoded_arguments.append(EncodePOSIXShellArgument(argument))
  return ' '.join(encoded_arguments)


def DeepDependencyTargets(target_dicts, roots):
  """Returns the recursive list of target dependencies."""
  dependencies = set()
  pending = set(roots)
  while pending:
    # Pluck out one.
    r = pending.pop()
    # Skip if visited already.
    if r in dependencies:
      continue
    # Add it.
    dependencies.add(r)
    # Add its children.
    spec = target_dicts[r]
    pending.update(set(spec.get('dependencies', [])))
    pending.update(set(spec.get('dependencies_original', [])))
  return list(dependencies - set(roots))


def BuildFileTargets(target_list, build_file):
  """From a target_list, returns the subset from the specified build_file.
  """
  return [p for p in target_list if BuildFile(p) == build_file]


def AllTargets(target_list, target_dicts, build_file):
  """Returns all targets (direct and dependencies) for the specified build_file.
  """
  bftargets = BuildFileTargets(target_list, build_file)
  deptargets = DeepDependencyTargets(target_dicts, bftargets)
  return bftargets + deptargets


def WriteOnDiff(filename):
  """Write to a file only if the new contents differ.

  Arguments:
    filename: name of the file to potentially write to.
  Returns:
    A file like object which will write to temporary file and only overwrite
    the target if it differs (on close).
  """

  class Writer:
    """Wrapper around file which only covers the target if it differs."""
    def __init__(self):
      # Pick temporary file.
      tmp_fd, self.tmp_path = tempfile.mkstemp(
          suffix='.tmp',
          prefix=os.path.split(filename)[1] + '.gyp.',
          dir=os.path.split(filename)[0])
      try:
        self.tmp_file = os.fdopen(tmp_fd, 'wb')
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

    def __getattr__(self, attrname):
      # Delegate everything else to self.tmp_file
      return getattr(self.tmp_file, attrname)

    def close(self):
      try:
        # Close tmp file.
        self.tmp_file.close()
        # Determine if different.
        same = False
        try:
          same = filecmp.cmp(self.tmp_path, filename, False)
        except OSError, e:
          if e.errno != errno.ENOENT:
            raise

        if same:
          # The new file is identical to the old one, just get rid of the new
          # one.
          os.unlink(self.tmp_path)
        else:
          # The new file is different from the old one, or there is no old one.
          # Rename the new file to the permanent name.
          #
          # tempfile.mkstemp uses an overly restrictive mode, resulting in a
          # file that can only be read by the owner, regardless of the umask.
          # There's no reason to not respect the umask here, which means that
          # an extra hoop is required to fetch it and reset the new file's mode.
          #
          # No way to get the umask without setting a new one?  Set a safe one
          # and then set it back to the old value.
          umask = os.umask(077)
          os.umask(umask)
          os.chmod(self.tmp_path, 0666 & ~umask)
          if sys.platform == 'win32' and os.path.exists(filename):
            # NOTE: on windows (but not cygwin) rename will not replace an
            # existing file, so it must be preceded with a remove.  Sadly there
            # is no way to make the switch atomic.
            os.remove(filename)
          os.rename(self.tmp_path, filename)
      except Exception:
        # Don't leave turds behind.
        os.unlink(self.tmp_path)
        raise

  return Writer()


def EnsureDirExists(path):
  """Make sure the directory for |path| exists."""
  try:
    os.makedirs(os.path.dirname(path))
  except OSError:
    pass


def GetFlavor(params):
  """Returns |params.flavor| if it's set, otherwise the system's default
  flavor."""
  flavors = {
    'cygwin': 'win',
    'win32': 'win',
    'darwin': 'mac',
  }

  if 'flavor' in params:
    return params['flavor']
  if sys.platform in flavors:
    return flavors[sys.platform]
  if sys.platform.startswith('sunos'):
    return 'solaris'
  if sys.platform.startswith('freebsd'):
    return 'freebsd'
  if sys.platform.startswith('openbsd'):
    return 'openbsd'
  if sys.platform.startswith('aix'):
    return 'aix'

  return 'linux'


def CopyTool(flavor, out_path):
  """Finds (flock|mac|win)_tool.py in the gyp directory and copies it
  to |out_path|."""
  # aix and solaris just need flock emulation. mac and win use more complicated
  # support scripts.
  prefix = {
    'aix': 'flock',
    'solaris': 'flock',
    'mac': 'mac',
    'win': 'win'
  }.get(flavor, None)
  if not prefix:
    return

  # Slurp input file.
  source_path = os.path.join(
      os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
  with open(source_path) as source_file:
    source = source_file.readlines()

  # Add header and write it out.
  tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
  with open(tool_path, 'w') as tool_file:
    tool_file.write(
        ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))

  # Make file executable.
  os.chmod(tool_path, 0755)


# From Alex Martelli,
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)

def uniquer(seq, idfun=None):
  if idfun is None:
    idfun = lambda x: x
  seen = {}
  result = []
  for item in seq:
    marker = idfun(item)
    if marker in seen: continue
    seen[marker] = 1
    result.append(item)
  return result
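
# For example, uniquer([1, 2, 1, 3]) returns [1, 2, 3], keeping the order in
# which items were first seen.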

class CycleError(Exception):
  """An exception raised when an unexpected cycle is detected."""
  def __init__(self, nodes):
    self.nodes = nodes
  def __str__(self):
    return 'CycleError: cycle involving: ' + str(self.nodes)


def TopologicallySorted(graph, get_edges):
  """Topologically sort based on a user provided edge definition.

  Args:
    graph: A list of node names.
    get_edges: A function mapping from node name to a hashable collection
               of node names which this node has outgoing edges to.
  Returns:
    A list containing all of the nodes in graph in topological order.
    It is assumed that calling get_edges once for each node and caching is
    cheaper than repeatedly calling get_edges.
  Raises:
    CycleError in the event of a cycle.
  Example:
    graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
    def GetEdges(node):
      return re.findall(r'\$\(([^)]*)\)', graph[node])
    print TopologicallySorted(graph.keys(), GetEdges)
    ==>
    ['a', 'c', 'b']
  """
  get_edges = memoize(get_edges)
  visited = set()
  visiting = set()
  ordered_nodes = []
  def Visit(node):
    if node in visiting:
      raise CycleError(visiting)
    if node in visited:
      return
    visited.add(node)
    visiting.add(node)
    for neighbor in get_edges(node):
      Visit(neighbor)
    visiting.remove(node)
    ordered_nodes.insert(0, node)
  for node in sorted(graph):
    Visit(node)
  return ordered_nodes
72
third_party/gyp/common_test.py
vendored
Executable file
@@ -0,0 +1,72 @@
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Unit tests for the common.py file."""

import gyp.common
import unittest
import sys


class TestTopologicallySorted(unittest.TestCase):
  def test_Valid(self):
    """Test that sorting works on a valid graph with one possible order."""
    graph = {
      'a': ['b', 'c'],
      'b': [],
      'c': ['d'],
      'd': ['b'],
    }
    def GetEdge(node):
      return tuple(graph[node])
    self.assertEqual(
      gyp.common.TopologicallySorted(graph.keys(), GetEdge),
      ['a', 'c', 'd', 'b'])

  def test_Cycle(self):
    """Test that an exception is thrown on a cyclic graph."""
    graph = {
      'a': ['b'],
      'b': ['c'],
      'c': ['d'],
      'd': ['a'],
    }
    def GetEdge(node):
      return tuple(graph[node])
    self.assertRaises(
      gyp.common.CycleError, gyp.common.TopologicallySorted,
      graph.keys(), GetEdge)


class TestGetFlavor(unittest.TestCase):
  """Test that gyp.common.GetFlavor works as intended"""
  original_platform = ''

  def setUp(self):
    self.original_platform = sys.platform

  def tearDown(self):
    sys.platform = self.original_platform

  def assertFlavor(self, expected, argument, param):
    sys.platform = argument
    self.assertEqual(expected, gyp.common.GetFlavor(param))

  def test_platform_default(self):
    self.assertFlavor('freebsd', 'freebsd9' , {})
    self.assertFlavor('freebsd', 'freebsd10', {})
    self.assertFlavor('openbsd', 'openbsd5' , {})
    self.assertFlavor('solaris', 'sunos5'   , {})
    self.assertFlavor('solaris', 'sunos'    , {})
    self.assertFlavor('linux'  , 'linux2'   , {})
    self.assertFlavor('linux'  , 'linux3'   , {})

  def test_param(self):
    self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})


if __name__ == '__main__':
  unittest.main()
157
third_party/gyp/easy_xml.py
vendored
Normal file
@@ -0,0 +1,157 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import re
import os


def XmlToString(content, encoding='utf-8', pretty=False):
  """ Converts the structured content into an XML string.

  Visual Studio files have a lot of pre-defined structures.  This function
  makes it easy to represent these structures as Python data structures,
  instead of having to create a lot of function calls.

  Each XML element of the content is represented as a list composed of:
  1. The name of the element, a string,
  2. The attributes of the element, a dictionary (optional), and
  3+. The content of the element, if any.  Strings are simple text nodes and
      lists are child elements.

  Example 1:
      <test/>
  becomes
      ['test']

  Example 2:
      <myelement a='value1' b='value2'>
         <childtype>This is</childtype>
         <childtype>it!</childtype>
      </myelement>

  becomes
      ['myelement', {'a':'value1', 'b':'value2'},
         ['childtype', 'This is'],
         ['childtype', 'it!'],
      ]

  Args:
    content:  The structured content to be converted.
    encoding: The encoding to report on the first XML line.
    pretty: True if we want pretty printing with indents and new lines.

  Returns:
    The XML content as a string.
  """
  # We create a huge list of all the elements of the file.
  xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
  if pretty:
    xml_parts.append('\n')
  _ConstructContentList(xml_parts, content, pretty)

  # Convert it to a string
  return ''.join(xml_parts)


def _ConstructContentList(xml_parts, specification, pretty, level=0):
  """ Appends the XML parts corresponding to the specification.

  Args:
    xml_parts: A list of XML parts to be appended to.
    specification:  The specification of the element.  See EasyXml docs.
    pretty: True if we want pretty printing with indents and new lines.
    level: Indentation level.
  """
  # The first item in a specification is the name of the element.
  if pretty:
    indentation = '  ' * level
    new_line = '\n'
  else:
    indentation = ''
    new_line = ''
  name = specification[0]
  if not isinstance(name, str):
    raise Exception('The first item of an EasyXml specification should be '
                    'a string.  Specification was ' + str(specification))
  xml_parts.append(indentation + '<' + name)

  # Optionally in second position is a dictionary of the attributes.
  rest = specification[1:]
  if rest and isinstance(rest[0], dict):
    for at, val in sorted(rest[0].iteritems()):
      xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
    rest = rest[1:]
  if rest:
    xml_parts.append('>')
    all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
    multi_line = not all_strings
    if multi_line and new_line:
      xml_parts.append(new_line)
    for child_spec in rest:
      # If it's a string, append a text node.
      # Otherwise, recurse over that child definition.
      if isinstance(child_spec, str):
        xml_parts.append(_XmlEscape(child_spec))
      else:
        _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
    if multi_line and indentation:
      xml_parts.append(indentation)
    xml_parts.append('</%s>%s' % (name, new_line))
  else:
    xml_parts.append('/>%s' % new_line)


def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
                      win32=False):
  """ Writes the XML content to disk, touching the file only if it has changed.

  Args:
    content:  The structured content to be written.
    path: Location of the file.
    encoding: The encoding to report on the first line of the XML file.
    pretty: True if we want pretty printing with indents and new lines.
    win32: True to emit Windows (\r\n) line endings.
  """
  xml_string = XmlToString(content, encoding, pretty)
  if win32 and os.linesep != '\r\n':
    xml_string = xml_string.replace('\n', '\r\n')

  # Get the old content
  try:
    f = open(path, 'r')
    existing = f.read()
    f.close()
  except:
    existing = None

  # It has changed, write it
  if existing != xml_string:
    f = open(path, 'w')
    f.write(xml_string)
    f.close()


_xml_escape_map = {
  '"': '&quot;',
  "'": '&apos;',
  '<': '&lt;',
  '>': '&gt;',
  '&': '&amp;',
  '\n': '&#xA;',
  '\r': '&#xD;',
}
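
# With this map, _XmlEscape (defined below) turns '<&>' into '&lt;&amp;&gt;';
# with attr=True, single quotes are passed through unescaped.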

_xml_escape_re = re.compile(
    "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))


def _XmlEscape(value, attr=False):
  """ Escape a string for inclusion in XML."""
  def replace(match):
    m = match.string[match.start() : match.end()]
    # don't replace single quotes in attrs
    if attr and m == "'":
      return m
    return _xml_escape_map[m]
  return _xml_escape_re.sub(replace, value)
103
third_party/gyp/easy_xml_test.py
vendored
Executable file
@@ -0,0 +1,103 @@
#!/usr/bin/env python

# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the easy_xml.py file. """

import gyp.easy_xml as easy_xml
import unittest
import StringIO


class TestSequenceFunctions(unittest.TestCase):

  def setUp(self):
    self.stderr = StringIO.StringIO()

  def test_EasyXml_simple(self):
    self.assertEqual(
      easy_xml.XmlToString(['test']),
      '<?xml version="1.0" encoding="utf-8"?><test/>')

    self.assertEqual(
      easy_xml.XmlToString(['test'], encoding='Windows-1252'),
      '<?xml version="1.0" encoding="Windows-1252"?><test/>')

  def test_EasyXml_simple_with_attributes(self):
    self.assertEqual(
      easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
      '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')

  def test_EasyXml_escaping(self):
    original = '<test>\'"\r&\nfoo'
    converted = '&lt;test&gt;\'&quot;&#xD;&amp;&#xA;foo'
    converted_apos = converted.replace("'", '&apos;')
    self.assertEqual(
      easy_xml.XmlToString(['test3', {'a': original}, original]),
      '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
      (converted, converted_apos))

  def test_EasyXml_pretty(self):
    self.assertEqual(
      easy_xml.XmlToString(
        ['test3',
          ['GrandParent',
            ['Parent1',
              ['Child']
            ],
            ['Parent2']
          ]
        ],
        pretty=True),
      '<?xml version="1.0" encoding="utf-8"?>\n'
      '<test3>\n'
      '  <GrandParent>\n'
      '    <Parent1>\n'
      '      <Child/>\n'
      '    </Parent1>\n'
      '    <Parent2/>\n'
      '  </GrandParent>\n'
      '</test3>\n')

  def test_EasyXml_complex(self):
    # We want to create:
    target = (
      '<?xml version="1.0" encoding="utf-8"?>'
      '<Project>'
      '<PropertyGroup Label="Globals">'
      '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
      '<Keyword>Win32Proj</Keyword>'
      '<RootNamespace>automated_ui_tests</RootNamespace>'
      '</PropertyGroup>'
      '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
      '<PropertyGroup '
      'Condition="\'$(Configuration)|$(Platform)\'=='
      '\'Debug|Win32\'" Label="Configuration">'
      '<ConfigurationType>Application</ConfigurationType>'
      '<CharacterSet>Unicode</CharacterSet>'
      '</PropertyGroup>'
      '</Project>')

    xml = easy_xml.XmlToString(
      ['Project',
        ['PropertyGroup', {'Label': 'Globals'},
          ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
          ['Keyword', 'Win32Proj'],
          ['RootNamespace', 'automated_ui_tests']
        ],
        ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
        ['PropertyGroup',
          {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
           'Label': 'Configuration'},
          ['ConfigurationType', 'Application'],
          ['CharacterSet', 'Unicode']
        ]
      ])
    self.assertEqual(xml, target)


if __name__ == '__main__':
  unittest.main()
49
third_party/gyp/flock_tool.py
vendored
Executable file
@@ -0,0 +1,49 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""These functions are executed via gyp-flock-tool when using the Makefile
generator.  Used on systems that don't have a built-in flock."""

import fcntl
import os
import struct
import subprocess
import sys


def main(args):
  executor = FlockTool()
  executor.Dispatch(args)


class FlockTool(object):
  """This class emulates the 'flock' command."""
  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist."""
    return name_string.title().replace('-', '')

  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    # Note that the stock python on SunOS has a bug
    # where fcntl.flock(fd, LOCK_EX) always fails
    # with EBADF, that's why we use this F_SETLK
    # hack instead.
    fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
    op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
    fcntl.fcntl(fd, fcntl.F_SETLK, op)
    return subprocess.call(cmd_list)
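
# Invoked via the generated gyp-flock-tool wrapper, e.g. (hypothetical paths):
#   gyp-flock-tool flock /tmp/build.lock make all
# which dispatches to ExecFlock('/tmp/build.lock', 'make', 'all').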


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
1
third_party/gyp/generator/__init__.py
vendored
Normal file
@@ -0,0 +1 @@
# dummy
1069
third_party/gyp/generator/android.py
vendored
Normal file
File diff suppressed because it is too large
1143
third_party/gyp/generator/cmake.py
vendored
Normal file
File diff suppressed because it is too large
81
third_party/gyp/generator/dump_dependency_json.py
vendored
Normal file
@@ -0,0 +1,81 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import os
import gyp
import gyp.common
import gyp.msvs_emulation
import json
import sys

generator_supports_multiple_toolsets = True

generator_wants_static_library_dependencies_adjusted = False

generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
                'LIB_DIR', 'SHARED_LIB_DIR']:
  # Some gyp steps fail if these are empty(!).
  generator_default_variables[dirname] = 'dir'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'CONFIGURATION_NAME']:
  generator_default_variables[unused] = ''


def CalculateVariables(default_variables, params):
  generator_flags = params.get('generator_flags', {})
  for key, val in generator_flags.items():
    default_variables.setdefault(key, val)
  default_variables.setdefault('OS', gyp.common.GetFlavor(params))

  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  generator_flags = params.get('generator_flags', {})
  if generator_flags.get('adjust_static_libraries', False):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True


def GenerateOutput(target_list, target_dicts, data, params):
  # Map of target -> list of targets it depends on.
  edges = {}

  # Queue of targets to visit.
  targets_to_visit = target_list[:]

  while len(targets_to_visit) > 0:
    target = targets_to_visit.pop()
    if target in edges:
      continue
    edges[target] = []

    for dep in target_dicts[target].get('dependencies', []):
      edges[target].append(dep)
      targets_to_visit.append(dep)

  filename = 'dump.json'
  f = open(filename, 'w')
  json.dump(edges, f)
  f.close()
  print 'Wrote json to %s.' % filename
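
# The resulting dump.json maps each qualified target to its direct
# dependencies, e.g. (hypothetical targets):
#   {"src/base.gyp:base#target": ["src/foo.gyp:foo#target"]}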
335
third_party/gyp/generator/eclipse.py
vendored
Normal file
@@ -0,0 +1,335 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""GYP backend that generates Eclipse CDT settings files.

This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
files that can be imported into an Eclipse CDT project. The XML file contains a
list of include paths and symbols (i.e. defines).

Because a full .cproject definition is not created by this generator, it's not
possible to properly define the include dirs and symbols for each file
individually. Instead, one set of includes/symbols is generated for the entire
project. This works fairly well (and is a vast improvement in general), but may
still result in a few indexer issues here and there.

This generator has no automated tests, so expect it to be broken.
"""

from xml.sax.saxutils import escape
import os.path
import subprocess
import gyp
import gyp.common
import gyp.msvs_emulation
import shlex

generator_wants_static_library_dependencies_adjusted = False

generator_default_variables = {
}

for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
  # Some gyp steps fail if these are empty(!).
  generator_default_variables[dirname] = 'dir'

for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
               'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
               'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
               'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
               'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
               'CONFIGURATION_NAME']:
  generator_default_variables[unused] = ''

# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
# part of the path when dealing with generated headers. This value will be
# replaced dynamically for each configuration.
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
    '$SHARED_INTERMEDIATE_DIR'

def CalculateVariables(default_variables, params):
  generator_flags = params.get('generator_flags', {})
  for key, val in generator_flags.items():
    default_variables.setdefault(key, val)
  flavor = gyp.common.GetFlavor(params)
  default_variables.setdefault('OS', flavor)
  if flavor == 'win':
    # Copy additional generator configuration data from VS, which is shared
    # by the Eclipse generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)


def CalculateGeneratorInputInfo(params):
  """Calculate the generator specific info that gets fed to input (called by
  gyp)."""
  generator_flags = params.get('generator_flags', {})
  if generator_flags.get('adjust_static_libraries', False):
    global generator_wants_static_library_dependencies_adjusted
    generator_wants_static_library_dependencies_adjusted = True

def GetAllIncludeDirectories(target_list, target_dicts,
                             shared_intermediate_dirs, config_name, params,
                             compiler_path):
  """Calculate the set of include directories to be used.

  Returns:
    A list including all the include_dir's specified for every target followed
    by any include directories that were added as cflag compiler options.
  """

  gyp_includes_set = set()
  compiler_includes_list = []

  # Find compiler's default include dirs.
  if compiler_path:
    command = shlex.split(compiler_path)
    command.extend(['-E', '-xc++', '-v', '-'])
    proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output = proc.communicate()[1]
    # Extract the list of include dirs from the output, which has this format:
    #   ...
    #   #include "..." search starts here:
    #   #include <...> search starts here:
    #    /usr/include/c++/4.6
    #    /usr/local/include
    #   End of search list.
    #   ...
    in_include_list = False
    for line in output.splitlines():
      if line.startswith('#include'):
        in_include_list = True
        continue
      if line.startswith('End of search list.'):
        break
      if in_include_list:
        include_dir = line.strip()
        if include_dir not in compiler_includes_list:
          compiler_includes_list.append(include_dir)

  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]
    if config_name in target['configurations']:
      config = target['configurations'][config_name]

      # Look for any include dirs that were explicitly added via cflags. This
      # may be done in gyp files to force certain includes to come at the end.
      # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
      # remove this.
      if flavor == 'win':
        msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
        cflags = msvs_settings.GetCflags(config_name)
      else:
        cflags = config['cflags']
      for cflag in cflags:
        if cflag.startswith('-I'):
          include_dir = cflag[2:]
          if include_dir not in compiler_includes_list:
            compiler_includes_list.append(include_dir)

      # Find standard gyp include dirs.
      if config.has_key('include_dirs'):
        include_dirs = config['include_dirs']
        for shared_intermediate_dir in shared_intermediate_dirs:
          for include_dir in include_dirs:
            include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
                                              shared_intermediate_dir)
            if not os.path.isabs(include_dir):
              base_dir = os.path.dirname(target_name)

              include_dir = base_dir + '/' + include_dir
              include_dir = os.path.abspath(include_dir)

            gyp_includes_set.add(include_dir)

  # Generate a list that has all the include dirs.
  all_includes_list = list(gyp_includes_set)
  all_includes_list.sort()
  for compiler_include in compiler_includes_list:
    if not compiler_include in gyp_includes_set:
      all_includes_list.append(compiler_include)

  # All done.
  return all_includes_list

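# Example (illustrative sketch, not part of the vendored gyp source): the same
# "-E -xc++ -v" probe used above, shown standalone. The compiler preprocesses
# empty input verbosely and prints its search path on stderr between the
# "#include" markers and "End of search list.". The default compiler name is
# an assumption.
def _example_compiler_include_dirs(compiler='g++'):
  proc = subprocess.Popen([compiler, '-E', '-xc++', '-v', '-'],
                          stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  stderr = proc.communicate('')[1]
  dirs = []
  collecting = False
  for line in stderr.splitlines():
    if line.startswith('#include'):
      collecting = True
    elif line.startswith('End of search list.'):
      break
    elif collecting:
      dirs.append(line.strip())
  return dirs
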
def GetCompilerPath(target_list, data):
  """Determine a command that can be used to invoke the compiler.

  Returns:
    If this is a gyp project that has explicit make settings, try to determine
    the compiler from that. Otherwise, see if a compiler was specified via the
    CC_target environment variable.
  """

  # First, see if the compiler is configured in make's settings.
  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings_dict = data[build_file].get('make_global_settings', {})
  for key, value in make_global_settings_dict:
    if key in ['CC', 'CXX']:
      return value

  # Check to see if the compiler was specified as an environment variable.
  for key in ['CC_target', 'CC', 'CXX']:
    compiler = os.environ.get(key)
    if compiler:
      return compiler

  return 'gcc'

def GetAllDefines(target_list, target_dicts, data, config_name, params,
                  compiler_path):
  """Calculate the defines for a project.

  Returns:
    A dict that includes explicit defines declared in gyp files along with all
    of the default defines that the compiler uses.
  """

  # Get defines declared in the gyp files.
  all_defines = {}
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'win':
    generator_flags = params.get('generator_flags', {})
  for target_name in target_list:
    target = target_dicts[target_name]

    if flavor == 'win':
      msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
      extra_defines = msvs_settings.GetComputedDefines(config_name)
    else:
      extra_defines = []
    if config_name in target['configurations']:
      config = target['configurations'][config_name]
      target_defines = config['defines']
    else:
      target_defines = []
    for define in target_defines + extra_defines:
      split_define = define.split('=', 1)
      if len(split_define) == 1:
        split_define.append('1')
      if split_define[0].strip() in all_defines:
        # Already defined
        continue
      all_defines[split_define[0].strip()] = split_define[1].strip()
  # Get default compiler defines (if possible).
  if flavor == 'win':
    return all_defines  # Default defines already processed in the loop above.
  if compiler_path:
    command = shlex.split(compiler_path)
    command.extend(['-E', '-dM', '-'])
    cpp_proc = subprocess.Popen(args=command, cwd='.',
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    cpp_output = cpp_proc.communicate()[0]
    cpp_lines = cpp_output.split('\n')
    for cpp_line in cpp_lines:
      if not cpp_line.strip():
        continue
      cpp_line_parts = cpp_line.split(' ', 2)
      key = cpp_line_parts[1]
      if len(cpp_line_parts) >= 3:
        val = cpp_line_parts[2]
      else:
        val = '1'
      all_defines[key] = val

  return all_defines

def WriteIncludePaths(out, eclipse_langs, include_dirs):
  """Write the includes section of a CDT settings export file."""

  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.IncludePaths">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    for include_dir in include_dirs:
      out.write('      <includepath workspace_path="false">%s</includepath>\n' %
                include_dir)
    out.write('    </language>\n')
  out.write('  </section>\n')


def WriteMacros(out, eclipse_langs, defines):
  """Write the macros section of a CDT settings export file."""

  out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.' \
            'settingswizards.Macros">\n')
  out.write('    <language name="holder for library settings"></language>\n')
  for lang in eclipse_langs:
    out.write('    <language name="%s">\n' % lang)
    for key in sorted(defines.iterkeys()):
      out.write('      <macro><name>%s</name><value>%s</value></macro>\n' %
                (escape(key), escape(defines[key])))
    out.write('    </language>\n')
  out.write('  </section>\n')

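# For reference, the two writers above emit sections of this shape (the
# include path and macro values here are hypothetical):
#
#   <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths">
#     <language name="holder for library settings"></language>
#     <language name="GNU C++">
#       <includepath workspace_path="false">/usr/include</includepath>
#     </language>
#   </section>
#   <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">
#     <language name="holder for library settings"></language>
#     <language name="GNU C++">
#       <macro><name>NDEBUG</name><value>1</value></macro>
#     </language>
#   </section>
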
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  options = params['options']
  generator_flags = params.get('generator_flags', {})

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
                           config_name)

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
  # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
  # SHARED_INTERMEDIATE_DIR. Include both possible locations.
  shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                              os.path.join(toplevel_build, 'gen')]

  out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
  gyp.common.EnsureDirExists(out_name)
  out = open(out_name, 'w')

  out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
  out.write('<cdtprojectproperties>\n')

  eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                   'GNU C++', 'GNU C', 'Assembly']
  compiler_path = GetCompilerPath(target_list, data)
  include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
                                          shared_intermediate_dirs, config_name,
                                          params, compiler_path)
  WriteIncludePaths(out, eclipse_langs, include_dirs)
  defines = GetAllDefines(target_list, target_dicts, data, config_name, params,
                          compiler_path)
  WriteMacros(out, eclipse_langs, defines)

  out.write('</cdtprojectproperties>\n')
  out.close()

def GenerateOutput(target_list, target_dicts, data, params):
  """Generate an XML settings file that can be imported into a CDT project."""

  if params['options'].generator_output:
    raise NotImplementedError, "--generator_output not implemented for eclipse"

  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    for config_name in config_names:
      GenerateOutputForConfig(target_list, target_dicts, data, params,
                              config_name)

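# A hypothetical end-to-end use of this generator (flag names follow the
# generator_flags read above; the .gyp file name and paths are assumptions):
#
#   gyp --depth=. -f eclipse -G output_dir=out foo.gyp
#
# writes out/Debug/eclipse-cdt-settings.xml (one file per configuration),
# which can then be imported into an Eclipse CDT project, typically via
# Project Properties > C/C++ General > Paths and Symbols > Import Settings.
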
87
third_party/gyp/generator/gypd.py
vendored
Normal file
@@ -0,0 +1,87 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gypd output module

This module produces gyp input as its output. Output files are given the
.gypd extension to avoid overwriting the .gyp files that they are generated
from. Internal references to .gyp files (such as those found in
"dependencies" sections) are not adjusted to point to .gypd files instead;
unlike other paths, which are relative to the .gyp or .gypd file, such paths
are relative to the directory from which gyp was run to create the .gypd file.

This generator module is intended to be a sample and a debugging aid, hence
the "d" for "debug" in .gypd. It is useful to inspect the results of the
various merges, expansions, and conditional evaluations performed by gyp
and to see a representation of what would be fed to a generator module.

It's not advisable to rename .gypd files produced by this module to .gyp,
because they will have all merges, expansions, and evaluations already
performed and the relevant constructs not present in the output; paths to
dependencies may be wrong; and various sections that do not belong in .gyp
files such as "included_files" and "*_excluded" will be present.
Output will also be stripped of comments. This is not intended to be a
general-purpose gyp pretty-printer; for that, you probably just want to
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
comments but won't do all of the other things done to this module's output.

The specific formatting of the output generated by this module is subject
to change.
"""


import gyp.common
import errno
import os
import pprint


# These variables should just be spit back out as variable references.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# gypd doesn't define a default value for OS like many other generator
# modules. Specify "-D OS=whatever" on the command line to provide a value.
generator_default_variables = {
}

# gypd supports multiple toolsets
generator_supports_multiple_toolsets = True

# TODO(mark): This always uses <, which isn't right. The input module should
# notify the generator to tell it which phase it is operating in, and this
# module should use < for the early phase and then switch to > for the late
# phase. Bonus points for carrying @ back into the output too.
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v

def GenerateOutput(target_list, target_dicts, data, params):
  output_files = {}
  for qualified_target in target_list:
    [input_file, target] = \
        gyp.common.ParseQualifiedTarget(qualified_target)[0:2]

    if input_file[-4:] != '.gyp':
      continue
    input_file_stem = input_file[:-4]
    output_file = input_file_stem + params['options'].suffix + '.gypd'

    if not output_file in output_files:
      output_files[output_file] = input_file

  for output_file, input_file in output_files.iteritems():
    output = open(output_file, 'w')
    pprint.pprint(data[input_file], output)
    output.close()

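# Example (illustrative sketch, not part of the vendored gyp source): a .gypd
# file is just a pretty-printed Python literal, so it can be read back with
# eval(), as the module docstring suggests. The path is hypothetical.
def _example_read_gypd(path='foo.gypd'):
  data = eval(open(path).read())
  pprint.pprint(sorted(data.keys()))
  return data
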
56
third_party/gyp/generator/gypsh.py
vendored
Normal file
@@ -0,0 +1,56 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gypsh output module

gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.

The expected usage is "gyp -f gypsh -D OS=desired_os".
"""


import code
import sys


# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

generator_default_variables = {
}

for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v


def GenerateOutput(target_list, target_dicts, data, params):
  locals = {
      'target_list': target_list,
      'target_dicts': target_dicts,
      'data': data,
  }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
         (sys.version, sys.platform, repr(sorted(locals.keys())))

  code.interact(banner, local=locals)

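# A hypothetical gypsh session (invoked as "gyp -f gypsh -D OS=linux"),
# using the locals injected above:
#
#   >>> target_list[0]
#   >>> target_dicts[target_list[0]].keys()
#   >>> data.keys()
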
2181
third_party/gyp/generator/make.py
vendored
Normal file
File diff suppressed because it is too large
3335
third_party/gyp/generator/msvs.py
vendored
Normal file
File diff suppressed because it is too large
37
third_party/gyp/generator/msvs_test.py
vendored
Executable file
@@ -0,0 +1,37 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the msvs.py file. """

import gyp.generator.msvs as msvs
import unittest
import StringIO


class TestSequenceFunctions(unittest.TestCase):

  def setUp(self):
    self.stderr = StringIO.StringIO()

  def test_GetLibraries(self):
    self.assertEqual(
      msvs._GetLibraries({}),
      [])
    self.assertEqual(
      msvs._GetLibraries({'libraries': []}),
      [])
    self.assertEqual(
      msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
      ['a.lib'])
    self.assertEqual(
      msvs._GetLibraries({'libraries': ['-la']}),
      ['a.lib'])
    self.assertEqual(
      msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
                                        '-lb.lib', 'd.lib', 'a.lib']}),
      ['c.lib', 'b.lib', 'd.lib', 'a.lib'])

if __name__ == '__main__':
  unittest.main()
2156
third_party/gyp/generator/ninja.py
vendored
Normal file
File diff suppressed because it is too large
44
third_party/gyp/generator/ninja_test.py
vendored
Normal file
@@ -0,0 +1,44 @@
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the ninja.py file. """

import gyp.generator.ninja as ninja
import unittest
import StringIO
import sys
import TestCommon


class TestPrefixesAndSuffixes(unittest.TestCase):
  def test_BinaryNamesWindows(self):
    writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
                               'build.ninja', 'win')
    spec = { 'target_name': 'wee' }
    self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
        endswith('.exe'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
        endswith('.dll'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
        endswith('.lib'))

  def test_BinaryNamesLinux(self):
    writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
                               'build.ninja', 'linux')
    spec = { 'target_name': 'wee' }
    self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
                                                            'executable'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
        startswith('lib'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
        startswith('lib'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
        endswith('.so'))
    self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
        endswith('.a'))

if __name__ == '__main__':
  unittest.main()
1224
third_party/gyp/generator/xcode.py
vendored
Normal file
File diff suppressed because it is too large
23
third_party/gyp/generator/xcode_test.py
vendored
Normal file
@@ -0,0 +1,23 @@
#!/usr/bin/env python

# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" Unit tests for the xcode.py file. """

import gyp.generator.xcode as xcode
import unittest
import sys


class TestEscapeXcodeDefine(unittest.TestCase):
  if sys.platform == 'darwin':
    def test_InheritedRemainsUnescaped(self):
      self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')

    def test_Escaping(self):
      self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')

if __name__ == '__main__':
  unittest.main()
2809
third_party/gyp/input.py
vendored
Normal file
File diff suppressed because it is too large
90
third_party/gyp/input_test.py
vendored
Executable file
@@ -0,0 +1,90 @@
#!/usr/bin/env python

# Copyright 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Unit tests for the input.py file."""

import gyp.input
import unittest
import sys


class TestFindCycles(unittest.TestCase):
  def setUp(self):
    self.nodes = {}
    for x in ('a', 'b', 'c', 'd', 'e'):
      self.nodes[x] = gyp.input.DependencyGraphNode(x)

  def _create_dependency(self, dependent, dependency):
    dependent.dependencies.append(dependency)
    dependency.dependents.append(dependent)

  def test_no_cycle_empty_graph(self):
    for label, node in self.nodes.iteritems():
      self.assertEquals([], node.FindCycles())

  def test_no_cycle_line(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['d'])

    for label, node in self.nodes.iteritems():
      self.assertEquals([], node.FindCycles())

  def test_no_cycle_dag(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['a'], self.nodes['c'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])

    for label, node in self.nodes.iteritems():
      self.assertEquals([], node.FindCycles())

  def test_cycle_self_reference(self):
    self._create_dependency(self.nodes['a'], self.nodes['a'])

    self.assertEquals([(self.nodes['a'], self.nodes['a'])],
                      self.nodes['a'].FindCycles())

  def test_cycle_two_nodes(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['a'])

    self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
                      self.nodes['a'].FindCycles())
    self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
                      self.nodes['b'].FindCycles())

  def test_two_cycles(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['a'])

    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['b'])

    cycles = self.nodes['a'].FindCycles()
    self.assertTrue(
        (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
    self.assertTrue(
        (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
    self.assertEquals(2, len(cycles))

  def test_big_cycle(self):
    self._create_dependency(self.nodes['a'], self.nodes['b'])
    self._create_dependency(self.nodes['b'], self.nodes['c'])
    self._create_dependency(self.nodes['c'], self.nodes['d'])
    self._create_dependency(self.nodes['d'], self.nodes['e'])
    self._create_dependency(self.nodes['e'], self.nodes['a'])

    self.assertEquals([(self.nodes['a'],
                        self.nodes['b'],
                        self.nodes['c'],
                        self.nodes['d'],
                        self.nodes['e'],
                        self.nodes['a'])],
                      self.nodes['a'].FindCycles())


if __name__ == '__main__':
  unittest.main()
510
third_party/gyp/mac_tool.py
vendored
Executable file
@@ -0,0 +1,510 @@
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions to perform Xcode-style build steps.

These functions are executed via gyp-mac-tool when using the Makefile generator.
"""

import fcntl
import fnmatch
import glob
import json
import os
import plistlib
import re
import shutil
import string
import subprocess
import sys
import tempfile


def main(args):
  executor = MacTool()
  exit_code = executor.Dispatch(args)
  if exit_code is not None:
    sys.exit(exit_code)


class MacTool(object):
  """This class performs all the Mac tooling steps. The methods can either be
  executed directly, or dispatched from an argument list."""

  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    return getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
    return name_string.title().replace('-', '')

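  # Dispatch maps the hyphenated tool name from the command line onto an
  # Exec* method; e.g. a hypothetical invocation
  #   gyp-mac-tool copy-info-plist Info.plist out/Info.plist
  # becomes self.ExecCopyInfoPlist('Info.plist', 'out/Info.plist'), since
  # 'copy-info-plist'.title().replace('-', '') == 'CopyInfoPlist'.
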
  def ExecCopyBundleResource(self, source, dest):
    """Copies a resource file to the bundle/Resources directory, performing any
    necessary compilation on each resource."""
    extension = os.path.splitext(source)[1].lower()
    if os.path.isdir(source):
      # Copy tree.
      # TODO(thakis): This copies file attributes like mtime, while the
      # single-file branch below doesn't. This should probably be changed to
      # be consistent with the single-file branch.
      if os.path.exists(dest):
        shutil.rmtree(dest)
      shutil.copytree(source, dest)
    elif extension == '.xib':
      return self._CopyXIBFile(source, dest)
    elif extension == '.storyboard':
      return self._CopyXIBFile(source, dest)
    elif extension == '.strings':
      self._CopyStringsFile(source, dest)
    else:
      shutil.copy(source, dest)

  def _CopyXIBFile(self, source, dest):
    """Compiles a XIB file with ibtool into a binary plist in the bundle."""

    # ibtool sometimes crashes with relative paths. See crbug.com/314728.
    base = os.path.dirname(os.path.realpath(__file__))
    if os.path.relpath(source):
      source = os.path.join(base, source)
    if os.path.relpath(dest):
      dest = os.path.join(base, dest)

    args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
        '--output-format', 'human-readable-text', '--compile', dest, source]
    ibtool_section_re = re.compile(r'/\*.*\*/')
    ibtool_re = re.compile(r'.*note:.*is clipping its content')
    ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
    current_section_header = None
    for line in ibtoolout.stdout:
      if ibtool_section_re.match(line):
        current_section_header = line
      elif not ibtool_re.match(line):
        if current_section_header:
          sys.stdout.write(current_section_header)
          current_section_header = None
        sys.stdout.write(line)
    return ibtoolout.returncode

  def _CopyStringsFile(self, source, dest):
    """Copies a .strings file using iconv to reconvert the input into UTF-16."""
    input_code = self._DetectInputEncoding(source) or "UTF-8"

    # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
    # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
    #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
    #     semicolon in dictionary.
    # on invalid files. Do the same kind of validation.
    import CoreFoundation
    s = open(source, 'rb').read()
    d = CoreFoundation.CFDataCreate(None, s, len(s))
    _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
    if error:
      return

    fp = open(dest, 'wb')
    fp.write(s.decode(input_code).encode('UTF-16'))
    fp.close()

  def _DetectInputEncoding(self, file_name):
    """Reads the first few bytes from file_name and tries to guess the text
    encoding. Returns None as a guess if it can't detect it."""
    fp = open(file_name, 'rb')
    try:
      header = fp.read(3)
    except Exception:
      fp.close()
      return None
    fp.close()
    if header.startswith("\xFE\xFF"):
      return "UTF-16"
    elif header.startswith("\xFF\xFE"):
      return "UTF-16"
    elif header.startswith("\xEF\xBB\xBF"):
      return "UTF-8"
    else:
      return None

  def ExecCopyInfoPlist(self, source, dest, *keys):
    """Copies the |source| Info.plist to the destination directory |dest|."""
    # Read the source Info.plist into memory.
    fd = open(source, 'r')
    lines = fd.read()
    fd.close()

    # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
    plist = plistlib.readPlistFromString(lines)
    if keys:
      plist = dict(plist.items() + json.loads(keys[0]).items())
    lines = plistlib.writePlistToString(plist)

    # Go through all the environment variables and replace them as variables in
    # the file.
    IDENT_RE = re.compile('[/\s]')
    for key in os.environ:
      if key.startswith('_'):
        continue
      evar = '${%s}' % key
      evalue = os.environ[key]
      lines = string.replace(lines, evar, evalue)

      # Xcode supports various suffixes on environment variables, which are
      # all undocumented. :rfc1034identifier is used in the standard project
      # template these days, and :identifier was used earlier. They are used to
      # convert non-url characters into things that look like valid urls --
      # except that the replacement character for :identifier, '_' isn't valid
      # in a URL either -- oops, hence :rfc1034identifier was born.
      evar = '${%s:identifier}' % key
      evalue = IDENT_RE.sub('_', os.environ[key])
      lines = string.replace(lines, evar, evalue)

      evar = '${%s:rfc1034identifier}' % key
      evalue = IDENT_RE.sub('-', os.environ[key])
      lines = string.replace(lines, evar, evalue)

    # Remove any keys with values that haven't been replaced.
    lines = lines.split('\n')
    for i in range(len(lines)):
      if lines[i].strip().startswith("<string>${"):
        lines[i] = None
        lines[i - 1] = None
    lines = '\n'.join(filter(lambda x: x is not None, lines))

    # Write out the file with variables replaced.
    fd = open(dest, 'w')
    fd.write(lines)
    fd.close()

    # Now write out PkgInfo file now that the Info.plist file has been
    # "compiled".
    self._WritePkgInfo(dest)

  def _WritePkgInfo(self, info_plist):
    """This writes the PkgInfo file from the data stored in Info.plist."""
    plist = plistlib.readPlist(info_plist)
    if not plist:
      return

    # Only create PkgInfo for executable types.
    package_type = plist['CFBundlePackageType']
    if package_type != 'APPL':
      return

    # The format of PkgInfo is eight characters, representing the bundle type
    # and bundle signature, each four characters. If that is missing, four
    # '?' characters are used instead.
    signature_code = plist.get('CFBundleSignature', '????')
    if len(signature_code) != 4:  # Wrong length resets everything, too.
      signature_code = '?' * 4

    dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
    fp = open(dest, 'w')
    fp.write('%s%s' % (package_type, signature_code))
    fp.close()

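  # For the suffix handling in ExecCopyInfoPlist above: with a hypothetical
  # PRODUCT_NAME of 'My App/Beta',
  #   ${PRODUCT_NAME}                   -> 'My App/Beta'
  #   ${PRODUCT_NAME:identifier}        -> 'My_App_Beta' (IDENT_RE -> '_')
  #   ${PRODUCT_NAME:rfc1034identifier} -> 'My-App-Beta' (IDENT_RE -> '-')
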
  def ExecFlock(self, lockfile, *cmd_list):
    """Emulates the most basic behavior of Linux's flock(1)."""
    # Rely on exception handling to report errors.
    fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
    fcntl.flock(fd, fcntl.LOCK_EX)
    return subprocess.call(cmd_list)

  def ExecFilterLibtool(self, *cmd_list):
    """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
    symbols'."""
    libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
    _, err = libtoolout.communicate()
    for line in err.splitlines():
      if not libtool_re.match(line):
        print >>sys.stderr, line
    return libtoolout.returncode

  def ExecPackageFramework(self, framework, version):
    """Takes a path to Something.framework and the Current version of that and
    sets up all the symlinks."""
    # Find the name of the binary based on the part before the ".framework".
    binary = os.path.basename(framework).split('.')[0]

    CURRENT = 'Current'
    RESOURCES = 'Resources'
    VERSIONS = 'Versions'

    if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
      # Binary-less frameworks don't seem to contain symlinks (see e.g.
      # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
      return

    # Move into the framework directory to set the symlinks correctly.
    pwd = os.getcwd()
    os.chdir(framework)

    # Set up the Current version.
    self._Relink(version, os.path.join(VERSIONS, CURRENT))

    # Set up the root symlinks.
    self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
    self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)

    # Back to where we were before!
    os.chdir(pwd)

  def _Relink(self, dest, link):
    """Creates a symlink to |dest| named |link|. If |link| already exists,
    it is overwritten."""
    if os.path.lexists(link):
      os.remove(link)
    os.symlink(dest, link)

  def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
    """Code sign a bundle.

    This function tries to code sign an iOS bundle, following the same
    algorithm as Xcode:
      1. copy ResourceRules.plist from the user or the SDK into the bundle,
      2. pick the provisioning profile that best matches the bundle identifier,
         and copy it into the bundle as embedded.mobileprovision,
      3. copy Entitlements.plist from user or SDK next to the bundle,
      4. code sign the bundle.
    """
    resource_rules_path = self._InstallResourceRules(resource_rules)
    substitutions, overrides = self._InstallProvisioningProfile(
        provisioning, self._GetCFBundleIdentifier())
    entitlements_path = self._InstallEntitlements(
        entitlements, substitutions, overrides)
    subprocess.check_call([
        'codesign', '--force', '--sign', key, '--resource-rules',
        resource_rules_path, '--entitlements', entitlements_path,
        os.path.join(
            os.environ['TARGET_BUILD_DIR'],
            os.environ['FULL_PRODUCT_NAME'])])

  def _InstallResourceRules(self, resource_rules):
    """Installs ResourceRules.plist from user or SDK into the bundle.

    Args:
      resource_rules: string, optional, path to the ResourceRules.plist file
        to use, default to "${SDKROOT}/ResourceRules.plist"

    Returns:
      Path to the copy of ResourceRules.plist into the bundle.
    """
    source_path = resource_rules
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'ResourceRules.plist')
    if not source_path:
      source_path = os.path.join(
          os.environ['SDKROOT'], 'ResourceRules.plist')
    shutil.copy2(source_path, target_path)
    return target_path

  def _InstallProvisioningProfile(self, profile, bundle_identifier):
    """Installs embedded.mobileprovision into the bundle.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist

    Returns:
      A tuple containing two dictionaries: variable substitutions and values
      to override when generating the entitlements file.
    """
    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
        profile, bundle_identifier)
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['CONTENTS_FOLDER_PATH'],
        'embedded.mobileprovision')
    shutil.copy2(source_path, target_path)
    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
    return substitutions, provisioning_data['Entitlements']

  def _FindProvisioningProfile(self, profile, bundle_identifier):
    """Finds the .mobileprovision file to use for signing the bundle.

    Checks all the installed provisioning profiles (or, if the user specified
    the PROVISIONING_PROFILE variable, only consults that one) and selects the
    most specific one that corresponds to the bundle identifier.

    Args:
      profile: string, optional, short name of the .mobileprovision file
        to use, if empty or the file is missing, the best file installed
        will be used
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist

    Returns:
      A tuple of the path to the selected provisioning profile, the data of
      the embedded plist in the provisioning profile and the team identifier
      to use for code signing.

    Raises:
      SystemExit: if no .mobileprovision can be used to sign the bundle.
    """
    profiles_dir = os.path.join(
        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
    if not os.path.isdir(profiles_dir):
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    provisioning_profiles = None
    if profile:
      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
      if os.path.exists(profile_path):
        provisioning_profiles = [profile_path]
    if not provisioning_profiles:
      provisioning_profiles = glob.glob(
          os.path.join(profiles_dir, '*.mobileprovision'))
    valid_provisioning_profiles = {}
    for profile_path in provisioning_profiles:
      profile_data = self._LoadProvisioningProfile(profile_path)
      app_id_pattern = profile_data.get(
          'Entitlements', {}).get('application-identifier', '')
      for team_identifier in profile_data.get('TeamIdentifier', []):
        app_id = '%s.%s' % (team_identifier, bundle_identifier)
        if fnmatch.fnmatch(app_id, app_id_pattern):
          valid_provisioning_profiles[app_id_pattern] = (
              profile_path, profile_data, team_identifier)
    if not valid_provisioning_profiles:
      print >>sys.stderr, (
          'cannot find mobile provisioning for %s' % bundle_identifier)
      sys.exit(1)
    # If the user has multiple provisioning profiles installed that can be
    # used for ${bundle_identifier}, pick the most specific one (ie. the
    # provisioning profile whose pattern is the longest).
    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
    return valid_provisioning_profiles[selected_key]

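  # The fnmatch test above matches the bundle's full application identifier
  # against the profile's (possibly wildcard) pattern; with a hypothetical
  # team id and bundle id:
  #   fnmatch.fnmatch('ABCDE12345.com.example.app', 'ABCDE12345.*')         -> True
  #   fnmatch.fnmatch('ABCDE12345.com.example.app', 'ABCDE12345.com.other') -> False
  # Longer (more specific) matching patterns win via the max() call above.
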
  def _LoadProvisioningProfile(self, profile_path):
    """Extracts the plist embedded in a provisioning profile.

    Args:
      profile_path: string, path to the .mobileprovision file

    Returns:
      Content of the plist embedded in the provisioning profile as a dictionary.
    """
    with tempfile.NamedTemporaryFile() as temp:
      subprocess.check_call([
          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
      return self._LoadPlistMaybeBinary(temp.name)

  def _LoadPlistMaybeBinary(self, plist_path):
    """Loads into memory a plist possibly encoded in binary format.

    This is a wrapper around plistlib.readPlist that tries to convert the
    plist to the XML format if it can't be parsed (assuming that it is in
    the binary format).

    Args:
      plist_path: string, path to a plist file, in XML or binary format

    Returns:
      Content of the plist as a dictionary.
    """
    try:
      # First, try to read the file using plistlib that only supports XML,
      # and if an exception is raised, convert a temporary copy to XML and
      # load that copy.
      return plistlib.readPlist(plist_path)
    except:
      pass
    with tempfile.NamedTemporaryFile() as temp:
      shutil.copy2(plist_path, temp.name)
      subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
      return plistlib.readPlist(temp.name)

  def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
    """Constructs a dictionary of variable substitutions for Entitlements.plist.

    Args:
      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
      app_identifier_prefix: string, value for AppIdentifierPrefix

    Returns:
      Dictionary of substitutions to apply when generating Entitlements.plist.
    """
    return {
        'CFBundleIdentifier': bundle_identifier,
        'AppIdentifierPrefix': app_identifier_prefix,
    }

  def _GetCFBundleIdentifier(self):
    """Extracts CFBundleIdentifier value from Info.plist in the bundle.

    Returns:
      Value of CFBundleIdentifier in the Info.plist located in the bundle.
    """
    info_plist_path = os.path.join(
        os.environ['TARGET_BUILD_DIR'],
        os.environ['INFOPLIST_PATH'])
    info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
    return info_plist_data['CFBundleIdentifier']

  def _InstallEntitlements(self, entitlements, substitutions, overrides):
    """Generates and installs the ${BundleName}.xcent entitlements file.

    Expands the "$(variable)" patterns in the source entitlements file, adds
    the extra entitlements defined in the .mobileprovision file and copies
    the generated plist to "${BundlePath}.xcent".

    Args:
      entitlements: string, optional, path to the Entitlements.plist template
        to use, defaults to "${SDKROOT}/Entitlements.plist"
      substitutions: dictionary, variable substitutions
      overrides: dictionary, values to add to the entitlements

    Returns:
      Path to the generated entitlements file.
    """
    source_path = entitlements
    target_path = os.path.join(
        os.environ['BUILT_PRODUCTS_DIR'],
        os.environ['PRODUCT_NAME'] + '.xcent')
    if not source_path:
      source_path = os.path.join(
          os.environ['SDKROOT'],
          'Entitlements.plist')
    shutil.copy2(source_path, target_path)
    data = self._LoadPlistMaybeBinary(target_path)
    data = self._ExpandVariables(data, substitutions)
    if overrides:
      for key in overrides:
        if key not in data:
          data[key] = overrides[key]
    plistlib.writePlist(data, target_path)
    return target_path

  def _ExpandVariables(self, data, substitutions):
    """Expands variables "$(variable)" in data.

    Args:
      data: object, can be either string, list or dictionary
      substitutions: dictionary, variable substitutions to perform

    Returns:
      Copy of data where each reference to "$(variable)" has been replaced
      by the corresponding value found in substitutions, or left intact if
      the key was not found.
    """
    if isinstance(data, str):
      for key, value in substitutions.iteritems():
        data = data.replace('$(%s)' % key, value)
      return data
    if isinstance(data, list):
      return [self._ExpandVariables(v, substitutions) for v in data]
    if isinstance(data, dict):
      return {k: self._ExpandVariables(data[k], substitutions) for k in data}
    return data


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
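
# Example (illustrative sketch, not part of the vendored gyp source):
# MacTool._ExpandVariables walks strings, lists and dicts, replacing
# "$(name)" references from the substitutions dict and leaving unknown
# references intact. Values below are hypothetical.
def _example_expand_variables():
  tool = MacTool()
  subs = {'CFBundleIdentifier': 'com.example.app'}
  data = {'application-identifier': 'PREFIX.$(CFBundleIdentifier)',
          'unknown': '$(NotDefined)'}
  expanded = tool._ExpandVariables(data, subs)
  # expanded == {'application-identifier': 'PREFIX.com.example.app',
  #              'unknown': '$(NotDefined)'}
  return expanded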
972
third_party/gyp/msvs_emulation.py
vendored
Normal file
@@ -0,0 +1,972 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""

import os
import re
import subprocess
import sys

import gyp.MSVSVersion

windows_quoter_regex = re.compile(r'(\\*)"')

def QuoteForRspFile(arg):
  """Quote a command line argument so that it appears as one argument when
  processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
  Windows programs)."""
  # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
  # threads. This is actually the quoting rules for CommandLineToArgvW, not
  # for the shell, because the shell doesn't do anything in Windows. This
  # works more or less because most programs (including the compiler, etc.)
  # use that function to handle command line arguments.

  # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
  # preceding it, and results in n backslashes + the quote. So we substitute
  # in 2* what we match, +1 more, plus the quote.
  arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)

  # %'s also need to be doubled otherwise they're interpreted as batch
  # positional arguments. Also make sure to escape the % so that they're
  # passed literally through escaping so they can be singled to just the
  # original %. Otherwise, trying to pass the literal representation that
  # looks like an environment variable to the shell (e.g. %PATH%) would fail.
  arg = arg.replace('%', '%%')

  # These commands are used in rsp files, so no escaping for the shell (via ^)
  # is necessary.

  # Finally, wrap the whole thing in quotes so that the above quote rule
  # applies and whitespace isn't a word break.
  return '"' + arg + '"'

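# Worked example for the rules above (characters shown literally, without
# Python escaping). Under a hypothetical argument:
#   input:   she said "hi" and 100%
#   quoted:  "she said \"hi\" and 100%%"
# A backslash run before a quote is doubled before adding the escaping
# backslash, so a lone  \"  becomes  \\\"  (2*1+1 = 3 backslashes + quote),
# which CommandLineToArgvW reads back as one backslash and a literal quote.
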
|
||||
def EncodeRspFileList(args):
|
||||
"""Process a list of arguments using QuoteCmdExeArgument."""
|
||||
# Note that the first argument is assumed to be the command. Don't add
|
||||
# quotes around it because then built-ins like 'echo', etc. won't work.
|
||||
# Take care to normpath only the path in the case of 'call ../x.bat' because
|
||||
# otherwise the whole thing is incorrectly interpreted as a path and not
|
||||
# normalized correctly.
|
||||
if not args: return ''
|
||||
if args[0].startswith('call '):
|
||||
call, program = args[0].split(' ', 1)
|
||||
program = call + ' ' + os.path.normpath(program)
|
||||
else:
|
||||
program = os.path.normpath(args[0])
|
||||
return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
|
||||
|
||||
|
||||
def _GenericRetrieve(root, default, path):
  """Given a list of dictionary keys |path| and a tree of dicts |root|, find
  value at path, or return |default| if any of the path doesn't exist."""
  if not root:
    return default
  if not path:
    return root
  return _GenericRetrieve(root.get(path[0]), default, path[1:])


def _AddPrefix(element, prefix):
  """Add |prefix| to |element| or each subelement if element is iterable."""
  if element is None:
    return element
  # Note, not Iterable because we don't want to handle strings like that.
  if isinstance(element, list) or isinstance(element, tuple):
    return [prefix + e for e in element]
  else:
    return prefix + element


def _DoRemapping(element, map):
  """If |element| then remap it through |map|. If |element| is iterable then
  each item will be remapped. Any elements not found will be removed."""
  if map is not None and element is not None:
    if not callable(map):
      map = map.get  # Assume it's a dict, otherwise a callable to do the remap.
    if isinstance(element, list) or isinstance(element, tuple):
      element = filter(None, [map(elem) for elem in element])
    else:
      element = map(element)
  return element


def _AppendOrReturn(append, element):
  """If |append| is None, simply return |element|. If |append| is not None,
  then add |element| to it, adding each item in |element| if it's a list or
  tuple."""
  if append is not None and element is not None:
    if isinstance(element, list) or isinstance(element, tuple):
      append.extend(element)
    else:
      append.append(element)
  else:
    return element


def _FindDirectXInstallation():
  """Try to find an installation location for the DirectX SDK. Check for the
  standard environment variable, and if that doesn't exist, try to find
  via the registry. May return None if not found in either location."""
  # Return previously calculated value, if there is one
  if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
    return _FindDirectXInstallation.dxsdk_dir

  dxsdk_dir = os.environ.get('DXSDK_DIR')
  if not dxsdk_dir:
    # Setup params to pass to and attempt to launch reg.exe.
    cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in p.communicate()[0].splitlines():
      if 'InstallPath' in line:
        dxsdk_dir = line.split(' ')[3] + "\\"

  # Cache return value
  _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
  return dxsdk_dir

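# Example (illustrative sketch, not part of the vendored gyp source):
# _GenericRetrieve walks nested dicts by key path; _AddPrefix and
# _DoRemapping post-process the result. The settings values below are
# hypothetical.
def _example_generic_retrieve():
  settings = {'VCCLCompilerTool': {'WarningLevel': '3'}}
  level = _GenericRetrieve(settings, None, ['VCCLCompilerTool', 'WarningLevel'])
  # level == '3'; a missing path falls back to the default:
  missing = _GenericRetrieve(settings, 'default', ['VCLinkerTool', 'OutputFile'])
  # missing == 'default'
  return level, missing

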
class MsvsSettings(object):
|
||||
"""A class that understands the gyp 'msvs_...' values (especially the
|
||||
msvs_settings field). They largely correpond to the VS2008 IDE DOM. This
|
||||
class helps map those settings to command line options."""
|
||||
|
||||
def __init__(self, spec, generator_flags):
|
||||
self.spec = spec
|
||||
self.vs_version = GetVSVersion(generator_flags)
|
||||
self.dxsdk_dir = _FindDirectXInstallation()
|
||||
|
||||
# Try to find an installation location for the Windows DDK by checking
|
||||
# the WDK_DIR environment variable, may be None.
|
||||
self.wdk_dir = os.environ.get('WDK_DIR')
|
||||
|
||||
supported_fields = [
|
||||
('msvs_configuration_attributes', dict),
|
||||
('msvs_settings', dict),
|
||||
('msvs_system_include_dirs', list),
|
||||
('msvs_disabled_warnings', list),
|
||||
('msvs_precompiled_header', str),
|
||||
('msvs_precompiled_source', str),
|
||||
('msvs_configuration_platform', str),
|
||||
('msvs_target_platform', str),
|
||||
]
|
||||
configs = spec['configurations']
|
||||
for field, default in supported_fields:
|
||||
setattr(self, field, {})
|
||||
for configname, config in configs.iteritems():
|
||||
getattr(self, field)[configname] = config.get(field, default())
|
||||
|
||||
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
|
||||
|
||||
unsupported_fields = [
|
||||
'msvs_prebuild',
|
||||
'msvs_postbuild',
|
||||
]
|
||||
unsupported = []
|
||||
for field in unsupported_fields:
|
||||
for config in configs.values():
|
||||
if field in config:
|
||||
unsupported += ["%s not supported (target %s)." %
|
||||
(field, spec['target_name'])]
|
||||
if unsupported:
|
||||
raise Exception('\n'.join(unsupported))
|
||||
|
||||
def GetVSMacroEnv(self, base_to_build=None, config=None):
|
||||
"""Get a dict of variables mapping internal VS macro names to their gyp
|
||||
equivalents."""
|
||||
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
|
||||
target_name = self.spec.get('product_prefix', '') + \
|
||||
self.spec.get('product_name', self.spec['target_name'])
|
||||
target_dir = base_to_build + '\\' if base_to_build else ''
|
||||
replacements = {
|
||||
'$(OutDir)\\': target_dir,
|
||||
'$(TargetDir)\\': target_dir,
|
||||
'$(IntDir)': '$!INTERMEDIATE_DIR',
|
||||
'$(InputPath)': '${source}',
|
||||
'$(InputName)': '${root}',
|
||||
'$(ProjectName)': self.spec['target_name'],
|
||||
'$(TargetName)': target_name,
|
||||
'$(PlatformName)': target_platform,
|
||||
'$(ProjectDir)\\': '',
|
||||
}
|
||||
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
|
||||
# Visual Studio is actually installed.
|
||||
if self.vs_version.Path():
|
||||
replacements['$(VSInstallDir)'] = self.vs_version.Path()
|
||||
replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(),
|
||||
'VC') + '\\'
|
||||
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
|
||||
# set. This happens when the SDK is sync'd via src-internal, rather than
|
||||
# by typical end-user installation of the SDK. If it's not set, we don't
|
||||
# want to leave the unexpanded variable in the path, so simply strip it.
|
||||
replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else ''
|
||||
replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else ''
|
||||
return replacements
|
||||
|
||||
def ConvertVSMacros(self, s, base_to_build=None, config=None):
|
||||
"""Convert from VS macro names to something equivalent."""
|
||||
env = self.GetVSMacroEnv(base_to_build, config=config)
|
||||
return ExpandMacros(s, env)
|
||||
|
||||
def AdjustLibraries(self, libraries):
|
||||
"""Strip -l from library if it's specified with that."""
|
||||
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
|
||||
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
|
||||
|
||||
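  # Illustrative sketch (not part of the vendored source): AdjustLibraries
  # normalizes gyp 'libraries' entries for MSVC. With hypothetical inputs:
  #   settings.AdjustLibraries(['-lkernel32', 'user32.lib', 'dbghelp'])
  #   => ['kernel32.lib', 'user32.lib', 'dbghelp.lib']
  # i.e. a '-l' prefix is stripped and '.lib' is appended when absent.
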
  def _GetAndMunge(self, field, path, default, prefix, append, map):
    """Retrieve a value from |field| at |path| or return |default|. If
    |append| is specified, and the item is found, it will be appended to that
    object instead of returned. If |map| is specified, results will be
    remapped through |map| before being returned or appended."""
    result = _GenericRetrieve(field, default, path)
    result = _DoRemapping(result, map)
    result = _AddPrefix(result, prefix)
    return _AppendOrReturn(append, result)

  class _GetWrapper(object):
    def __init__(self, parent, field, base_path, append=None):
      self.parent = parent
      self.field = field
      self.base_path = [base_path]
      self.append = append
    def __call__(self, name, map=None, prefix='', default=None):
      return self.parent._GetAndMunge(self.field, self.base_path + [name],
          default=default, prefix=prefix, append=self.append, map=map)

  def GetArch(self, config):
    """Get architecture based on msvs_configuration_platform and
    msvs_target_platform. Returns either 'x86' or 'x64'."""
    configuration_platform = self.msvs_configuration_platform.get(config, '')
    platform = self.msvs_target_platform.get(config, '')
    if not platform:  # If no specific override, use the configuration's.
      platform = configuration_platform
    # Map from platform to architecture.
    return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')

  def _TargetConfig(self, config):
    """Returns the target-specific configuration."""
    # There are two levels of architecture/platform specification in VS. The
    # first level is globally for the configuration (this is what we consider
    # "the" config at the gyp level, which will be something like 'Debug' or
    # 'Release_x64'), and a second target-specific configuration, which is an
    # override for the global one. |config| is remapped here to take into
    # account the local target-specific overrides to the global configuration.
    arch = self.GetArch(config)
    if arch == 'x64' and not config.endswith('_x64'):
      config += '_x64'
    if arch == 'x86' and config.endswith('_x64'):
      config = config.rsplit('_', 1)[0]
    return config

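  # Illustrative sketch (not part of the vendored source): for a hypothetical
  # target whose 'Debug' config sets msvs_target_platform to 'x64',
  #   settings.GetArch('Debug')        => 'x64'
  #   settings._TargetConfig('Debug')  => 'Debug_x64'
  # while 'Release_x64' is mapped back to 'Release' only when the effective
  # architecture works out to 'x86'.
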
  def _Setting(self, path, config,
               default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_settings."""
    return self._GetAndMunge(
        self.msvs_settings[config], path, default, prefix, append, map)

  def _ConfigAttrib(self, path, config,
                    default=None, prefix='', append=None, map=None):
    """_GetAndMunge for msvs_configuration_attributes."""
    return self._GetAndMunge(
        self.msvs_configuration_attributes[config],
        path, default, prefix, append, map)

  def AdjustIncludeDirs(self, include_dirs, config):
    """Updates include_dirs to expand VS specific paths, and adds the system
    include dirs used for platform SDK and similar."""
    config = self._TargetConfig(config)
    includes = include_dirs + self.msvs_system_include_dirs[config]
    includes.extend(self._Setting(
        ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
    return [self.ConvertVSMacros(p, config=config) for p in includes]

  def GetComputedDefines(self, config):
    """Returns the set of defines that are injected into the defines list
    based on other VS settings."""
    config = self._TargetConfig(config)
    defines = []
    if self._ConfigAttrib(['CharacterSet'], config) == '1':
      defines.extend(('_UNICODE', 'UNICODE'))
    if self._ConfigAttrib(['CharacterSet'], config) == '2':
      defines.append('_MBCS')
    defines.extend(self._Setting(
        ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
    return defines

  def GetCompilerPdbName(self, config, expand_special):
    """Get the pdb file name that should be used for compiler invocations, or
    None if there's no explicit name specified."""
    config = self._TargetConfig(config)
    pdbname = self._Setting(
        ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
    if pdbname:
      pdbname = expand_special(self.ConvertVSMacros(pdbname))
    return pdbname

  def GetMapFileName(self, config, expand_special):
    """Gets the explicitly overridden map file name for a target or returns
    None if it's not set."""
    config = self._TargetConfig(config)
    map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
    if map_file:
      map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
    return map_file

  def GetOutputName(self, config, expand_special):
    """Gets the explicitly overridden output name for a target or returns None
    if it's not overridden."""
    config = self._TargetConfig(config)
    type = self.spec['type']
    root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
    # TODO(scottmg): Handle OutputDirectory without OutputFile.
    output_file = self._Setting((root, 'OutputFile'), config)
    if output_file:
      output_file = expand_special(self.ConvertVSMacros(
          output_file, config=config))
    return output_file

  def GetPDBName(self, config, expand_special, default):
    """Gets the explicitly overridden pdb name for a target or returns
    default if it's not overridden, or if no pdb will be generated."""
    config = self._TargetConfig(config)
    output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
    generate_debug_info = self._Setting(
        ('VCLinkerTool', 'GenerateDebugInformation'), config)
    if generate_debug_info:
      if output_file:
        return expand_special(self.ConvertVSMacros(output_file, config=config))
      else:
        return default
    else:
      return None

  def GetCflags(self, config):
    """Returns the flags that need to be added to .c and .cc compilations."""
    config = self._TargetConfig(config)
    cflags = []
    cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
    cl = self._GetWrapper(self, self.msvs_settings[config],
                          'VCCLCompilerTool', append=cflags)
    cl('Optimization',
       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
    cl('InlineFunctionExpansion', prefix='/Ob')
    cl('DisableSpecificWarnings', prefix='/wd')
    cl('StringPooling', map={'true': '/GF'})
    cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
    cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
    cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
    cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
    cl('WholeProgramOptimization', map={'true': '/GL'})
    cl('WarningLevel', prefix='/W')
    cl('WarnAsError', map={'true': '/WX'})
    cl('DebugInformationFormat',
       map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
    cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
    cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
    cl('MinimalRebuild', map={'true': '/Gm'})
    cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
    cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
    cl('RuntimeLibrary',
       map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
    cl('ExceptionHandling', map={'1': 'sc', '2': 'a'}, prefix='/EH')
    cl('DefaultCharIsUnsigned', map={'true': '/J'})
    cl('TreatWChar_tAsBuiltInType',
       map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
    cl('EnablePREfast', map={'true': '/analyze'})
    cl('AdditionalOptions', prefix='')
    cflags.extend(['/FI' + f for f in self._Setting(
        ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
    if self.vs_version.short_name in ('2013', '2013e'):
      # New flag required in 2013 to maintain previous PDB behavior.
      cflags.append('/FS')
    # ninja handles parallelism by itself, don't have the compiler do it too.
    cflags = filter(lambda x: not x.startswith('/MP'), cflags)
    return cflags

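  # Illustrative sketch (not part of the vendored source): the _GetWrapper
  # calls above turn a VS DOM-ish setting into a flag. For a hypothetical
  # config with VCCLCompilerTool.Optimization = '0' and WarningLevel = '4',
  # GetCflags would emit '/Od' (map '0' -> 'd' under prefix '/O') and '/W4'
  # (the raw value appended to prefix '/W').
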
  def GetPrecompiledHeader(self, config, gyp_to_build_path):
    """Returns an object that handles the generation of precompiled header
    build steps."""
    config = self._TargetConfig(config)
    return _PchHelper(self, config, gyp_to_build_path)

  def _GetPchFlags(self, config, extension):
    """Get the flags to be added to the cflags for precompiled header support.
    """
    config = self._TargetConfig(config)
    # The PCH is only built once by a particular source file. Usage of PCH must
    # only be for the same language (i.e. C vs. C++), so only include the pch
    # flags when the language matches.
    if self.msvs_precompiled_header[config]:
      source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
      if _LanguageMatchesForPch(source_ext, extension):
        pch = os.path.split(self.msvs_precompiled_header[config])[1]
        return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
    return []

  def GetCflagsC(self, config):
    """Returns the flags that need to be added to .c compilations."""
    config = self._TargetConfig(config)
    return self._GetPchFlags(config, '.c')

  def GetCflagsCC(self, config):
    """Returns the flags that need to be added to .cc compilations."""
    config = self._TargetConfig(config)
    return ['/TP'] + self._GetPchFlags(config, '.cc')

  def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
    """Get and normalize the list of paths in AdditionalLibraryDirectories
    setting."""
    config = self._TargetConfig(config)
    libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
                             config, default=[])
    libpaths = [os.path.normpath(
                    gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
                for p in libpaths]
    return ['/LIBPATH:"' + p + '"' for p in libpaths]

  def GetLibFlags(self, config, gyp_to_build_path):
    """Returns the flags that need to be added to lib commands."""
    config = self._TargetConfig(config)
    libflags = []
    lib = self._GetWrapper(self, self.msvs_settings[config],
                           'VCLibrarianTool', append=libflags)
    libflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLibrarianTool', config, gyp_to_build_path))
    lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
    lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
    lib('AdditionalOptions')
    return libflags

  def GetDefFile(self, gyp_to_build_path):
    """Returns the .def file from sources, if any. Otherwise returns None."""
    spec = self.spec
    if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
      def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
      if len(def_files) == 1:
        return gyp_to_build_path(def_files[0])
      elif len(def_files) > 1:
        raise Exception("Multiple .def files")
    return None

  def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
    """.def files get implicitly converted to a ModuleDefinitionFile for the
    linker in the VS generator. Emulate that behaviour here."""
    def_file = self.GetDefFile(gyp_to_build_path)
    if def_file:
      ldflags.append('/DEF:"%s"' % def_file)

  def GetPGDName(self, config, expand_special):
    """Gets the explicitly overridden pgd name for a target or returns None
    if it's not overridden."""
    config = self._TargetConfig(config)
    output_file = self._Setting(
        ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
    if output_file:
      output_file = expand_special(self.ConvertVSMacros(
          output_file, config=config))
    return output_file

  def GetLdflags(self, config, gyp_to_build_path, expand_special,
                 manifest_base_name, output_name, is_executable, build_dir):
    """Returns the flags that need to be added to link commands, and the
    manifest files."""
    config = self._TargetConfig(config)
    ldflags = []
    ld = self._GetWrapper(self, self.msvs_settings[config],
                          'VCLinkerTool', append=ldflags)
    self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
    ld('GenerateDebugInformation', map={'true': '/DEBUG'})
    ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
    ldflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLinkerTool', config, gyp_to_build_path))
    ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
    ld('TreatLinkerWarningAsErrors', prefix='/WX',
       map={'true': '', 'false': ':NO'})
    out = self.GetOutputName(config, expand_special)
    if out:
      ldflags.append('/OUT:' + out)
    pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
    if pdb:
      ldflags.append('/PDB:' + pdb)
    pgd = self.GetPGDName(config, expand_special)
    if pgd:
      ldflags.append('/PGD:' + pgd)
    map_file = self.GetMapFileName(config, expand_special)
    ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
                               else '/MAP'})
    ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
    ld('AdditionalOptions', prefix='')

    minimum_required_version = self._Setting(
        ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
    if minimum_required_version:
      minimum_required_version = ',' + minimum_required_version
    ld('SubSystem',
       map={'1': 'CONSOLE%s' % minimum_required_version,
            '2': 'WINDOWS%s' % minimum_required_version},
       prefix='/SUBSYSTEM:')

    ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
    ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
    ld('BaseAddress', prefix='/BASE:')
    ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
    ld('RandomizedBaseAddress',
       map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
    ld('DataExecutionPrevention',
       map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
    ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
    ld('ForceSymbolReferences', prefix='/INCLUDE:')
    ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
    ld('LinkTimeCodeGeneration',
       map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
            '4': ':PGUPDATE'},
       prefix='/LTCG')
    ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
    ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
    ld('EntryPointSymbol', prefix='/ENTRY:')
    ld('Profile', map={'true': '/PROFILE'})
    ld('LargeAddressAware',
       map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
    # TODO(scottmg): This should sort of be somewhere else (not really a flag).
    ld('AdditionalDependencies', prefix='')

    # If the base address is not specifically controlled, DYNAMICBASE should
    # be on by default.
    base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
                        ldflags)
    if not base_flags:
      ldflags.append('/DYNAMICBASE')

    # If the NXCOMPAT flag has not been specified, default to on. Despite the
    # documentation that says this only defaults to on when the subsystem is
    # Vista or greater (which applies to the linker), the IDE defaults it on
    # unless it's explicitly off.
    if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
      ldflags.append('/NXCOMPAT')

    have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
    manifest_flags, intermediate_manifest, manifest_files = \
        self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
                                 is_executable and not have_def_file, build_dir)
    ldflags.extend(manifest_flags)
    return ldflags, intermediate_manifest, manifest_files

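  # Illustrative sketch (not part of the vendored source): for a hypothetical
  # executable config with VCLinkerTool.SubSystem = '1' and
  # MinimumRequiredVersion = '5.02', GetLdflags would include
  # '/SUBSYSTEM:CONSOLE,5.02', plus the '/DYNAMICBASE' and '/NXCOMPAT'
  # defaults appended above when no explicit base-address or NXCOMPAT
  # settings are present.
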
  def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
                          allow_isolation, build_dir):
    """Returns a 3-tuple:
    - the set of flags that need to be added to the link to generate
      a default manifest
    - the intermediate manifest that the linker will generate that should be
      used to assert it doesn't add anything to the merged one.
    - the list of all the manifest files to be merged by the manifest tool and
      included into the link."""
    generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
                                      config,
                                      default='true')
    if generate_manifest != 'true':
      # This means not only that the linker should not generate the intermediate
      # manifest but also that the manifest tool should do nothing even when
      # additional manifests are specified.
      return ['/MANIFEST:NO'], [], []

    output_name = name + '.intermediate.manifest'
    flags = [
      '/MANIFEST',
      '/ManifestFile:' + output_name,
    ]

    # Instead of using the MANIFESTUAC flags, we generate a .manifest to
    # include into the list of manifests. This allows us to avoid the need to
    # do two passes during linking. The /MANIFEST flag and /ManifestFile are
    # still used, and the intermediate manifest is used to assert that the
    # final manifest we get from merging all the additional manifest files
    # (plus the one we generate here) isn't modified by merging the
    # intermediate into it.

    # Always NO, because we generate a manifest file that has what we want.
    flags.append('/MANIFESTUAC:NO')

    config = self._TargetConfig(config)
    enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
                               default='true')
    manifest_files = []
    generated_manifest_outer = \
        "<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
        "<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
        "</assembly>"
    if enable_uac == 'true':
      execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
                                      config, default='0')
      execution_level_map = {
        '0': 'asInvoker',
        '1': 'highestAvailable',
        '2': 'requireAdministrator'
      }

      ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
                                default='false')

      inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
    <requestedPrivileges>
      <requestedExecutionLevel level='%s' uiAccess='%s' />
    </requestedPrivileges>
  </security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
    else:
      inner = ''

    generated_manifest_contents = generated_manifest_outer % inner
    generated_name = name + '.generated.manifest'
    # Need to join with the build_dir here as we're writing it during
    # generation time, but we return the un-joined version because the build
    # will occur in that directory. We only write the file if the contents
    # have changed so that simply regenerating the project files doesn't
    # cause a relink.
    build_dir_generated_name = os.path.join(build_dir, generated_name)
    gyp.common.EnsureDirExists(build_dir_generated_name)
    f = gyp.common.WriteOnDiff(build_dir_generated_name)
    f.write(generated_manifest_contents)
    f.close()
    manifest_files = [generated_name]

    if allow_isolation:
      flags.append('/ALLOWISOLATION')

    manifest_files += self._GetAdditionalManifestFiles(config,
                                                       gyp_to_build_path)
    return flags, output_name, manifest_files

  def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
    """Gets additional manifest files that are added to the default one
    generated by the linker."""
    files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
                          default=[])
    if isinstance(files, str):
      files = files.split(';')
    return [os.path.normpath(
        gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
        for f in files]

  def IsUseLibraryDependencyInputs(self, config):
    """Returns whether the target should be linked via Use Library Dependency
    Inputs (using component .objs of a given .lib)."""
    config = self._TargetConfig(config)
    uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
    return uldi == 'true'

  def IsEmbedManifest(self, config):
    """Returns whether the manifest should be linked into the binary."""
    config = self._TargetConfig(config)
    embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
                          default='true')
    return embed == 'true'

  def IsLinkIncremental(self, config):
    """Returns whether the target should be linked incrementally."""
    config = self._TargetConfig(config)
    link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
    return link_inc != '1'

  def GetRcflags(self, config, gyp_to_ninja_path):
    """Returns the flags that need to be added to invocations of the resource
    compiler."""
    config = self._TargetConfig(config)
    rcflags = []
    rc = self._GetWrapper(self, self.msvs_settings[config],
                          'VCResourceCompilerTool', append=rcflags)
    rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
    rcflags.append('/I' + gyp_to_ninja_path('.'))
    rc('PreprocessorDefinitions', prefix='/d')
    # /l arg must be in hex without leading '0x'
    rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
    return rcflags

  def BuildCygwinBashCommandLine(self, args, path_to_base):
    """Build a command line that runs args via cygwin bash. We assume that all
    incoming paths are in Windows normpath'd form, so they need to be
    converted to posix style for the part of the command line that's passed to
    bash. We also have to do some Visual Studio macro emulation here because
    various rules use magic VS names for things. Also note that rules that
    contain ninja variables cannot be fixed here (for example ${source}), so
    the outer generator needs to make sure that the paths that are written out
    are in posix style, if the command line will be used here."""
    cygwin_dir = os.path.normpath(
        os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
    cd = ('cd %s' % path_to_base).replace('\\', '/')
    args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
    args = ["'%s'" % a.replace("'", "'\\''") for a in args]
    bash_cmd = ' '.join(args)
    cmd = (
        'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
        'bash -c "%s ; %s"' % (cd, bash_cmd))
    return cmd

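  # Illustrative sketch (not part of the vendored source): with hypothetical
  # arguments, BuildCygwinBashCommandLine(['echo', 'a b'], '..\\..') would
  # produce roughly (shown wrapped; the real result is one line):
  #   call "..\..\setup_env.bat" && set CYGWIN=nontsec &&
  #       bash -c "cd ../.. ; 'echo' 'a b'"
  # i.e. backslashes are flipped for bash and each arg is single-quoted.
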
  def IsRuleRunUnderCygwin(self, rule):
    """Determine if an action should be run under cygwin. If the variable is
    unset, or set to 1, we use cygwin."""
    return int(rule.get('msvs_cygwin_shell',
                        self.spec.get('msvs_cygwin_shell', 1))) != 0

  def _HasExplicitRuleForExtension(self, spec, extension):
    """Determine if there's an explicit rule for a particular extension."""
    for rule in spec.get('rules', []):
      if rule['extension'] == extension:
        return True
    return False

  def HasExplicitIdlRules(self, spec):
    """Determine if there's an explicit rule for idl files. When there isn't,
    we need to generate implicit rules to build MIDL .idl files."""
    return self._HasExplicitRuleForExtension(spec, 'idl')

  def HasExplicitAsmRules(self, spec):
    """Determine if there's an explicit rule for asm files. When there isn't,
    we need to generate implicit rules to assemble .asm files."""
    return self._HasExplicitRuleForExtension(spec, 'asm')

  def GetIdlBuildData(self, source, config):
    """Determine the implicit outputs for an idl file. Returns the output
    directory, outputs, and the variables and flags that are required."""
    config = self._TargetConfig(config)
    midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
    def midl(name, default=None):
      return self.ConvertVSMacros(midl_get(name, default=default),
                                  config=config)
    tlb = midl('TypeLibraryName', default='${root}.tlb')
    header = midl('HeaderFileName', default='${root}.h')
    dlldata = midl('DLLDataFileName', default='dlldata.c')
    iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
    proxy = midl('ProxyFileName', default='${root}_p.c')
    # Note that .tlb is not included in the outputs as it is not always
    # generated depending on the content of the input idl file.
    outdir = midl('OutputDirectory', default='')
    output = [header, dlldata, iid, proxy]
    variables = [('tlb', tlb),
                 ('h', header),
                 ('dlldata', dlldata),
                 ('iid', iid),
                 ('proxy', proxy)]
    # TODO(scottmg): Are there configuration settings to set these flags?
    target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
    flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
    return outdir, output, variables, flags


def _LanguageMatchesForPch(source_ext, pch_source_ext):
  c_exts = ('.c',)
  cc_exts = ('.cc', '.cxx', '.cpp')
  return ((source_ext in c_exts and pch_source_ext in c_exts) or
          (source_ext in cc_exts and pch_source_ext in cc_exts))

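
# Illustrative sketch (not part of the vendored source): the PCH language
# check above only pairs C with C and C++ with C++, e.g.
#   _LanguageMatchesForPch('.cc', '.cpp')  => True
#   _LanguageMatchesForPch('.c', '.cpp')   => False
# so a C++ precompiled header is never applied to a .c compile.
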
class PrecompiledHeader(object):
  """Helper to generate dependencies and build rules to handle generation of
  precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
  """
  def __init__(
      self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
    self.settings = settings
    self.config = config
    pch_source = self.settings.msvs_precompiled_source[self.config]
    self.pch_source = gyp_to_build_path(pch_source)
    filename, _ = os.path.splitext(pch_source)
    self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()

  def _PchHeader(self):
    """Get the header that will appear in an #include line for all source
    files."""
    return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]

  def GetObjDependencies(self, sources, objs, arch):
    """Given a list of source files and the corresponding object files,
    returns a list of the pch files that should be depended upon. The
    additional wrapping in the return value is for interface compatibility
    with make.py on Mac, and xcode_emulation.py."""
    assert arch is None
    if not self._PchHeader():
      return []
    pch_ext = os.path.splitext(self.pch_source)[1]
    for source in sources:
      if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
        return [(None, None, self.output_obj)]
    return []

  def GetPchBuildCommands(self, arch):
    """Not used on Windows as there are no additional build steps required
    (instead, existing steps are modified in GetFlagsModifications below)."""
    return []

  def GetFlagsModifications(self, input, output, implicit, command,
                            cflags_c, cflags_cc, expand_special):
    """Get the modified cflags and implicit dependencies that should be used
    for the pch compilation step."""
    if input == self.pch_source:
      pch_output = ['/Yc' + self._PchHeader()]
      if command == 'cxx':
        return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
                self.output_obj, [])
      elif command == 'cc':
        return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
                self.output_obj, [])
    return [], output, implicit


vs_version = None
def GetVSVersion(generator_flags):
  global vs_version
  if not vs_version:
    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
        generator_flags.get('msvs_version', 'auto'))
  return vs_version

def _GetVsvarsSetupArgs(generator_flags, arch):
  vs = GetVSVersion(generator_flags)
  return vs.SetupScript()

def ExpandMacros(string, expansions):
  """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
  for the canonical way to retrieve a suitable dict."""
  if '$' in string:
    for old, new in expansions.iteritems():
      assert '$(' not in new, new
      string = string.replace(old, new)
  return string

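# Illustrative sketch (not part of the vendored source): a minimal, runnable
# demonstration of ExpandMacros with a hand-built expansions dict. Real
# callers obtain the dict from MsvsSettings.GetVSMacroEnv instead.
def _ExampleExpandMacros():
  expansions = {'$(ProjectName)': 'my_target', '$(PlatformName)': 'Win32'}
  # Each '$(...)' key found in the string is replaced by its gyp equivalent.
  return ExpandMacros('$(ProjectName)_$(PlatformName).log', expansions)
  # => 'my_target_Win32.log'
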
def _ExtractImportantEnvironment(output_of_set):
  """Extracts environment variables required for the toolchain to run from
  a textual dump output by the cmd.exe 'set' command."""
  envvars_to_save = (
      'goma_.*',  # TODO(scottmg): This is ugly, but needed for goma.
      'include',
      'lib',
      'libpath',
      'path',
      'pathext',
      'systemroot',
      'temp',
      'tmp',
      )
  env = {}
  for line in output_of_set.splitlines():
    for envvar in envvars_to_save:
      if re.match(envvar + '=', line.lower()):
        var, setting = line.split('=', 1)
        if envvar == 'path':
          # Our own rules (for running gyp-win-tool) and other actions in
          # Chromium rely on python being in the path. Add the path to this
          # python here so that if it's not in the path when ninja is run
          # later, python will still be found.
          setting = os.path.dirname(sys.executable) + os.pathsep + setting
        env[var.upper()] = setting
        break
  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
    if required not in env:
      raise Exception('Environment variable "%s" '
                      'required to be set to valid path' % required)
  return env

def _FormatAsEnvironmentBlock(envvar_dict):
  """Format as an 'environment block' directly suitable for CreateProcess.
  Briefly this is a list of key=value\0, terminated by an additional \0. See
  CreateProcess documentation for more details."""
  block = ''
  nul = '\0'
  for key, value in envvar_dict.iteritems():
    block += key + '=' + value + nul
  block += nul
  return block

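# Illustrative sketch (not part of the vendored source): the block format is
# 'KEY=value\0' pairs with a second NUL terminating the list, which is what
# win_tool.py's _GetEnv reverses when it reads the environment.<arch> files.
def _ExampleEnvironmentBlock():
  block = _FormatAsEnvironmentBlock({'TMP': r'C:\t', 'PATH': r'C:\py'})
  # Dropping the final two NULs and splitting on NUL recovers the pairs.
  pairs = [item.split('=', 1) for item in block[:-2].split('\0')]
  return dict(pairs)  # => {'TMP': 'C:\\t', 'PATH': 'C:\\py'}
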
def _ExtractCLPath(output_of_where):
  """Gets the path to cl.exe based on the output of calling the environment
  setup batch file, followed by the equivalent of `where`."""
  # Take the first line, as that's the first found in the PATH.
  for line in output_of_where.strip().splitlines():
    if line.startswith('LOC:'):
      return line[len('LOC:'):].strip()

def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path.
  When the following procedure to generate environment files does not
  meet your requirements (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to gyp to suppress file
  generation and use custom environment files prepared by yourself."""
  archs = ('x86', 'x64')
  if generator_flags.get('ninja_use_custom_environment_files', 0):
    cl_paths = {}
    for arch in archs:
      cl_paths[arch] = 'cl.exe'
    return cl_paths
  vs = GetVSVersion(generator_flags)
  cl_paths = {}
  for arch in archs:
    # Extract environment variables for subprocesses.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    env_block = _FormatAsEnvironmentBlock(env)
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()

    # Find cl.exe location for this architecture.
    args = vs.SetupScript(arch)
    args.extend(('&&',
      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
    output, _ = popen.communicate()
    cl_paths[arch] = _ExtractCLPath(output)
  return cl_paths

def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
  """Emulate behavior of msvs_error_on_missing_sources present in the msvs
  generator: Check that all regular source files, i.e. not created at run time,
  exist on disk. Missing files cause needless recompilation when building via
  VS, and we want this check to match for people/bots that build using ninja,
  so they're not surprised when the VS build fails."""
  if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
    no_specials = filter(lambda x: '$' not in x, sources)
    relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
    missing = filter(lambda x: not os.path.exists(x), relative)
    if missing:
      # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
      # path for a slightly less crazy looking output.
      cleaned_up = [os.path.normpath(x) for x in missing]
      raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))

# Sets some values in default_variables, which are required by many
# generators when run on Windows.
def CalculateCommonVariables(default_variables, params):
  generator_flags = params.get('generator_flags', {})

  # Set a variable so conditions can be based on msvs_version.
  msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()

  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running through WOW64).
  if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
      '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
    default_variables['MSVS_OS_BITS'] = 64
  else:
    default_variables['MSVS_OS_BITS'] = 32
160
third_party/gyp/ninja_syntax.py
vendored
Normal file
@@ -0,0 +1,160 @@
|
||||
# This file comes from
|
||||
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
|
||||
# Do not edit! Edit the upstream one instead.
|
||||
|
||||
"""Python module for generating .ninja files.
|
||||
|
||||
Note that this is emphatically not a required piece of Ninja; it's
|
||||
just a helpful utility for build-file-generation systems that already
|
||||
use Python.
|
||||
"""
|
||||
|
||||
import textwrap
|
||||
import re
|
||||
|
||||
def escape_path(word):
|
||||
return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
|
||||
|
||||
class Writer(object):
    def __init__(self, output, width=78):
        self.output = output
        self.width = width

    def newline(self):
        self.output.write('\n')

    def comment(self, text):
        for line in textwrap.wrap(text, self.width - 2):
            self.output.write('# ' + line + '\n')

    def variable(self, key, value, indent=0):
        if value is None:
            return
        if isinstance(value, list):
            value = ' '.join(filter(None, value))  # Filter out empty strings.
        self._line('%s = %s' % (key, value), indent)

    def pool(self, name, depth):
        self._line('pool %s' % name)
        self.variable('depth', depth, indent=1)

    def rule(self, name, command, description=None, depfile=None,
             generator=False, pool=None, restat=False, rspfile=None,
             rspfile_content=None, deps=None):
        self._line('rule %s' % name)
        self.variable('command', command, indent=1)
        if description:
            self.variable('description', description, indent=1)
        if depfile:
            self.variable('depfile', depfile, indent=1)
        if generator:
            self.variable('generator', '1', indent=1)
        if pool:
            self.variable('pool', pool, indent=1)
        if restat:
            self.variable('restat', '1', indent=1)
        if rspfile:
            self.variable('rspfile', rspfile, indent=1)
        if rspfile_content:
            self.variable('rspfile_content', rspfile_content, indent=1)
        if deps:
            self.variable('deps', deps, indent=1)

    def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
              variables=None):
        outputs = self._as_list(outputs)
        all_inputs = self._as_list(inputs)[:]
        out_outputs = list(map(escape_path, outputs))
        all_inputs = list(map(escape_path, all_inputs))

        if implicit:
            implicit = map(escape_path, self._as_list(implicit))
            all_inputs.append('|')
            all_inputs.extend(implicit)
        if order_only:
            order_only = map(escape_path, self._as_list(order_only))
            all_inputs.append('||')
            all_inputs.extend(order_only)

        self._line('build %s: %s' % (' '.join(out_outputs),
                                     ' '.join([rule] + all_inputs)))

        if variables:
            if isinstance(variables, dict):
                iterator = iter(variables.items())
            else:
                iterator = iter(variables)

            for key, val in iterator:
                self.variable(key, val, indent=1)

        return outputs

    def include(self, path):
        self._line('include %s' % path)

    def subninja(self, path):
        self._line('subninja %s' % path)

    def default(self, paths):
        self._line('default %s' % ' '.join(self._as_list(paths)))

    def _count_dollars_before_index(self, s, i):
        """Returns the number of '$' characters right in front of s[i]."""
        dollar_count = 0
        dollar_index = i - 1
        while dollar_index > 0 and s[dollar_index] == '$':
            dollar_count += 1
            dollar_index -= 1
        return dollar_count

    def _line(self, text, indent=0):
        """Write 'text' word-wrapped at self.width characters."""
        leading_space = '  ' * indent
        while len(leading_space) + len(text) > self.width:
            # The text is too wide; wrap if possible.

            # Find the rightmost space that would obey our width constraint and
            # that's not an escaped space.
            available_space = self.width - len(leading_space) - len(' $')
            space = available_space
            while True:
                space = text.rfind(' ', 0, space)
                if space < 0 or \
                   self._count_dollars_before_index(text, space) % 2 == 0:
                    break

            if space < 0:
                # No such space; just use the first unescaped space we can find.
                space = available_space - 1
                while True:
                    space = text.find(' ', space + 1)
                    if space < 0 or \
                       self._count_dollars_before_index(text, space) % 2 == 0:
                        break
                if space < 0:
                    # Give up on breaking.
                    break

            self.output.write(leading_space + text[0:space] + ' $\n')
            text = text[space+1:]

            # Subsequent lines are continuations, so indent them.
            leading_space = '  ' * (indent+2)

        self.output.write(leading_space + text + '\n')

    def _as_list(self, input):
        if input is None:
            return []
        if isinstance(input, list):
            return input
        return [input]


def escape(string):
    """Escape a string such that it can be embedded into a Ninja file without
    further interpretation."""
    assert '\n' not in string, 'Ninja syntax does not allow newlines'
    # We only have one special metacharacter: '$'.
    return string.replace('$', '$$')
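
# Illustrative sketch (not part of the upstream file): a minimal use of
# Writer, assuming Python 2 (matching the rest of this vendored snapshot).
def _example_writer():
    import StringIO
    buf = StringIO.StringIO()
    n = Writer(buf)
    n.comment('toy build file')
    n.rule('cc', command='cl /c $in /Fo$out', description='CC $out')
    n.build('foo.obj', 'cc', inputs='foo.cc')
    return buf.getvalue()
    # Produces:
    #   # toy build file
    #   rule cc
    #     command = cl /c $in /Fo$out
    #     description = CC $out
    #   build foo.obj: cc foo.cc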
289
third_party/gyp/ordered_dict.py
vendored
Normal file
@@ -0,0 +1,289 @@
|
||||
# Unmodified from http://code.activestate.com/recipes/576693/
|
||||
# other than to add MIT license header (as specified on page, but not in code).
|
||||
# Linked from Python documentation here:
|
||||
# http://docs.python.org/2/library/collections.html#collections.OrderedDict
|
||||
#
|
||||
# This should be deleted once Py2.7 is available on all bots, see
|
||||
# http://crbug.com/241769.
|
||||
#
|
||||
# Copyright (c) 2009 Raymond Hettinger.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
|
||||
# Passes Python2.7's test suite and incorporates all the latest updates.
|
||||
|
||||
try:
|
||||
from thread import get_ident as _get_ident
|
||||
except ImportError:
|
||||
from dummy_thread import get_ident as _get_ident
|
||||
|
||||
try:
|
||||
from _abcoll import KeysView, ValuesView, ItemsView
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__root = root = []  # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        try:
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    # Suppress 'OrderedDict.update: Method has no argument':
    # pylint: disable=E0211
    def update(*args, **kwds):
        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        '''
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)

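
# Illustrative sketch (not part of the upstream recipe): the backport behaves
# like the Python 2.7 collections.OrderedDict.
def _example_ordered_dict():
    od = OrderedDict([('b', 1), ('a', 2)])
    od['c'] = 3
    keys = od.keys()     # ['b', 'a', 'c'] -- insertion order, not sorted
    last = od.popitem()  # ('c', 3) -- LIFO by default
    return keys, last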
292
third_party/gyp/win_tool.py
vendored
Executable file
@@ -0,0 +1,292 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions for Windows builds.
|
||||
|
||||
These functions are executed via gyp-win-tool when using the ninja generator.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import string
|
||||
import sys
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
# A regex matching an argument corresponding to the output filename passed to
|
||||
# link.exe.
|
||||
_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
|
||||
|
||||
def main(args):
|
||||
executor = WinTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class WinTool(object):
  """This class performs all the Windows tooling steps. The methods can either
  be executed directly, or dispatched from an argument list."""

  def _UseSeparateMspdbsrv(self, env, args):
    """Allows using a unique instance of mspdbsrv.exe per linker instead of a
    shared one."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    if args[0] != 'link.exe':
      return

    # Use the output filename passed to the linker to generate an endpoint name
    # for mspdbsrv.exe.
    endpoint_name = None
    for arg in args:
      m = _LINK_EXE_OUT_ARG.match(arg)
      if m:
        endpoint_name = re.sub(r'\W+', '',
                               '%s_%d' % (m.group('out'), os.getpid()))
        break

    if endpoint_name is None:
      return

    # Adds the appropriate environment variable. This will be read by link.exe
    # to know which instance of mspdbsrv.exe it should connect to (if it's
    # not set then the default endpoint is used).
    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name

  def Dispatch(self, args):
    """Dispatches a string command to a method."""
    if len(args) < 1:
      raise Exception("Not enough arguments")

    method = "Exec%s" % self._CommandifyName(args[0])
    return getattr(self, method)(*args[1:])

  def _CommandifyName(self, name_string):
    """Transforms a tool name like recursive-mirror to RecursiveMirror."""
    return name_string.title().replace('-', '')

  def _GetEnv(self, arch):
    """Gets the saved environment from a file for a given architecture."""
    # The environment is saved as an "environment block" (see CreateProcess
    # and msvs_emulation for details). We convert to a dict here.
    # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
    pairs = open(arch).read()[:-2].split('\0')
    kvs = [item.split('=', 1) for item in pairs]
    return dict(kvs)

  def ExecStamp(self, path):
    """Simple stamp command."""
    open(path, 'w').close()

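  # Illustrative sketch (not part of the vendored source): Dispatch maps a
  # hyphenated command name to an Exec* method, so an argument list like
  #   ['stamp', 'out\\foo.stamp']         -> ExecStamp('out\\foo.stamp')
  #   ['recursive-mirror', 'src', 'dst']  -> ExecRecursiveMirror('src', 'dst')
  # via _CommandifyName ('recursive-mirror' -> 'RecursiveMirror').
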
  def ExecRecursiveMirror(self, source, dest):
    """Emulation of rm -rf out && cp -af in out."""
    if os.path.exists(dest):
      if os.path.isdir(dest):
        shutil.rmtree(dest)
      else:
        os.unlink(dest)
    if os.path.isdir(source):
      shutil.copytree(source, dest)
    else:
      shutil.copy2(source, dest)

  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
    """Filter diagnostic output from link that looks like:
    '   Creating library ui.dll.lib and object ui.dll.exp'
    This happens when there are exports from the dll or exe.
    """
    env = self._GetEnv(arch)
    if use_separate_mspdbsrv == 'True':
      self._UseSeparateMspdbsrv(env, args)
    link = subprocess.Popen(args,
                            shell=True,
                            env=env,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    out, _ = link.communicate()
    for line in out.splitlines():
      if not line.startswith('   Creating library '):
        print line
    return link.returncode

  def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
                            mt, rc, intermediate_manifest, *manifests):
    """A wrapper for handling creating a manifest resource and then executing
    a link command."""
    # The 'normal' way to do manifests is to have link generate a manifest
    # based on gathering dependencies from the object files, then merge that
    # manifest with other manifests supplied as sources, convert the merged
    # manifest to a resource, and then *relink*, including the compiled
    # version of the manifest resource. This breaks incremental linking, and
    # is generally overly complicated. Instead, we merge all the manifests
    # provided (along with one that includes what would normally be in the
    # linker-generated one, see msvs_emulation.py), and include that into the
    # first and only link. We still tell link to generate a manifest, but we
    # only use that to assert that our simpler process did not miss anything.
    variables = {
      'python': sys.executable,
      'arch': arch,
      'out': out,
      'ldcmd': ldcmd,
      'resname': resname,
      'mt': mt,
      'rc': rc,
      'intermediate_manifest': intermediate_manifest,
      'manifests': ' '.join(manifests),
    }
    add_to_ld = ''
    if manifests:
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
      if embed_manifest == 'True':
        subprocess.check_call(
            '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
            ' %(out)s.manifest.rc %(resname)s' % variables)
        subprocess.check_call(
            '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
            '%(out)s.manifest.rc' % variables)
        add_to_ld = ' %(out)s.manifest.res' % variables
    subprocess.check_call(ldcmd + add_to_ld)

    # Run mt.exe on the theoretically complete manifest we generated, merging
    # it with the one the linker generated to confirm that the linker
    # generated one does not add anything. This is strictly unnecessary for
    # correctness; it's only to verify that e.g. /MANIFESTDEPENDENCY was not
    # used in a #pragma comment.
    if manifests:
      # Merge the intermediate one with ours to .assert.manifest, then check
      # that .assert.manifest is identical to ours.
      subprocess.check_call(
          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
          '-manifest %(out)s.manifest %(intermediate_manifest)s '
          '-out:%(out)s.assert.manifest' % variables)
      assert_manifest = '%(out)s.assert.manifest' % variables
      our_manifest = '%(out)s.manifest' % variables
      # Load and normalize the manifests. mt.exe sometimes removes whitespace,
      # and sometimes doesn't unfortunately.
      with open(our_manifest, 'rb') as our_f:
        with open(assert_manifest, 'rb') as assert_f:
          our_data = our_f.read().translate(None, string.whitespace)
          assert_data = assert_f.read().translate(None, string.whitespace)
      if our_data != assert_data:
        os.unlink(out)
        def dump(filename):
          sys.stderr.write('%s\n-----\n' % filename)
          with open(filename, 'rb') as f:
            sys.stderr.write(f.read() + '\n-----\n')
        dump(intermediate_manifest)
        dump(our_manifest)
        dump(assert_manifest)
        sys.stderr.write(
            'Linker generated manifest "%s" added to final manifest "%s" '
            '(result in "%s"). '
            'Were /MANIFEST switches used in #pragma statements? ' % (
              intermediate_manifest, our_manifest, assert_manifest))
        return 1

  def ExecManifestWrapper(self, arch, *args):
    """Run manifest tool with environment set. Strip out undesirable warning
    (some XML blocks are recognized by the OS loader, but not the manifest
    tool)."""
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if line and 'manifest authoring warning 81010002' not in line:
        print line
    return popen.returncode

  def ExecManifestToRc(self, arch, *args):
    """Creates a resource file pointing to a SxS assembly manifest.
    |args| is a tuple containing the path to the manifest file, the path to
    the resource file, and the resource name, which can be "1" (for
    executables) or "2" (for DLLs)."""
    manifest_path, resource_path, resource_name = args
    with open(resource_path, 'wb') as output:
      output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
          resource_name,
          os.path.abspath(manifest_path).replace('\\', '/')))
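    # Illustration (added note, not in the original tool): for a DLL manifest
    # at C:\obj\ui.dll.manifest, the generated resource file contains:
    #   #include <windows.h>
    #   2 RT_MANIFEST "C:/obj/ui.dll.manifest"
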
  def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
                      *flags):
    """Filter noisy filename output from the MIDL compile step that isn't
    quietable via command-line flags.
    """
    args = ['midl', '/nologo'] + list(flags) + [
        '/out', outdir,
        '/tlb', tlb,
        '/h', h,
        '/dlldata', dlldata,
        '/iid', iid,
        '/proxy', proxy,
        idl]
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    # Filter junk out of stdout, and write filtered versions. Output we want
    # to filter is pairs of lines that look like this:
    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
    # objidl.idl
    lines = out.splitlines()
    prefix = 'Processing '
    processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
    for line in lines:
      if not line.startswith(prefix) and line not in processing:
        print line
    return popen.returncode

  def ExecAsmWrapper(self, arch, *args):
    """Filter logo banner from invocations of asm.exe."""
    env = self._GetEnv(arch)
    # MSVS doesn't assemble x64 asm files.
    if arch == 'environment.x64':
      return 0
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if (not line.startswith('Copyright (C) Microsoft Corporation') and
          not line.startswith('Microsoft (R) Macro Assembler') and
          not line.startswith(' Assembling: ') and
          line):
        print line
    return popen.returncode

  def ExecRcWrapper(self, arch, *args):
    """Filter logo banner from invocations of rc.exe. Older versions of RC
    don't support the /nologo flag."""
    env = self._GetEnv(arch)
    popen = subprocess.Popen(args, shell=True, env=env,
                             stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    out, _ = popen.communicate()
    for line in out.splitlines():
      if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
          not line.startswith('Copyright (C) Microsoft Corporation') and
          line):
        print line
    return popen.returncode

  def ExecActionWrapper(self, arch, rspfile, *dir):
    """Runs an action command line from a response file using the environment
    for |arch|. If |dir| is supplied, use that as the working directory."""
    env = self._GetEnv(arch)
    # TODO(scottmg): This is a temporary hack to get some specific variables
    # through to actions that are set after gyp-time. http://crbug.com/333738.
    for k, v in os.environ.iteritems():
      if k not in env:
        env[k] = v
    args = open(rspfile).read()
    dir = dir[0] if dir else None
    return subprocess.call(args, shell=True, env=env, cwd=dir)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
1440
third_party/gyp/xcode_emulation.py
vendored
Normal file
File diff suppressed because it is too large
2889
third_party/gyp/xcodeproj_file.py
vendored
Normal file
File diff suppressed because it is too large
69
third_party/gyp/xml_fix.py
vendored
Normal file
@@ -0,0 +1,69 @@
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Applies a fix to CR LF TAB handling in xml.dom.

Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
Working around this: http://bugs.python.org/issue5752
TODO(bradnelson): Consider dropping this when we drop XP support.
"""


import xml.dom.minidom
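# Background (added note, not in the original file): stock minidom writes raw
# CR, LF and TAB characters into attribute values, and XML parsers normalize
# those to plain spaces on read-back. The replacement below writes them as
# character references (&#xD; &#xA; &#x9;) so they survive a round trip.

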
def _Replacement_write_data(writer, data, is_attrib=False):
  """Writes datachars to writer."""
  data = data.replace("&", "&amp;").replace("<", "&lt;")
  data = data.replace("\"", "&quot;").replace(">", "&gt;")
  if is_attrib:
    data = data.replace(
        "\r", "&#xD;").replace(
        "\n", "&#xA;").replace(
        "\t", "&#x9;")
  writer.write(data)


def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
  # indent = current indentation
  # addindent = indentation to add to higher levels
  # newl = newline string
  writer.write(indent+"<" + self.tagName)

  attrs = self._get_attributes()
  a_names = attrs.keys()
  a_names.sort()

  for a_name in a_names:
    writer.write(" %s=\"" % a_name)
    _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
    writer.write("\"")
  if self.childNodes:
    writer.write(">%s" % newl)
    for node in self.childNodes:
      node.writexml(writer, indent + addindent, addindent, newl)
    writer.write("%s</%s>%s" % (indent, self.tagName, newl))
  else:
    writer.write("/>%s" % newl)


class XmlFix(object):
  """Object to manage temporary patching of xml.dom.minidom."""

  def __init__(self):
    # Preserve current xml.dom.minidom functions.
    self.write_data = xml.dom.minidom._write_data
    self.writexml = xml.dom.minidom.Element.writexml
    # Inject replacement versions of a function and a method.
    xml.dom.minidom._write_data = _Replacement_write_data
    xml.dom.minidom.Element.writexml = _Replacement_writexml

  def Cleanup(self):
    if self.write_data:
      xml.dom.minidom._write_data = self.write_data
      xml.dom.minidom.Element.writexml = self.writexml
      self.write_data = None

  def __del__(self):
    self.Cleanup()
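

# Example usage (illustrative sketch, not part of the original file):
#   fix = XmlFix()
#   doc = xml.dom.minidom.Document()
#   el = doc.createElement('a')
#   el.setAttribute('b', '1\t2')
#   doc.appendChild(el)
#   print doc.toxml()  # the tab is written as &#x9; instead of a raw TAB
#   fix.Cleanup()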