Updated the files.
commit 753967d4f5 (parent 1553e6b971)
23418 changed files with 3784666 additions and 0 deletions
28
my-app/node_modules/node-gyp/gyp/LICENSE
generated
vendored
Executable file
@@ -0,0 +1,28 @@
Copyright (c) 2020 Node.js contributors. All rights reserved.
Copyright (c) 2009 Google Inc. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
my-app/node_modules/node-gyp/gyp/README.md
generated
vendored
Executable file
@@ -0,0 +1,30 @@
GYP can Generate Your Projects.
===================================

Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline.

__gyp-next__ is [released](https://github.com/nodejs/gyp-next/releases) to the [__Python Packaging Index__](https://pypi.org/project/gyp-next) (PyPI) and can be installed with the command:
* `python3 -m pip install gyp-next`

When used as a command line utility, __gyp-next__ can also be installed with [pipx](https://pypa.github.io/pipx):
* `pipx install gyp-next`
```
Installing to a new venv 'gyp-next'
  installed package gyp-next 0.13.0, installed using Python 3.10.6
  These apps are now globally available
    - gyp
done! ✨ 🌟 ✨
```

Or to run __gyp-next__ directly without installing it:
* `pipx run gyp-next --help`
```
NOTE: running app 'gyp' from 'gyp-next'
usage: usage: gyp [options ...] [build_file ...]

options:
  -h, --help       show this help message and exit
  --build CONFIGS  configuration for build after project generation
  --check          check format of gyp files
  [ ... ]
```
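As a small aside (not part of the vendored README), the same help text can also be pulled from a script once gyp-next is installed; this sketch just shells out to the `gyp` executable and assumes it is already on PATH:

```python
import subprocess

# Equivalent of `pipx run gyp-next --help`, assuming the gyp CLI is installed.
result = subprocess.run(["gyp", "--help"], capture_output=True, text=True, check=False)
print(result.stdout)
```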
12
my-app/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc
generated
vendored
Executable file
@@ -0,0 +1,12 @@
// Copyright (c) 2013 Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
// then used during the final link for modules that have large PDBs. Otherwise,
// the linker will generate a pdb with a page size of 1KB, which imposes a limit
// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
// (rather than the linker), this limit is avoided. With this in place PDBs may
// grow to 2GB.
//
// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
8
my-app/node_modules/node-gyp/gyp/gyp
generated
vendored
Executable file
@@ -0,0 +1,8 @@
#!/bin/sh
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

set -e
base=$(dirname "$0")
exec python "${base}/gyp_main.py" "$@"
5
my-app/node_modules/node-gyp/gyp/gyp.bat
generated
vendored
Executable file
@@ -0,0 +1,5 @@
@rem Copyright (c) 2009 Google Inc. All rights reserved.
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.

@python "%~dp0gyp_main.py" %*
45
my-app/node_modules/node-gyp/gyp/gyp_main.py
generated
vendored
Executable file
@@ -0,0 +1,45 @@
#!/usr/bin/env python3

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import sys
import subprocess


def IsCygwin():
    # Function copied from pylib/gyp/common.py
    try:
        out = subprocess.Popen(
            "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return "CYGWIN" in stdout.decode("utf-8")
    except Exception:
        return False


def UnixifyPath(path):
    try:
        if not IsCygwin():
            return path
        out = subprocess.Popen(
            ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        stdout, _ = out.communicate()
        return stdout.decode("utf-8")
    except Exception:
        return path


# Make sure we're using the version of pylib in this repo, not one installed
# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
# else the 'gyp' library will not be found
path = UnixifyPath(sys.argv[0])
sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib"))
import gyp  # noqa: E402

if __name__ == "__main__":
    sys.exit(gyp.script_main())
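The whole point of gyp_main.py is the bootstrap at the bottom: put the vendored pylib/ directory ahead of any system-wide gyp on sys.path (converting the script path with cygpath when running under Cygwin) before importing gyp. A minimal sketch of that "prefer the vendored copy" pattern; the layout with a pylib/ directory next to the script is an assumption for illustration:

```python
import os
import sys

# Resolve the directory holding this script, then prefer a library copy vendored
# alongside it over anything installed system-wide.
vendor_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "pylib")
sys.path.insert(0, vendor_dir)

import gyp  # noqa: E402  -- now resolves to the vendored copy, if present
```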
365
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
generated
vendored
Executable file
@@ -0,0 +1,365 @@
|
|||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""New implementation of Visual Studio project generation."""
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import random
|
||||
from operator import attrgetter
|
||||
|
||||
import gyp.common
|
||||
|
||||
|
||||
def cmp(x, y):
|
||||
return (x > y) - (x < y)
|
||||
|
||||
|
||||
# Initialize random number generator
|
||||
random.seed()
|
||||
|
||||
# GUIDs for project types
|
||||
ENTRY_TYPE_GUIDS = {
|
||||
"project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}",
|
||||
"folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}",
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Helper functions
|
||||
|
||||
|
||||
def MakeGuid(name, seed="msvs_new"):
|
||||
"""Returns a GUID for the specified target name.
|
||||
|
||||
Args:
|
||||
name: Target name.
|
||||
seed: Seed for MD5 hash.
|
||||
Returns:
|
||||
A GUID-like string calculated from the name and seed.
|
||||
|
||||
This generates something which looks like a GUID, but depends only on the
|
||||
name and seed. This means the same name/seed will always generate the same
|
||||
GUID, so that projects and solutions which refer to each other can explicitly
|
||||
determine the GUID to refer to. It also means that the GUID will
|
||||
not change when the project for a target is rebuilt.
|
||||
"""
|
||||
# Calculate a MD5 signature for the seed and name.
|
||||
d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
|
||||
# Convert most of the signature to GUID form (discard the rest)
|
||||
guid = (
|
||||
"{"
|
||||
+ d[:8]
|
||||
+ "-"
|
||||
+ d[8:12]
|
||||
+ "-"
|
||||
+ d[12:16]
|
||||
+ "-"
|
||||
+ d[16:20]
|
||||
+ "-"
|
||||
+ d[20:32]
|
||||
+ "}"
|
||||
)
|
||||
return guid
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MSVSSolutionEntry:
|
||||
def __cmp__(self, other):
|
||||
# Sort by name then guid (so things are in order on vs2008).
|
||||
return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
|
||||
|
||||
|
||||
class MSVSFolder(MSVSSolutionEntry):
|
||||
"""Folder in a Visual Studio project or solution."""
|
||||
|
||||
def __init__(self, path, name=None, entries=None, guid=None, items=None):
|
||||
"""Initializes the folder.
|
||||
|
||||
Args:
|
||||
path: Full path to the folder.
|
||||
name: Name of the folder.
|
||||
entries: List of folder entries to nest inside this folder. May contain
|
||||
Folder or Project objects. May be None, if the folder is empty.
|
||||
guid: GUID to use for folder, if not None.
|
||||
items: List of solution items to include in the folder project. May be
|
||||
None, if the folder does not directly contain items.
|
||||
"""
|
||||
if name:
|
||||
self.name = name
|
||||
else:
|
||||
# Use last layer.
|
||||
self.name = os.path.basename(path)
|
||||
|
||||
self.path = path
|
||||
self.guid = guid
|
||||
|
||||
# Copy passed lists (or set to empty lists)
|
||||
self.entries = sorted(entries or [], key=attrgetter("path"))
|
||||
self.items = list(items or [])
|
||||
|
||||
self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"]
|
||||
|
||||
def get_guid(self):
|
||||
if self.guid is None:
|
||||
# Use consistent guids for folders (so things don't regenerate).
|
||||
self.guid = MakeGuid(self.path, seed="msvs_folder")
|
||||
return self.guid
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MSVSProject(MSVSSolutionEntry):
|
||||
"""Visual Studio project."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
path,
|
||||
name=None,
|
||||
dependencies=None,
|
||||
guid=None,
|
||||
spec=None,
|
||||
build_file=None,
|
||||
config_platform_overrides=None,
|
||||
fixpath_prefix=None,
|
||||
):
|
||||
"""Initializes the project.
|
||||
|
||||
Args:
|
||||
path: Absolute path to the project file.
|
||||
name: Name of project. If None, the name will be the same as the base
|
||||
name of the project file.
|
||||
dependencies: List of other Project objects this project is dependent
|
||||
upon, if not None.
|
||||
guid: GUID to use for project, if not None.
|
||||
spec: Dictionary specifying how to build this project.
|
||||
build_file: Filename of the .gyp file that the vcproj file comes from.
|
||||
config_platform_overrides: optional dict of configuration platforms to
|
||||
used in place of the default for this target.
|
||||
fixpath_prefix: the path used to adjust the behavior of _fixpath
|
||||
"""
|
||||
self.path = path
|
||||
self.guid = guid
|
||||
self.spec = spec
|
||||
self.build_file = build_file
|
||||
# Use project filename if name not specified
|
||||
self.name = name or os.path.splitext(os.path.basename(path))[0]
|
||||
|
||||
# Copy passed lists (or set to empty lists)
|
||||
self.dependencies = list(dependencies or [])
|
||||
|
||||
self.entry_type_guid = ENTRY_TYPE_GUIDS["project"]
|
||||
|
||||
if config_platform_overrides:
|
||||
self.config_platform_overrides = config_platform_overrides
|
||||
else:
|
||||
self.config_platform_overrides = {}
|
||||
self.fixpath_prefix = fixpath_prefix
|
||||
self.msbuild_toolset = None
|
||||
|
||||
def set_dependencies(self, dependencies):
|
||||
self.dependencies = list(dependencies or [])
|
||||
|
||||
def get_guid(self):
|
||||
if self.guid is None:
|
||||
# Set GUID from path
|
||||
# TODO(rspangler): This is fragile.
|
||||
# 1. We can't just use the project filename sans path, since there could
|
||||
# be multiple projects with the same base name (for example,
|
||||
# foo/unittest.vcproj and bar/unittest.vcproj).
|
||||
# 2. The path needs to be relative to $SOURCE_ROOT, so that the project
|
||||
# GUID is the same whether it's included from base/base.sln or
|
||||
# foo/bar/baz/baz.sln.
|
||||
# 3. The GUID needs to be the same each time this builder is invoked, so
|
||||
# that we don't need to rebuild the solution when the project changes.
|
||||
# 4. We should be able to handle pre-built project files by reading the
|
||||
# GUID from the files.
|
||||
self.guid = MakeGuid(self.name)
|
||||
return self.guid
|
||||
|
||||
def set_msbuild_toolset(self, msbuild_toolset):
|
||||
self.msbuild_toolset = msbuild_toolset
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MSVSSolution:
|
||||
"""Visual Studio solution."""
|
||||
|
||||
def __init__(
|
||||
self, path, version, entries=None, variants=None, websiteProperties=True
|
||||
):
|
||||
"""Initializes the solution.
|
||||
|
||||
Args:
|
||||
path: Path to solution file.
|
||||
version: Format version to emit.
|
||||
entries: List of entries in solution. May contain Folder or Project
|
||||
objects. May be None, if the folder is empty.
|
||||
variants: List of build variant strings. If none, a default list will
|
||||
be used.
|
||||
websiteProperties: Flag to decide if the website properties section
|
||||
is generated.
|
||||
"""
|
||||
self.path = path
|
||||
self.websiteProperties = websiteProperties
|
||||
self.version = version
|
||||
|
||||
# Copy passed lists (or set to empty lists)
|
||||
self.entries = list(entries or [])
|
||||
|
||||
if variants:
|
||||
# Copy passed list
|
||||
self.variants = variants[:]
|
||||
else:
|
||||
# Use default
|
||||
self.variants = ["Debug|Win32", "Release|Win32"]
|
||||
# TODO(rspangler): Need to be able to handle a mapping of solution config
|
||||
# to project config. Should we be able to handle variants being a dict,
|
||||
# or add a separate variant_map variable? If it's a dict, we can't
|
||||
# guarantee the order of variants since dict keys aren't ordered.
|
||||
|
||||
# TODO(rspangler): Automatically write to disk for now; should delay until
|
||||
# node-evaluation time.
|
||||
self.Write()
|
||||
|
||||
def Write(self, writer=gyp.common.WriteOnDiff):
|
||||
"""Writes the solution file to disk.
|
||||
|
||||
Raises:
|
||||
IndexError: An entry appears multiple times.
|
||||
"""
|
||||
# Walk the entry tree and collect all the folders and projects.
|
||||
all_entries = set()
|
||||
entries_to_check = self.entries[:]
|
||||
while entries_to_check:
|
||||
e = entries_to_check.pop(0)
|
||||
|
||||
# If this entry has been visited, nothing to do.
|
||||
if e in all_entries:
|
||||
continue
|
||||
|
||||
all_entries.add(e)
|
||||
|
||||
# If this is a folder, check its entries too.
|
||||
if isinstance(e, MSVSFolder):
|
||||
entries_to_check += e.entries
|
||||
|
||||
all_entries = sorted(all_entries, key=attrgetter("path"))
|
||||
|
||||
# Open file and print header
|
||||
f = writer(self.path)
|
||||
f.write(
|
||||
"Microsoft Visual Studio Solution File, "
|
||||
"Format Version %s\r\n" % self.version.SolutionVersion()
|
||||
)
|
||||
f.write("# %s\r\n" % self.version.Description())
|
||||
|
||||
# Project entries
|
||||
sln_root = os.path.split(self.path)[0]
|
||||
for e in all_entries:
|
||||
relative_path = gyp.common.RelativePath(e.path, sln_root)
|
||||
# msbuild does not accept an empty folder_name.
|
||||
# use '.' in case relative_path is empty.
|
||||
folder_name = relative_path.replace("/", "\\") or "."
|
||||
f.write(
|
||||
'Project("%s") = "%s", "%s", "%s"\r\n'
|
||||
% (
|
||||
e.entry_type_guid, # Entry type GUID
|
||||
e.name, # Folder name
|
||||
folder_name, # Folder name (again)
|
||||
e.get_guid(), # Entry GUID
|
||||
)
|
||||
)
|
||||
|
||||
# TODO(rspangler): Need a way to configure this stuff
|
||||
if self.websiteProperties:
|
||||
f.write(
|
||||
"\tProjectSection(WebsiteProperties) = preProject\r\n"
|
||||
'\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
|
||||
'\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
|
||||
"\tEndProjectSection\r\n"
|
||||
)
|
||||
|
||||
if isinstance(e, MSVSFolder) and e.items:
|
||||
f.write("\tProjectSection(SolutionItems) = preProject\r\n")
|
||||
for i in e.items:
|
||||
f.write(f"\t\t{i} = {i}\r\n")
|
||||
f.write("\tEndProjectSection\r\n")
|
||||
|
||||
if isinstance(e, MSVSProject) and e.dependencies:
|
||||
f.write("\tProjectSection(ProjectDependencies) = postProject\r\n")
|
||||
for d in e.dependencies:
|
||||
f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n")
|
||||
f.write("\tEndProjectSection\r\n")
|
||||
|
||||
f.write("EndProject\r\n")
|
||||
|
||||
# Global section
|
||||
f.write("Global\r\n")
|
||||
|
||||
# Configurations (variants)
|
||||
f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n")
|
||||
for v in self.variants:
|
||||
f.write(f"\t\t{v} = {v}\r\n")
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
# Sort config guids for easier diffing of solution changes.
|
||||
config_guids = []
|
||||
config_guids_overrides = {}
|
||||
for e in all_entries:
|
||||
if isinstance(e, MSVSProject):
|
||||
config_guids.append(e.get_guid())
|
||||
config_guids_overrides[e.get_guid()] = e.config_platform_overrides
|
||||
config_guids.sort()
|
||||
|
||||
f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n")
|
||||
for g in config_guids:
|
||||
for v in self.variants:
|
||||
nv = config_guids_overrides[g].get(v, v)
|
||||
# Pick which project configuration to build for this solution
|
||||
# configuration.
|
||||
f.write(
|
||||
"\t\t%s.%s.ActiveCfg = %s\r\n"
|
||||
% (
|
||||
g, # Project GUID
|
||||
v, # Solution build configuration
|
||||
nv, # Project build config for that solution config
|
||||
)
|
||||
)
|
||||
|
||||
# Enable project in this solution configuration.
|
||||
f.write(
|
||||
"\t\t%s.%s.Build.0 = %s\r\n"
|
||||
% (
|
||||
g, # Project GUID
|
||||
v, # Solution build configuration
|
||||
nv, # Project build config for that solution config
|
||||
)
|
||||
)
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
# TODO(rspangler): Should be able to configure this stuff too (though I've
|
||||
# never seen this be any different)
|
||||
f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n")
|
||||
f.write("\t\tHideSolutionNode = FALSE\r\n")
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
# Folder mappings
|
||||
# Omit this section if there are no folders
|
||||
if any(e.entries for e in all_entries if isinstance(e, MSVSFolder)):
|
||||
f.write("\tGlobalSection(NestedProjects) = preSolution\r\n")
|
||||
for e in all_entries:
|
||||
if not isinstance(e, MSVSFolder):
|
||||
continue # Does not apply to projects, only folders
|
||||
for subentry in e.entries:
|
||||
f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n")
|
||||
f.write("\tEndGlobalSection\r\n")
|
||||
|
||||
f.write("EndGlobal\r\n")
|
||||
|
||||
f.close()
|
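For reference, MakeGuid in MSVSNew.py above derives a stable GUID-shaped string from an MD5 of seed plus name, which is why regenerating a project never changes its GUID. A standalone sketch of the same calculation:

```python
import hashlib

def make_guid(name, seed="msvs_new"):
    # MD5 of seed+name, reflowed into the 8-4-4-4-12 GUID layout, as MSVSNew.MakeGuid does.
    d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
    return "{%s-%s-%s-%s-%s}" % (d[:8], d[8:12], d[12:16], d[16:20], d[20:32])

# Deterministic: the same name and seed always produce the same GUID.
assert make_guid("base.vcproj") == make_guid("base.vcproj")
print(make_guid("base.vcproj"))
```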
206
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
generated
vendored
Executable file
@@ -0,0 +1,206 @@
|
|||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Visual Studio project reader/writer."""
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class Tool:
|
||||
"""Visual Studio tool."""
|
||||
|
||||
def __init__(self, name, attrs=None):
|
||||
"""Initializes the tool.
|
||||
|
||||
Args:
|
||||
name: Tool name.
|
||||
attrs: Dict of tool attributes; may be None.
|
||||
"""
|
||||
self._attrs = attrs or {}
|
||||
self._attrs["Name"] = name
|
||||
|
||||
def _GetSpecification(self):
|
||||
"""Creates an element for the tool.
|
||||
|
||||
Returns:
|
||||
A new xml.dom.Element for the tool.
|
||||
"""
|
||||
return ["Tool", self._attrs]
|
||||
|
||||
|
||||
class Filter:
|
||||
"""Visual Studio filter - that is, a virtual folder."""
|
||||
|
||||
def __init__(self, name, contents=None):
|
||||
"""Initializes the folder.
|
||||
|
||||
Args:
|
||||
name: Filter (folder) name.
|
||||
contents: List of filenames and/or Filter objects contained.
|
||||
"""
|
||||
self.name = name
|
||||
self.contents = list(contents or [])
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
class Writer:
|
||||
"""Visual Studio XML project writer."""
|
||||
|
||||
def __init__(self, project_path, version, name, guid=None, platforms=None):
|
||||
"""Initializes the project.
|
||||
|
||||
Args:
|
||||
project_path: Path to the project file.
|
||||
version: Format version to emit.
|
||||
name: Name of the project.
|
||||
guid: GUID to use for project, if not None.
|
||||
platforms: Array of string, the supported platforms. If null, ['Win32']
|
||||
"""
|
||||
self.project_path = project_path
|
||||
self.version = version
|
||||
self.name = name
|
||||
self.guid = guid
|
||||
|
||||
# Default to Win32 for platforms.
|
||||
if not platforms:
|
||||
platforms = ["Win32"]
|
||||
|
||||
# Initialize the specifications of the various sections.
|
||||
self.platform_section = ["Platforms"]
|
||||
for platform in platforms:
|
||||
self.platform_section.append(["Platform", {"Name": platform}])
|
||||
self.tool_files_section = ["ToolFiles"]
|
||||
self.configurations_section = ["Configurations"]
|
||||
self.files_section = ["Files"]
|
||||
|
||||
# Keep a dict keyed on filename to speed up access.
|
||||
self.files_dict = {}
|
||||
|
||||
def AddToolFile(self, path):
|
||||
"""Adds a tool file to the project.
|
||||
|
||||
Args:
|
||||
path: Relative path from project to tool file.
|
||||
"""
|
||||
self.tool_files_section.append(["ToolFile", {"RelativePath": path}])
|
||||
|
||||
def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
|
||||
"""Returns the specification for a configuration.
|
||||
|
||||
Args:
|
||||
config_type: Type of configuration node.
|
||||
config_name: Configuration name.
|
||||
attrs: Dict of configuration attributes; may be None.
|
||||
tools: List of tools (strings or Tool objects); may be None.
|
||||
Returns:
  The specification for the configuration, as a nested list for easy_xml.
|
||||
"""
|
||||
# Handle defaults
|
||||
if not attrs:
|
||||
attrs = {}
|
||||
if not tools:
|
||||
tools = []
|
||||
|
||||
# Add configuration node and its attributes
|
||||
node_attrs = attrs.copy()
|
||||
node_attrs["Name"] = config_name
|
||||
specification = [config_type, node_attrs]
|
||||
|
||||
# Add tool nodes and their attributes
|
||||
if tools:
|
||||
for t in tools:
|
||||
if isinstance(t, Tool):
|
||||
specification.append(t._GetSpecification())
|
||||
else:
|
||||
specification.append(Tool(t)._GetSpecification())
|
||||
return specification
|
||||
|
||||
def AddConfig(self, name, attrs=None, tools=None):
|
||||
"""Adds a configuration to the project.
|
||||
|
||||
Args:
|
||||
name: Configuration name.
|
||||
attrs: Dict of configuration attributes; may be None.
|
||||
tools: List of tools (strings or Tool objects); may be None.
|
||||
"""
|
||||
spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
|
||||
self.configurations_section.append(spec)
|
||||
|
||||
def _AddFilesToNode(self, parent, files):
|
||||
"""Adds files and/or filters to the parent node.
|
||||
|
||||
Args:
|
||||
parent: Destination node
|
||||
files: A list of Filter objects and/or relative paths to files.
|
||||
|
||||
Will call itself recursively, if the files list contains Filter objects.
|
||||
"""
|
||||
for f in files:
|
||||
if isinstance(f, Filter):
|
||||
node = ["Filter", {"Name": f.name}]
|
||||
self._AddFilesToNode(node, f.contents)
|
||||
else:
|
||||
node = ["File", {"RelativePath": f}]
|
||||
self.files_dict[f] = node
|
||||
parent.append(node)
|
||||
|
||||
def AddFiles(self, files):
|
||||
"""Adds files to the project.
|
||||
|
||||
Args:
|
||||
files: A list of Filter objects and/or relative paths to files.
|
||||
|
||||
This makes a copy of the file/filter tree at the time of this call. If you
|
||||
later add files to a Filter object which was passed into a previous call
|
||||
to AddFiles(), it will not be reflected in this project.
|
||||
"""
|
||||
self._AddFilesToNode(self.files_section, files)
|
||||
# TODO(rspangler) This also doesn't handle adding files to an existing
|
||||
# filter. That is, it doesn't merge the trees.
|
||||
|
||||
def AddFileConfig(self, path, config, attrs=None, tools=None):
|
||||
"""Adds a configuration to a file.
|
||||
|
||||
Args:
|
||||
path: Relative path to the file.
|
||||
config: Name of configuration to add.
|
||||
attrs: Dict of configuration attributes; may be None.
|
||||
tools: List of tools (strings or Tool objects); may be None.
|
||||
|
||||
Raises:
|
||||
ValueError: Relative path does not match any file added via AddFiles().
|
||||
"""
|
||||
# Find the file node with the right relative path
|
||||
parent = self.files_dict.get(path)
|
||||
if not parent:
|
||||
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
|
||||
|
||||
# Add the config to the file node
|
||||
spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools)
|
||||
parent.append(spec)
|
||||
|
||||
def WriteIfChanged(self):
|
||||
"""Writes the project file."""
|
||||
# First create XML content definition
|
||||
content = [
|
||||
"VisualStudioProject",
|
||||
{
|
||||
"ProjectType": "Visual C++",
|
||||
"Version": self.version.ProjectVersion(),
|
||||
"Name": self.name,
|
||||
"ProjectGUID": self.guid,
|
||||
"RootNamespace": self.name,
|
||||
"Keyword": "Win32Proj",
|
||||
},
|
||||
self.platform_section,
|
||||
self.tool_files_section,
|
||||
self.configurations_section,
|
||||
["References"], # empty section
|
||||
self.files_section,
|
||||
["Globals"], # empty section
|
||||
]
|
||||
easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252")
|
1270
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
generated
vendored
Executable file
File diff suppressed because it is too large
1547
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
generated
vendored
Executable file
File diff suppressed because it is too large
59
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
generated
vendored
Executable file
@@ -0,0 +1,59 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Visual Studio project reader/writer."""

import gyp.easy_xml as easy_xml


class Writer:
    """Visual Studio XML tool file writer."""

    def __init__(self, tool_file_path, name):
        """Initializes the tool file.

        Args:
          tool_file_path: Path to the tool file.
          name: Name of the tool file.
        """
        self.tool_file_path = tool_file_path
        self.name = name
        self.rules_section = ["Rules"]

    def AddCustomBuildRule(
        self, name, cmd, description, additional_dependencies, outputs, extensions
    ):
        """Adds a rule to the tool file.

        Args:
          name: Name of the rule.
          description: Description of the rule.
          cmd: Command line of the rule.
          additional_dependencies: other files which may trigger the rule.
          outputs: outputs of the rule.
          extensions: extensions handled by the rule.
        """
        rule = [
            "CustomBuildRule",
            {
                "Name": name,
                "ExecutionDescription": description,
                "CommandLine": cmd,
                "Outputs": ";".join(outputs),
                "FileExtensions": ";".join(extensions),
                "AdditionalDependencies": ";".join(additional_dependencies),
            },
        ]
        self.rules_section.append(rule)

    def WriteIfChanged(self):
        """Writes the tool file."""
        content = [
            "VisualStudioToolFile",
            {"Version": "8.00", "Name": self.name},
            self.rules_section,
        ]
        easy_xml.WriteXmlIfChanged(
            content, self.tool_file_path, encoding="Windows-1252"
        )
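A hedged usage sketch for the Writer above: the file name, rule name, and command line are made up for illustration, and running it requires gyp's pylib directory on sys.path so that gyp.easy_xml resolves:

```python
from gyp.MSVSToolFile import Writer  # requires gyp's pylib on sys.path

w = Writer("my_rules.rules", "my_rules")
w.AddCustomBuildRule(
    name="protoc",                                  # hypothetical rule name
    cmd="protoc --cpp_out=$(OutDir) $(InputPath)",  # hypothetical command line
    description="Compiling $(InputName).proto",
    additional_dependencies=[],
    outputs=["$(OutDir)/$(InputName).pb.cc"],
    extensions=["proto"],
)
w.WriteIfChanged()  # writes my_rules.rules via easy_xml.WriteXmlIfChanged
```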
153
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
generated
vendored
Executable file
@@ -0,0 +1,153 @@
|
|||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Visual Studio user preferences file writer."""
|
||||
|
||||
import os
|
||||
import re
|
||||
import socket # for gethostname
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _FindCommandInPath(command):
|
||||
"""If there are no slashes in the command given, this function
|
||||
searches the PATH env to find the given command, and converts it
|
||||
to an absolute path. We have to do this because MSVS is looking
|
||||
for an actual file to launch a debugger on, not just a command
|
||||
line. Note that this happens at GYP time, so anything needing to
|
||||
be built needs to have a full path."""
|
||||
if "/" in command or "\\" in command:
|
||||
# If the command already has path elements (either relative or
|
||||
# absolute), then assume it is constructed properly.
|
||||
return command
|
||||
else:
|
||||
# Search through the path list and find an existing file that
|
||||
# we can access.
|
||||
paths = os.environ.get("PATH", "").split(os.pathsep)
|
||||
for path in paths:
|
||||
item = os.path.join(path, command)
|
||||
if os.path.isfile(item) and os.access(item, os.X_OK):
|
||||
return item
|
||||
return command
|
||||
|
||||
|
||||
def _QuoteWin32CommandLineArgs(args):
|
||||
new_args = []
|
||||
for arg in args:
|
||||
# Replace all double-quotes with double-double-quotes to escape
|
||||
# them for cmd shell, and then quote the whole thing if there
|
||||
# are any.
|
||||
if arg.find('"') != -1:
|
||||
arg = '""'.join(arg.split('"'))
|
||||
arg = '"%s"' % arg
|
||||
|
||||
# Otherwise, if there are any spaces, quote the whole arg.
|
||||
elif re.search(r"[ \t\n]", arg):
|
||||
arg = '"%s"' % arg
|
||||
new_args.append(arg)
|
||||
return new_args
|
||||
|
||||
|
||||
class Writer:
|
||||
"""Visual Studio XML user user file writer."""
|
||||
|
||||
def __init__(self, user_file_path, version, name):
|
||||
"""Initializes the user file.
|
||||
|
||||
Args:
|
||||
user_file_path: Path to the user file.
|
||||
version: Version info.
|
||||
name: Name of the user file.
|
||||
"""
|
||||
self.user_file_path = user_file_path
|
||||
self.version = version
|
||||
self.name = name
|
||||
self.configurations = {}
|
||||
|
||||
def AddConfig(self, name):
|
||||
"""Adds a configuration to the project.
|
||||
|
||||
Args:
|
||||
name: Configuration name.
|
||||
"""
|
||||
self.configurations[name] = ["Configuration", {"Name": name}]
|
||||
|
||||
def AddDebugSettings(
|
||||
self, config_name, command, environment={}, working_directory=""
|
||||
):
|
||||
"""Adds a DebugSettings node to the user file for a particular config.
|
||||
|
||||
Args:
|
||||
command: command line to run. First element in the list is the
|
||||
executable. All elements of the command will be quoted if
|
||||
necessary.
|
||||
working_directory: working directory in which to run the command. (optional)
|
||||
"""
|
||||
command = _QuoteWin32CommandLineArgs(command)
|
||||
|
||||
abs_command = _FindCommandInPath(command[0])
|
||||
|
||||
if environment and isinstance(environment, dict):
|
||||
env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
|
||||
environment = " ".join(env_list)
|
||||
else:
|
||||
environment = ""
|
||||
|
||||
n_cmd = [
|
||||
"DebugSettings",
|
||||
{
|
||||
"Command": abs_command,
|
||||
"WorkingDirectory": working_directory,
|
||||
"CommandArguments": " ".join(command[1:]),
|
||||
"RemoteMachine": socket.gethostname(),
|
||||
"Environment": environment,
|
||||
"EnvironmentMerge": "true",
|
||||
# Currently these are all "dummy" values that we're just setting
|
||||
# in the default manner that MSVS does it. We could use some of
|
||||
# these to add additional capabilities, I suppose, but they might
|
||||
# not have parity with other platforms then.
|
||||
"Attach": "false",
|
||||
"DebuggerType": "3", # 'auto' debugger
|
||||
"Remote": "1",
|
||||
"RemoteCommand": "",
|
||||
"HttpUrl": "",
|
||||
"PDBPath": "",
|
||||
"SQLDebugging": "",
|
||||
"DebuggerFlavor": "0",
|
||||
"MPIRunCommand": "",
|
||||
"MPIRunArguments": "",
|
||||
"MPIRunWorkingDirectory": "",
|
||||
"ApplicationCommand": "",
|
||||
"ApplicationArguments": "",
|
||||
"ShimCommand": "",
|
||||
"MPIAcceptMode": "",
|
||||
"MPIAcceptFilter": "",
|
||||
},
|
||||
]
|
||||
|
||||
# Find the config, and add it if it doesn't exist.
|
||||
if config_name not in self.configurations:
|
||||
self.AddConfig(config_name)
|
||||
|
||||
# Add the DebugSettings onto the appropriate config.
|
||||
self.configurations[config_name].append(n_cmd)
|
||||
|
||||
def WriteIfChanged(self):
|
||||
"""Writes the user file."""
|
||||
configs = ["Configurations"]
|
||||
for config, spec in sorted(self.configurations.items()):
|
||||
configs.append(spec)
|
||||
|
||||
content = [
|
||||
"VisualStudioUserFile",
|
||||
{"Version": self.version.ProjectVersion(), "Name": self.name},
|
||||
configs,
|
||||
]
|
||||
easy_xml.WriteXmlIfChanged(
|
||||
content, self.user_file_path, encoding="Windows-1252"
|
||||
)
|
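The _QuoteWin32CommandLineArgs helper above escapes embedded double quotes by doubling them, then wraps any argument containing quotes or whitespace in quotes before it lands in the .user file. A standalone sketch of that rule:

```python
import re

def quote_win32_args(args):
    # Mirrors _QuoteWin32CommandLineArgs in MSVSUserFile.py: double embedded quotes,
    # then quote arguments that contain quotes or whitespace.
    quoted = []
    for arg in args:
        if '"' in arg:
            arg = '"%s"' % arg.replace('"', '""')
        elif re.search(r"[ \t\n]", arg):
            arg = '"%s"' % arg
        quoted.append(arg)
    return quoted

print(quote_win32_args(["app.exe", "C:\\Program Files\\demo", 'say "hi"']))
```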
271
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
generated
vendored
Executable file
@@ -0,0 +1,271 @@
|
|||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions shared amongst the Windows generators."""
|
||||
|
||||
import copy
|
||||
import os
|
||||
|
||||
|
||||
# A dictionary mapping supported target types to extensions.
|
||||
TARGET_TYPE_EXT = {
|
||||
"executable": "exe",
|
||||
"loadable_module": "dll",
|
||||
"shared_library": "dll",
|
||||
"static_library": "lib",
|
||||
"windows_driver": "sys",
|
||||
}
|
||||
|
||||
|
||||
def _GetLargePdbShimCcPath():
|
||||
"""Returns the path of the large_pdb_shim.cc file."""
|
||||
this_dir = os.path.abspath(os.path.dirname(__file__))
|
||||
src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
|
||||
win_data_dir = os.path.join(src_dir, "data", "win")
|
||||
large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
|
||||
return large_pdb_shim_cc
|
||||
|
||||
|
||||
def _DeepCopySomeKeys(in_dict, keys):
|
||||
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
|
||||
|
||||
Arguments:
|
||||
in_dict: The dictionary to copy.
|
||||
keys: The keys to be copied. If a key is in this list and doesn't exist in
|
||||
|in_dict| this is not an error.
|
||||
Returns:
|
||||
The partially deep-copied dictionary.
|
||||
"""
|
||||
d = {}
|
||||
for key in keys:
|
||||
if key not in in_dict:
|
||||
continue
|
||||
d[key] = copy.deepcopy(in_dict[key])
|
||||
return d
|
||||
|
||||
|
||||
def _SuffixName(name, suffix):
|
||||
"""Add a suffix to the end of a target.
|
||||
|
||||
Arguments:
|
||||
name: name of the target (foo#target)
|
||||
suffix: the suffix to be added
|
||||
Returns:
|
||||
Target name with suffix added (foo_suffix#target)
|
||||
"""
|
||||
parts = name.rsplit("#", 1)
|
||||
parts[0] = f"{parts[0]}_{suffix}"
|
||||
return "#".join(parts)
|
||||
|
||||
|
||||
def _ShardName(name, number):
|
||||
"""Add a shard number to the end of a target.
|
||||
|
||||
Arguments:
|
||||
name: name of the target (foo#target)
|
||||
number: shard number
|
||||
Returns:
|
||||
Target name with shard added (foo_1#target)
|
||||
"""
|
||||
return _SuffixName(name, str(number))
|
||||
|
||||
|
||||
def ShardTargets(target_list, target_dicts):
|
||||
"""Shard some targets apart to work around the linkers limits.
|
||||
|
||||
Arguments:
|
||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||
target_dicts: Dict of target properties keyed on target pair.
|
||||
Returns:
|
||||
Tuple of the new sharded versions of the inputs.
|
||||
"""
|
||||
# Gather the targets to shard, and how many pieces.
|
||||
targets_to_shard = {}
|
||||
for t in target_dicts:
|
||||
shards = int(target_dicts[t].get("msvs_shard", 0))
|
||||
if shards:
|
||||
targets_to_shard[t] = shards
|
||||
# Shard target_list.
|
||||
new_target_list = []
|
||||
for t in target_list:
|
||||
if t in targets_to_shard:
|
||||
for i in range(targets_to_shard[t]):
|
||||
new_target_list.append(_ShardName(t, i))
|
||||
else:
|
||||
new_target_list.append(t)
|
||||
# Shard target_dict.
|
||||
new_target_dicts = {}
|
||||
for t in target_dicts:
|
||||
if t in targets_to_shard:
|
||||
for i in range(targets_to_shard[t]):
|
||||
name = _ShardName(t, i)
|
||||
new_target_dicts[name] = copy.copy(target_dicts[t])
|
||||
new_target_dicts[name]["target_name"] = _ShardName(
|
||||
new_target_dicts[name]["target_name"], i
|
||||
)
|
||||
sources = new_target_dicts[name].get("sources", [])
|
||||
new_sources = []
|
||||
for pos in range(i, len(sources), targets_to_shard[t]):
|
||||
new_sources.append(sources[pos])
|
||||
new_target_dicts[name]["sources"] = new_sources
|
||||
else:
|
||||
new_target_dicts[t] = target_dicts[t]
|
||||
# Shard dependencies.
|
||||
for t in sorted(new_target_dicts):
|
||||
for deptype in ("dependencies", "dependencies_original"):
|
||||
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
|
||||
new_dependencies = []
|
||||
for d in dependencies:
|
||||
if d in targets_to_shard:
|
||||
for i in range(targets_to_shard[d]):
|
||||
new_dependencies.append(_ShardName(d, i))
|
||||
else:
|
||||
new_dependencies.append(d)
|
||||
new_target_dicts[t][deptype] = new_dependencies
|
||||
|
||||
return (new_target_list, new_target_dicts)
|
||||
|
||||
|
||||
def _GetPdbPath(target_dict, config_name, vars):
|
||||
"""Returns the path to the PDB file that will be generated by a given
|
||||
configuration.
|
||||
|
||||
The lookup proceeds as follows:
|
||||
- Look for an explicit path in the VCLinkerTool configuration block.
|
||||
- Look for an 'msvs_large_pdb_path' variable.
|
||||
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
|
||||
specified.
|
||||
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
|
||||
|
||||
Arguments:
|
||||
target_dict: The target dictionary to be searched.
|
||||
config_name: The name of the configuration of interest.
|
||||
vars: A dictionary of common GYP variables with generator-specific values.
|
||||
Returns:
|
||||
The path of the corresponding PDB file.
|
||||
"""
|
||||
config = target_dict["configurations"][config_name]
|
||||
msvs = config.setdefault("msvs_settings", {})
|
||||
|
||||
linker = msvs.get("VCLinkerTool", {})
|
||||
|
||||
pdb_path = linker.get("ProgramDatabaseFile")
|
||||
if pdb_path:
|
||||
return pdb_path
|
||||
|
||||
variables = target_dict.get("variables", {})
|
||||
pdb_path = variables.get("msvs_large_pdb_path", None)
|
||||
if pdb_path:
|
||||
return pdb_path
|
||||
|
||||
pdb_base = target_dict.get("product_name", target_dict["target_name"])
|
||||
pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]])
|
||||
pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base
|
||||
|
||||
return pdb_path
|
||||
|
||||
|
||||
def InsertLargePdbShims(target_list, target_dicts, vars):
|
||||
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
|
||||
|
||||
This is a workaround for targets with PDBs greater than 1GB in size, the
|
||||
limit for the 1KB pagesize PDBs created by the linker by default.
|
||||
|
||||
Arguments:
|
||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||
target_dicts: Dict of target properties keyed on target pair.
|
||||
vars: A dictionary of common GYP variables with generator-specific values.
|
||||
Returns:
|
||||
Tuple of the shimmed version of the inputs.
|
||||
"""
|
||||
# Determine which targets need shimming.
|
||||
targets_to_shim = []
|
||||
for t in target_dicts:
|
||||
target_dict = target_dicts[t]
|
||||
|
||||
# We only want to shim targets that have msvs_large_pdb enabled.
|
||||
if not int(target_dict.get("msvs_large_pdb", 0)):
|
||||
continue
|
||||
# This is intended for executable, shared_library and loadable_module
|
||||
# targets where every configuration is set up to produce a PDB output.
|
||||
# If any of these conditions is not true then the shim logic will fail
|
||||
# below.
|
||||
targets_to_shim.append(t)
|
||||
|
||||
large_pdb_shim_cc = _GetLargePdbShimCcPath()
|
||||
|
||||
for t in targets_to_shim:
|
||||
target_dict = target_dicts[t]
|
||||
target_name = target_dict.get("target_name")
|
||||
|
||||
base_dict = _DeepCopySomeKeys(
|
||||
target_dict, ["configurations", "default_configuration", "toolset"]
|
||||
)
|
||||
|
||||
# This is the dict for copying the source file (part of the GYP tree)
|
||||
# to the intermediate directory of the project. This is necessary because
|
||||
# we can't always build a relative path to the shim source file (on Windows
|
||||
# GYP and the project may be on different drives), and Ninja hates absolute
|
||||
# paths (it ends up generating the .obj and .obj.d alongside the source
|
||||
# file, polluting GYPs tree).
|
||||
copy_suffix = "large_pdb_copy"
|
||||
copy_target_name = target_name + "_" + copy_suffix
|
||||
full_copy_target_name = _SuffixName(t, copy_suffix)
|
||||
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
|
||||
shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
|
||||
shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
|
||||
copy_dict = copy.deepcopy(base_dict)
|
||||
copy_dict["target_name"] = copy_target_name
|
||||
copy_dict["type"] = "none"
|
||||
copy_dict["sources"] = [large_pdb_shim_cc]
|
||||
copy_dict["copies"] = [
|
||||
{"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
|
||||
]
|
||||
|
||||
# This is the dict for the PDB generating shim target. It depends on the
|
||||
# copy target.
|
||||
shim_suffix = "large_pdb_shim"
|
||||
shim_target_name = target_name + "_" + shim_suffix
|
||||
full_shim_target_name = _SuffixName(t, shim_suffix)
|
||||
shim_dict = copy.deepcopy(base_dict)
|
||||
shim_dict["target_name"] = shim_target_name
|
||||
shim_dict["type"] = "static_library"
|
||||
shim_dict["sources"] = [shim_cc_path]
|
||||
shim_dict["dependencies"] = [full_copy_target_name]
|
||||
|
||||
# Set up the shim to output its PDB to the same location as the final linker
|
||||
# target.
|
||||
for config_name, config in shim_dict.get("configurations").items():
|
||||
pdb_path = _GetPdbPath(target_dict, config_name, vars)
|
||||
|
||||
# A few keys that we don't want to propagate.
|
||||
for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
|
||||
config.pop(key, None)
|
||||
|
||||
msvs = config.setdefault("msvs_settings", {})
|
||||
|
||||
# Update the compiler directives in the shim target.
|
||||
compiler = msvs.setdefault("VCCLCompilerTool", {})
|
||||
compiler["DebugInformationFormat"] = "3"
|
||||
compiler["ProgramDataBaseFileName"] = pdb_path
|
||||
|
||||
# Set the explicit PDB path in the appropriate configuration of the
|
||||
# original target.
|
||||
config = target_dict["configurations"][config_name]
|
||||
msvs = config.setdefault("msvs_settings", {})
|
||||
linker = msvs.setdefault("VCLinkerTool", {})
|
||||
linker["GenerateDebugInformation"] = "true"
|
||||
linker["ProgramDatabaseFile"] = pdb_path
|
||||
|
||||
# Add the new targets. They must go to the beginning of the list so that
|
||||
# the dependency generation works as expected in ninja.
|
||||
target_list.insert(0, full_copy_target_name)
|
||||
target_list.insert(0, full_shim_target_name)
|
||||
target_dicts[full_copy_target_name] = copy_dict
|
||||
target_dicts[full_shim_target_name] = shim_dict
|
||||
|
||||
# Update the original target to depend on the shim target.
|
||||
target_dict.setdefault("dependencies", []).append(full_shim_target_name)
|
||||
|
||||
return (target_list, target_dicts)
|
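ShardTargets and its helpers above split a large target into N shards by rewriting the qualified target name: the portion before the trailing '#target' gets a '_<shard>' suffix, and sources are dealt out round-robin across the shards. A standalone sketch of the naming rule:

```python
def suffix_name(name, suffix):
    # "foo#target" -> "foo_<suffix>#target", as _SuffixName does in MSVSUtil.py.
    parts = name.rsplit("#", 1)
    parts[0] = f"{parts[0]}_{suffix}"
    return "#".join(parts)

def shard_name(name, number):
    return suffix_name(name, str(number))

print(shard_name("base/base.gyp:base#target", 1))  # -> base/base.gyp:base_1#target
```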
574
my-app/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
generated
vendored
Executable file
@@ -0,0 +1,574 @@
|
|||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Handle version information related to Visual Stuio."""
|
||||
|
||||
import errno
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import glob
|
||||
|
||||
|
||||
def JoinPath(*args):
|
||||
return os.path.normpath(os.path.join(*args))
|
||||
|
||||
|
||||
class VisualStudioVersion:
|
||||
"""Information regarding a version of Visual Studio."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
short_name,
|
||||
description,
|
||||
solution_version,
|
||||
project_version,
|
||||
flat_sln,
|
||||
uses_vcxproj,
|
||||
path,
|
||||
sdk_based,
|
||||
default_toolset=None,
|
||||
compatible_sdks=None,
|
||||
):
|
||||
self.short_name = short_name
|
||||
self.description = description
|
||||
self.solution_version = solution_version
|
||||
self.project_version = project_version
|
||||
self.flat_sln = flat_sln
|
||||
self.uses_vcxproj = uses_vcxproj
|
||||
self.path = path
|
||||
self.sdk_based = sdk_based
|
||||
self.default_toolset = default_toolset
|
||||
compatible_sdks = compatible_sdks or []
|
||||
compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
|
||||
self.compatible_sdks = compatible_sdks
|
||||
|
||||
def ShortName(self):
|
||||
return self.short_name
|
||||
|
||||
def Description(self):
|
||||
"""Get the full description of the version."""
|
||||
return self.description
|
||||
|
||||
def SolutionVersion(self):
|
||||
"""Get the version number of the sln files."""
|
||||
return self.solution_version
|
||||
|
||||
def ProjectVersion(self):
|
||||
"""Get the version number of the vcproj or vcxproj files."""
|
||||
return self.project_version
|
||||
|
||||
def FlatSolution(self):
|
||||
return self.flat_sln
|
||||
|
||||
def UsesVcxproj(self):
|
||||
"""Returns true if this version uses a vcxproj file."""
|
||||
return self.uses_vcxproj
|
||||
|
||||
def ProjectExtension(self):
|
||||
"""Returns the file extension for the project."""
|
||||
return self.uses_vcxproj and ".vcxproj" or ".vcproj"
|
||||
|
||||
def Path(self):
|
||||
"""Returns the path to Visual Studio installation."""
|
||||
return self.path
|
||||
|
||||
def ToolPath(self, tool):
|
||||
"""Returns the path to a given compiler tool. """
|
||||
return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
|
||||
|
||||
def DefaultToolset(self):
|
||||
"""Returns the msbuild toolset version that will be used in the absence
|
||||
of a user override."""
|
||||
return self.default_toolset
|
||||
|
||||
def _SetupScriptInternal(self, target_arch):
|
||||
"""Returns a command (with arguments) to be used to set up the
|
||||
environment."""
|
||||
assert target_arch in ("x86", "x64"), "target_arch not supported"
|
||||
# If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
|
||||
# depot_tools build tools and should run SetEnv.Cmd to set up the
|
||||
# environment. The check for WindowsSDKDir alone is not sufficient because
|
||||
# this is set by running vcvarsall.bat.
|
||||
sdk_dir = os.environ.get("WindowsSDKDir", "")
|
||||
setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd")
|
||||
if self.sdk_based and sdk_dir and os.path.exists(setup_path):
|
||||
return [setup_path, "/" + target_arch]
|
||||
|
||||
is_host_arch_x64 = (
|
||||
os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
|
||||
or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
|
||||
)
|
||||
|
||||
# For VS2017 (and newer) it's fairly easy
|
||||
if self.short_name >= "2017":
|
||||
script_path = JoinPath(
|
||||
self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat"
|
||||
)
|
||||
|
||||
# Always use a native executable, cross-compiling if necessary.
|
||||
host_arch = "amd64" if is_host_arch_x64 else "x86"
|
||||
msvc_target_arch = "amd64" if target_arch == "x64" else "x86"
|
||||
arg = host_arch
|
||||
if host_arch != msvc_target_arch:
|
||||
arg += "_" + msvc_target_arch
|
||||
|
||||
return [script_path, arg]
|
||||
|
||||
# We try to find the best version of the env setup batch.
|
||||
vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat")
|
||||
if target_arch == "x86":
|
||||
if (
|
||||
self.short_name >= "2013"
|
||||
and self.short_name[-1] != "e"
|
||||
and is_host_arch_x64
|
||||
):
|
||||
# VS2013 and later, non-Express have a x64-x86 cross that we want
|
||||
# to prefer.
|
||||
return [vcvarsall, "amd64_x86"]
|
||||
else:
|
||||
# Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat
|
||||
# for x86 because vcvarsall calls vcvars32, which it can only find if
|
||||
# VS??COMNTOOLS is set, which isn't guaranteed.
|
||||
return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")]
|
||||
elif target_arch == "x64":
|
||||
arg = "x86_amd64"
|
||||
# Use the 64-on-64 compiler if we're not using an express edition and
|
||||
# we're running on a 64bit OS.
|
||||
if self.short_name[-1] != "e" and is_host_arch_x64:
|
||||
arg = "amd64"
|
||||
return [vcvarsall, arg]
|
||||
|
||||
def SetupScript(self, target_arch):
|
||||
script_data = self._SetupScriptInternal(target_arch)
|
||||
script_path = script_data[0]
|
||||
if not os.path.exists(script_path):
|
||||
raise Exception(
|
||||
"%s is missing - make sure VC++ tools are installed." % script_path
|
||||
)
|
||||
return script_data
|
||||
|
||||
|
||||
def _RegistryQueryBase(sysdir, key, value):
|
||||
"""Use reg.exe to read a particular key.
|
||||
|
||||
While ideally we might use the win32 module, we would like gyp to be
|
||||
Python neutral (for instance, Cygwin Python lacks this module).
|
||||
|
||||
Arguments:
|
||||
sysdir: The system subdirectory to attempt to launch reg.exe from.
|
||||
key: The registry key to read from.
|
||||
value: The particular value to read.
|
||||
Return:
|
||||
stdout from reg.exe, or None for failure.
|
||||
"""
|
||||
# Skip if not on Windows or Python Win32 setup issue
|
||||
if sys.platform not in ("win32", "cygwin"):
|
||||
return None
|
||||
# Setup params to pass to and attempt to launch reg.exe
|
||||
cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key]
|
||||
if value:
|
||||
cmd.extend(["/v", value])
|
||||
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
# Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
|
||||
# Note that the error text may be in [1] in some cases
|
||||
text = p.communicate()[0].decode("utf-8")
|
||||
# Check return code from reg.exe; officially 0==success and 1==error
|
||||
if p.returncode:
|
||||
return None
|
||||
return text
|
||||
|
||||
|
||||
def _RegistryQuery(key, value=None):
|
||||
r"""Use reg.exe to read a particular key through _RegistryQueryBase.
|
||||
|
||||
First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
|
||||
that fails, it falls back to System32. Sysnative is available on Vista and
|
||||
up and available on Windows Server 2003 and XP through KB patch 942589. Note
|
||||
that Sysnative will always fail if using 64-bit python due to it being a
|
||||
virtual directory and System32 will work correctly in the first place.
|
||||
|
||||
KB 942589 - http://support.microsoft.com/kb/942589/en-us.
|
||||
|
||||
Arguments:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read (optional).
|
||||
Return:
|
||||
stdout from reg.exe, or None for failure.
|
||||
"""
|
||||
text = None
|
||||
try:
|
||||
text = _RegistryQueryBase("Sysnative", key, value)
|
||||
except OSError as e:
|
||||
if e.errno == errno.ENOENT:
|
||||
text = _RegistryQueryBase("System32", key, value)
|
||||
else:
|
||||
raise
|
||||
return text
|
||||
|
||||
|
||||
def _RegistryGetValueUsingWinReg(key, value):
|
||||
"""Use the _winreg module to obtain the value of a registry key.
|
||||
|
||||
Args:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read.
|
||||
Return:
|
||||
contents of the registry key's value, or None on failure. Throws
|
||||
ImportError if winreg is unavailable.
|
||||
"""
|
||||
from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
|
||||
try:
|
||||
root, subkey = key.split("\\", 1)
|
||||
assert root == "HKLM" # Only need HKLM for now.
|
||||
with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
|
||||
return QueryValueEx(hkey, value)[0]
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
|
||||
def _RegistryGetValue(key, value):
|
||||
"""Use _winreg or reg.exe to obtain the value of a registry key.
|
||||
|
||||
Using _winreg is preferable because it solves an issue on some corporate
|
||||
environments where access to reg.exe is locked down. However, we still need
|
||||
to fallback to reg.exe for the case where the _winreg module is not available
|
||||
(for example in cygwin python).
|
||||
|
||||
Args:
|
||||
key: The registry key.
|
||||
value: The particular registry value to read.
|
||||
Return:
|
||||
contents of the registry key's value, or None on failure.
|
||||
"""
|
||||
try:
|
||||
return _RegistryGetValueUsingWinReg(key, value)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Fallback to reg.exe if we fail to import _winreg.
|
||||
text = _RegistryQuery(key, value)
|
||||
if not text:
|
||||
return None
|
||||
# Extract value.
|
||||
match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text)
|
||||
if not match:
|
||||
return None
|
||||
return match.group(1)
|
||||
|
||||
|
||||
def _CreateVersion(name, path, sdk_based=False):
|
||||
"""Sets up MSVS project generation.
|
||||
|
||||
Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
|
||||
autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
|
||||
passed in that doesn't match a value in versions, Python will throw an error.
|
||||
"""
|
||||
if path:
|
||||
path = os.path.normpath(path)
|
||||
versions = {
|
||||
"2022": VisualStudioVersion(
|
||||
"2022",
|
||||
"Visual Studio 2022",
|
||||
solution_version="12.00",
|
||||
project_version="17.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v143",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2019": VisualStudioVersion(
|
||||
"2019",
|
||||
"Visual Studio 2019",
|
||||
solution_version="12.00",
|
||||
project_version="16.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v142",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2017": VisualStudioVersion(
|
||||
"2017",
|
||||
"Visual Studio 2017",
|
||||
solution_version="12.00",
|
||||
project_version="15.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v141",
|
||||
compatible_sdks=["v8.1", "v10.0"],
|
||||
),
|
||||
"2015": VisualStudioVersion(
|
||||
"2015",
|
||||
"Visual Studio 2015",
|
||||
solution_version="12.00",
|
||||
project_version="14.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v140",
|
||||
),
|
||||
"2013": VisualStudioVersion(
|
||||
"2013",
|
||||
"Visual Studio 2013",
|
||||
solution_version="13.00",
|
||||
project_version="12.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v120",
|
||||
),
|
||||
"2013e": VisualStudioVersion(
|
||||
"2013e",
|
||||
"Visual Studio 2013",
|
||||
solution_version="13.00",
|
||||
project_version="12.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v120",
|
||||
),
|
||||
"2012": VisualStudioVersion(
|
||||
"2012",
|
||||
"Visual Studio 2012",
|
||||
solution_version="12.00",
|
||||
project_version="4.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v110",
|
||||
),
|
||||
"2012e": VisualStudioVersion(
|
||||
"2012e",
|
||||
"Visual Studio 2012",
|
||||
solution_version="12.00",
|
||||
project_version="4.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
default_toolset="v110",
|
||||
),
|
||||
"2010": VisualStudioVersion(
|
||||
"2010",
|
||||
"Visual Studio 2010",
|
||||
solution_version="11.00",
|
||||
project_version="4.0",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2010e": VisualStudioVersion(
|
||||
"2010e",
|
||||
"Visual C++ Express 2010",
|
||||
solution_version="11.00",
|
||||
project_version="4.0",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=True,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2008": VisualStudioVersion(
|
||||
"2008",
|
||||
"Visual Studio 2008",
|
||||
solution_version="10.00",
|
||||
project_version="9.00",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2008e": VisualStudioVersion(
|
||||
"2008e",
|
||||
"Visual Studio 2008",
|
||||
solution_version="10.00",
|
||||
project_version="9.00",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2005": VisualStudioVersion(
|
||||
"2005",
|
||||
"Visual Studio 2005",
|
||||
solution_version="9.00",
|
||||
project_version="8.00",
|
||||
flat_sln=False,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
"2005e": VisualStudioVersion(
|
||||
"2005e",
|
||||
"Visual Studio 2005",
|
||||
solution_version="9.00",
|
||||
project_version="8.00",
|
||||
flat_sln=True,
|
||||
uses_vcxproj=False,
|
||||
path=path,
|
||||
sdk_based=sdk_based,
|
||||
),
|
||||
}
|
||||
return versions[str(name)]
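A quick illustration of how this lookup table behaves (a sketch, not part of the file): known year strings return the corresponding VisualStudioVersion object, anything else raises KeyError.
```
vs2019 = _CreateVersion("2019", None)  # VisualStudioVersion for VS 2019 (toolset v142)
vs2022 = _CreateVersion(2022, None)    # int keys also work, thanks to str(name)
_CreateVersion("2003", None)           # unknown year -> KeyError
```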
|
||||
|
||||
|
||||
def _ConvertToCygpath(path):
|
||||
"""Convert to cygwin path if we are using cygwin."""
|
||||
if sys.platform == "cygwin":
|
||||
p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE)
|
||||
path = p.communicate()[0].decode("utf-8").strip()
|
||||
return path
|
||||
|
||||
|
||||
def _DetectVisualStudioVersions(versions_to_check, force_express):
|
||||
"""Collect the list of installed visual studio versions.
|
||||
|
||||
Returns:
|
||||
A list of visual studio versions installed in descending order of
|
||||
usage preference.
|
||||
Base this on the registry and a quick check if devenv.exe exists.
|
||||
Possibilities are:
|
||||
2005(e) - Visual Studio 2005 (8)
|
||||
2008(e) - Visual Studio 2008 (9)
|
||||
2010(e) - Visual Studio 2010 (10)
|
||||
2012(e) - Visual Studio 2012 (11)
|
||||
2013(e) - Visual Studio 2013 (12)
|
||||
2015 - Visual Studio 2015 (14)
|
||||
2017 - Visual Studio 2017 (15)
|
||||
2019 - Visual Studio 2019 (16)
|
||||
2022 - Visual Studio 2022 (17)
|
||||
Where (e) is e for express editions of MSVS and blank otherwise.
|
||||
"""
|
||||
version_to_year = {
|
||||
"8.0": "2005",
|
||||
"9.0": "2008",
|
||||
"10.0": "2010",
|
||||
"11.0": "2012",
|
||||
"12.0": "2013",
|
||||
"14.0": "2015",
|
||||
"15.0": "2017",
|
||||
"16.0": "2019",
|
||||
"17.0": "2022",
|
||||
}
|
||||
versions = []
|
||||
for version in versions_to_check:
|
||||
# Old method of searching for which VS version is installed
|
||||
# We don't use the 2010-encouraged-way because we also want to get the
|
||||
# path to the binaries, which it doesn't offer.
|
||||
keys = [
|
||||
r"HKLM\Software\Microsoft\VisualStudio\%s" % version,
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version,
|
||||
r"HKLM\Software\Microsoft\VCExpress\%s" % version,
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version,
|
||||
]
|
||||
for index in range(len(keys)):
|
||||
path = _RegistryGetValue(keys[index], "InstallDir")
|
||||
if not path:
|
||||
continue
|
||||
path = _ConvertToCygpath(path)
|
||||
# Check for full.
|
||||
full_path = os.path.join(path, "devenv.exe")
|
||||
express_path = os.path.join(path, "*express.exe")
|
||||
if not force_express and os.path.exists(full_path):
|
||||
# Add this one.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version], os.path.join(path, "..", "..")
|
||||
)
|
||||
)
|
||||
# Check for express.
|
||||
elif glob.glob(express_path):
|
||||
# Add this one.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version] + "e", os.path.join(path, "..", "..")
|
||||
)
|
||||
)
|
||||
|
||||
# The old method above does not work when only SDK is installed.
|
||||
keys = [
|
||||
r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7",
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7",
|
||||
r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7",
|
||||
r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7",
|
||||
]
|
||||
for index in range(len(keys)):
|
||||
path = _RegistryGetValue(keys[index], version)
|
||||
if not path:
|
||||
continue
|
||||
path = _ConvertToCygpath(path)
|
||||
if version == "15.0":
|
||||
if os.path.exists(path):
|
||||
versions.append(_CreateVersion("2017", path))
|
||||
elif version != "14.0": # There is no Express edition for 2015.
|
||||
versions.append(
|
||||
_CreateVersion(
|
||||
version_to_year[version] + "e",
|
||||
os.path.join(path, ".."),
|
||||
sdk_based=True,
|
||||
)
|
||||
)
|
||||
|
||||
return versions
|
||||
|
||||
|
||||
def SelectVisualStudioVersion(version="auto", allow_fallback=True):
|
||||
"""Select which version of Visual Studio projects to generate.
|
||||
|
||||
Arguments:
|
||||
version: Hook to allow caller to force a particular version (vs auto).
|
||||
Returns:
|
||||
An object representing a visual studio project format version.
|
||||
"""
|
||||
# In auto mode, check environment variable for override.
|
||||
if version == "auto":
|
||||
version = os.environ.get("GYP_MSVS_VERSION", "auto")
|
||||
version_map = {
|
||||
"auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"),
|
||||
"2005": ("8.0",),
|
||||
"2005e": ("8.0",),
|
||||
"2008": ("9.0",),
|
||||
"2008e": ("9.0",),
|
||||
"2010": ("10.0",),
|
||||
"2010e": ("10.0",),
|
||||
"2012": ("11.0",),
|
||||
"2012e": ("11.0",),
|
||||
"2013": ("12.0",),
|
||||
"2013e": ("12.0",),
|
||||
"2015": ("14.0",),
|
||||
"2017": ("15.0",),
|
||||
"2019": ("16.0",),
|
||||
"2022": ("17.0",),
|
||||
}
|
||||
override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
|
||||
if override_path:
|
||||
msvs_version = os.environ.get("GYP_MSVS_VERSION")
|
||||
if not msvs_version:
|
||||
raise ValueError(
|
||||
"GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be "
|
||||
"set to a particular version (e.g. 2010e)."
|
||||
)
|
||||
return _CreateVersion(msvs_version, override_path, sdk_based=True)
|
||||
version = str(version)
|
||||
versions = _DetectVisualStudioVersions(version_map[version], "e" in version)
|
||||
if not versions:
|
||||
if not allow_fallback:
|
||||
raise ValueError("Could not locate Visual Studio installation.")
|
||||
if version == "auto":
|
||||
# Default to 2005 if we couldn't find anything
|
||||
return _CreateVersion("2005", None)
|
||||
else:
|
||||
return _CreateVersion(version, None)
|
||||
return versions[0]
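A hedged usage sketch of the selection logic above: GYP_MSVS_VERSION overrides auto-detection, and with allow_fallback the function returns a VS 2005 description rather than failing when nothing is found.
```
import os

# Force a specific toolchain via the environment (read when version == "auto"):
os.environ["GYP_MSVS_VERSION"] = "2019"
version = SelectVisualStudioVersion()
# With no override and no detectable installation, allow_fallback=True means the
# function returns the Visual Studio 2005 description instead of raising.
```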
|
692
my-app/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
generated
vendored
Executable file
@ -0,0 +1,692 @@
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
import copy
|
||||
import gyp.input
|
||||
import argparse
|
||||
import os.path
|
||||
import re
|
||||
import shlex
|
||||
import sys
|
||||
import traceback
|
||||
from gyp.common import GypError
|
||||
|
||||
|
||||
# Default debug modes for GYP
|
||||
debug = {}
|
||||
|
||||
# List of "official" debug modes, but you can use anything you like.
|
||||
DEBUG_GENERAL = "general"
|
||||
DEBUG_VARIABLES = "variables"
|
||||
DEBUG_INCLUDES = "includes"
|
||||
|
||||
|
||||
def DebugOutput(mode, message, *args):
|
||||
if "all" in gyp.debug or mode in gyp.debug:
|
||||
ctx = ("unknown", 0, "unknown")
|
||||
try:
|
||||
f = traceback.extract_stack(limit=2)
|
||||
if f:
|
||||
ctx = f[0][:3]
|
||||
except Exception:
|
||||
pass
|
||||
if args:
|
||||
message %= args
|
||||
print(
|
||||
"%s:%s:%d:%s %s"
|
||||
% (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message)
|
||||
)
|
||||
|
||||
|
||||
def FindBuildFiles():
|
||||
extension = ".gyp"
|
||||
files = os.listdir(os.getcwd())
|
||||
build_files = []
|
||||
for file in files:
|
||||
if file.endswith(extension):
|
||||
build_files.append(file)
|
||||
return build_files
|
||||
|
||||
|
||||
def Load(
|
||||
build_files,
|
||||
format,
|
||||
default_variables={},
|
||||
includes=[],
|
||||
depth=".",
|
||||
params=None,
|
||||
check=False,
|
||||
circular_check=True,
|
||||
):
|
||||
"""
|
||||
Loads one or more specified build files.
|
||||
default_variables and includes will be copied before use.
|
||||
Returns the generator for the specified format and the
|
||||
data returned by loading the specified build files.
|
||||
"""
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
if "-" in format:
|
||||
format, params["flavor"] = format.split("-", 1)
|
||||
|
||||
default_variables = copy.copy(default_variables)
|
||||
|
||||
# Default variables provided by this program and its modules should be
|
||||
# named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
|
||||
# avoiding collisions with user and automatic variables.
|
||||
default_variables["GENERATOR"] = format
|
||||
default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "")
|
||||
|
||||
# Format can be a custom python file, or by default the name of a module
|
||||
# within gyp.generator.
|
||||
if format.endswith(".py"):
|
||||
generator_name = os.path.splitext(format)[0]
|
||||
path, generator_name = os.path.split(generator_name)
|
||||
|
||||
# Make sure the path to the custom generator is in sys.path
|
||||
# Don't worry about removing it once we are done. Keeping the path
|
||||
# to each generator that is used in sys.path is likely harmless and
|
||||
# arguably a good idea.
|
||||
path = os.path.abspath(path)
|
||||
if path not in sys.path:
|
||||
sys.path.insert(0, path)
|
||||
else:
|
||||
generator_name = "gyp.generator." + format
|
||||
|
||||
# These parameters are passed in order (as opposed to by key)
|
||||
# because ActivePython cannot handle key parameters to __import__.
|
||||
generator = __import__(generator_name, globals(), locals(), generator_name)
|
||||
for (key, val) in generator.generator_default_variables.items():
|
||||
default_variables.setdefault(key, val)
|
||||
|
||||
output_dir = params["options"].generator_output or params["options"].toplevel_dir
|
||||
if default_variables["GENERATOR"] == "ninja":
|
||||
default_variables.setdefault(
|
||||
"PRODUCT_DIR_ABS",
|
||||
os.path.join(
|
||||
output_dir, "out", default_variables.get("build_type", "default")
|
||||
),
|
||||
)
|
||||
else:
|
||||
default_variables.setdefault(
|
||||
"PRODUCT_DIR_ABS",
|
||||
os.path.join(output_dir, default_variables["CONFIGURATION_NAME"]),
|
||||
)
|
||||
|
||||
# Give the generator the opportunity to set additional variables based on
|
||||
# the params it will receive in the output phase.
|
||||
if getattr(generator, "CalculateVariables", None):
|
||||
generator.CalculateVariables(default_variables, params)
|
||||
|
||||
# Give the generator the opportunity to set generator_input_info based on
|
||||
# the params it will receive in the output phase.
|
||||
if getattr(generator, "CalculateGeneratorInputInfo", None):
|
||||
generator.CalculateGeneratorInputInfo(params)
|
||||
|
||||
# Fetch the generator specific info that gets fed to input, we use getattr
|
||||
# so we can default things and the generators only have to provide what
|
||||
# they need.
|
||||
generator_input_info = {
|
||||
"non_configuration_keys": getattr(
|
||||
generator, "generator_additional_non_configuration_keys", []
|
||||
),
|
||||
"path_sections": getattr(generator, "generator_additional_path_sections", []),
|
||||
"extra_sources_for_rules": getattr(
|
||||
generator, "generator_extra_sources_for_rules", []
|
||||
),
|
||||
"generator_supports_multiple_toolsets": getattr(
|
||||
generator, "generator_supports_multiple_toolsets", False
|
||||
),
|
||||
"generator_wants_static_library_dependencies_adjusted": getattr(
|
||||
generator, "generator_wants_static_library_dependencies_adjusted", True
|
||||
),
|
||||
"generator_wants_sorted_dependencies": getattr(
|
||||
generator, "generator_wants_sorted_dependencies", False
|
||||
),
|
||||
"generator_filelist_paths": getattr(
|
||||
generator, "generator_filelist_paths", None
|
||||
),
|
||||
}
|
||||
|
||||
# Process the input specific to this generator.
|
||||
result = gyp.input.Load(
|
||||
build_files,
|
||||
default_variables,
|
||||
includes[:],
|
||||
depth,
|
||||
generator_input_info,
|
||||
check,
|
||||
circular_check,
|
||||
params["parallel"],
|
||||
params["root_targets"],
|
||||
)
|
||||
return [generator] + result
|
||||
|
||||
|
||||
def NameValueListToDict(name_value_list):
|
||||
"""
|
||||
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
|
||||
of the pairs. If a string is simply NAME, then the value in the dictionary
|
||||
is set to True. If VALUE can be converted to an integer, it is.
|
||||
"""
|
||||
result = {}
|
||||
for item in name_value_list:
|
||||
tokens = item.split("=", 1)
|
||||
if len(tokens) == 2:
|
||||
# If we can make it an int, use that, otherwise, use the string.
|
||||
try:
|
||||
token_value = int(tokens[1])
|
||||
except ValueError:
|
||||
token_value = tokens[1]
|
||||
# Set the variable to the supplied value.
|
||||
result[tokens[0]] = token_value
|
||||
else:
|
||||
# No value supplied, treat it as a boolean and set it.
|
||||
result[tokens[0]] = True
|
||||
return result
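For example, a sketch of how `-D` style definitions are folded into a dictionary by this helper:
```
print(NameValueListToDict(["target_arch=x64", "component=shared_library", "werror"]))
# -> {'target_arch': 'x64', 'component': 'shared_library', 'werror': True}
print(NameValueListToDict(["jobs=8"]))
# -> {'jobs': 8}   (numeric values are converted to int)
```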
|
||||
|
||||
|
||||
def ShlexEnv(env_name):
|
||||
flags = os.environ.get(env_name, [])
|
||||
if flags:
|
||||
flags = shlex.split(flags)
|
||||
return flags
|
||||
|
||||
|
||||
def FormatOpt(opt, value):
|
||||
if opt.startswith("--"):
|
||||
return f"{opt}={value}"
|
||||
return opt + value
|
||||
|
||||
|
||||
def RegenerateAppendFlag(flag, values, predicate, env_name, options):
|
||||
"""Regenerate a list of command line flags, for an option of action='append'.
|
||||
|
||||
The |env_name|, if given, is checked in the environment and used to generate
|
||||
an initial list of options, then the options that were specified on the
|
||||
command line (given in |values|) are appended. This matches the handling of
|
||||
environment variables and command line flags where command line flags override
|
||||
the environment, while not requiring the environment to be set when the flags
|
||||
are used again.
|
||||
"""
|
||||
flags = []
|
||||
if options.use_environment and env_name:
|
||||
for flag_value in ShlexEnv(env_name):
|
||||
value = FormatOpt(flag, predicate(flag_value))
|
||||
if value in flags:
|
||||
flags.remove(value)
|
||||
flags.append(value)
|
||||
if values:
|
||||
for flag_value in values:
|
||||
flags.append(FormatOpt(flag, predicate(flag_value)))
|
||||
return flags
|
||||
|
||||
|
||||
def RegenerateFlags(options):
|
||||
"""Given a parsed options object, and taking the environment variables into
|
||||
account, returns a list of flags that should regenerate an equivalent options
|
||||
object (even in the absence of the environment variables.)
|
||||
|
||||
Any path options will be normalized relative to depth.
|
||||
|
||||
The format flag is not included, as it is assumed the calling generator will
|
||||
set that as appropriate.
|
||||
"""
|
||||
|
||||
def FixPath(path):
|
||||
path = gyp.common.FixIfRelativePath(path, options.depth)
|
||||
if not path:
|
||||
return os.path.curdir
|
||||
return path
|
||||
|
||||
def Noop(value):
|
||||
return value
|
||||
|
||||
# We always want to ignore the environment when regenerating, to avoid
|
||||
# duplicate or changed flags in the environment at the time of regeneration.
|
||||
flags = ["--ignore-environment"]
|
||||
for name, metadata in options._regeneration_metadata.items():
|
||||
opt = metadata["opt"]
|
||||
value = getattr(options, name)
|
||||
value_predicate = metadata["type"] == "path" and FixPath or Noop
|
||||
action = metadata["action"]
|
||||
env_name = metadata["env_name"]
|
||||
if action == "append":
|
||||
flags.extend(
|
||||
RegenerateAppendFlag(opt, value, value_predicate, env_name, options)
|
||||
)
|
||||
elif action in ("store", None): # None is a synonym for 'store'.
|
||||
if value:
|
||||
flags.append(FormatOpt(opt, value_predicate(value)))
|
||||
elif options.use_environment and env_name and os.environ.get(env_name):
|
||||
flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
|
||||
elif action in ("store_true", "store_false"):
|
||||
if (action == "store_true" and value) or (
|
||||
action == "store_false" and not value
|
||||
):
|
||||
flags.append(opt)
|
||||
elif options.use_environment and env_name:
|
||||
print(
|
||||
"Warning: environment regeneration unimplemented "
|
||||
"for %s flag %r env_name %r" % (action, opt, env_name),
|
||||
file=sys.stderr,
|
||||
)
|
||||
else:
|
||||
print(
|
||||
"Warning: regeneration unimplemented for action %r "
|
||||
"flag %r" % (action, opt),
|
||||
file=sys.stderr,
|
||||
)
|
||||
|
||||
return flags
|
||||
|
||||
|
||||
class RegeneratableOptionParser(argparse.ArgumentParser):
|
||||
def __init__(self, usage):
|
||||
self.__regeneratable_options = {}
|
||||
argparse.ArgumentParser.__init__(self, usage=usage)
|
||||
|
||||
def add_argument(self, *args, **kw):
|
||||
"""Add an option to the parser.
|
||||
|
||||
This accepts the same arguments as ArgumentParser.add_argument, plus the
|
||||
following:
|
||||
regenerate: can be set to False to prevent this option from being included
|
||||
in regeneration.
|
||||
env_name: name of environment variable that additional values for this
|
||||
option come from.
|
||||
type: adds type='path', to tell the regenerator that the values of
|
||||
this option need to be made relative to options.depth
|
||||
"""
|
||||
env_name = kw.pop("env_name", None)
|
||||
if "dest" in kw and kw.pop("regenerate", True):
|
||||
dest = kw["dest"]
|
||||
|
||||
# The path type is needed for regenerating, for optparse we can just treat
|
||||
# it as a string.
|
||||
type = kw.get("type")
|
||||
if type == "path":
|
||||
kw["type"] = str
|
||||
|
||||
self.__regeneratable_options[dest] = {
|
||||
"action": kw.get("action"),
|
||||
"type": type,
|
||||
"env_name": env_name,
|
||||
"opt": args[0],
|
||||
}
|
||||
|
||||
argparse.ArgumentParser.add_argument(self, *args, **kw)
|
||||
|
||||
def parse_args(self, *args):
|
||||
values, args = argparse.ArgumentParser.parse_known_args(self, *args)
|
||||
values._regeneration_metadata = self.__regeneratable_options
|
||||
return values, args
|
||||
|
||||
|
||||
def gyp_main(args):
|
||||
my_name = os.path.basename(sys.argv[0])
|
||||
usage = "usage: %(prog)s [options ...] [build_file ...]"
|
||||
|
||||
parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s"))
|
||||
parser.add_argument(
|
||||
"--build",
|
||||
dest="configs",
|
||||
action="append",
|
||||
help="configuration for build after project generation",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--check", dest="check", action="store_true", help="check format of gyp files"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--config-dir",
|
||||
dest="config_dir",
|
||||
action="store",
|
||||
env_name="GYP_CONFIG_DIR",
|
||||
default=None,
|
||||
help="The location for configuration files like " "include.gypi.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--debug",
|
||||
dest="debug",
|
||||
metavar="DEBUGMODE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="turn on a debugging "
|
||||
'mode for debugging GYP. Supported modes are "variables", '
|
||||
'"includes" and "general" or "all" for all of them.',
|
||||
)
|
||||
parser.add_argument(
|
||||
"-D",
|
||||
dest="defines",
|
||||
action="append",
|
||||
metavar="VAR=VAL",
|
||||
env_name="GYP_DEFINES",
|
||||
help="sets variable VAR to value VAL",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--depth",
|
||||
dest="depth",
|
||||
metavar="PATH",
|
||||
type="path",
|
||||
help="set DEPTH gyp variable to a relative path to PATH",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--format",
|
||||
dest="formats",
|
||||
action="append",
|
||||
env_name="GYP_GENERATORS",
|
||||
regenerate=False,
|
||||
help="output formats to generate",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-G",
|
||||
dest="generator_flags",
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="FLAG=VAL",
|
||||
env_name="GYP_GENERATOR_FLAGS",
|
||||
help="sets generator flag FLAG to VAL",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--generator-output",
|
||||
dest="generator_output",
|
||||
action="store",
|
||||
default=None,
|
||||
metavar="DIR",
|
||||
type="path",
|
||||
env_name="GYP_GENERATOR_OUTPUT",
|
||||
help="puts generated build files under DIR",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--ignore-environment",
|
||||
dest="use_environment",
|
||||
action="store_false",
|
||||
default=True,
|
||||
regenerate=False,
|
||||
help="do not read options from environment variables",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-I",
|
||||
"--include",
|
||||
dest="includes",
|
||||
action="append",
|
||||
metavar="INCLUDE",
|
||||
type="path",
|
||||
help="files to include in all loaded .gyp files",
|
||||
)
|
||||
# --no-circular-check disables the check for circular relationships between
|
||||
# .gyp files. These relationships should not exist, but they've only been
|
||||
# observed to be harmful with the Xcode generator. Chromium's .gyp files
|
||||
# currently have some circular relationships on non-Mac platforms, so this
|
||||
# option allows the strict behavior to be used on Macs and the lenient
|
||||
# behavior to be used elsewhere.
|
||||
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
|
||||
parser.add_argument(
|
||||
"--no-circular-check",
|
||||
dest="circular_check",
|
||||
action="store_false",
|
||||
default=True,
|
||||
regenerate=False,
|
||||
help="don't check for circular relationships between files",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-parallel",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Disable multiprocessing",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-S",
|
||||
"--suffix",
|
||||
dest="suffix",
|
||||
default="",
|
||||
help="suffix to add to generated files",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--toplevel-dir",
|
||||
dest="toplevel_dir",
|
||||
action="store",
|
||||
default=None,
|
||||
metavar="DIR",
|
||||
type="path",
|
||||
help="directory to use as the root of the source tree",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-R",
|
||||
"--root-target",
|
||||
dest="root_targets",
|
||||
action="append",
|
||||
metavar="TARGET",
|
||||
help="include only TARGET and its deep dependencies",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-V",
|
||||
"--version",
|
||||
dest="version",
|
||||
action="store_true",
|
||||
help="Show the version and exit.",
|
||||
)
|
||||
|
||||
options, build_files_arg = parser.parse_args(args)
|
||||
if options.version:
|
||||
import pkg_resources
|
||||
print(f"v{pkg_resources.get_distribution('gyp-next').version}")
|
||||
return 0
|
||||
build_files = build_files_arg
|
||||
|
||||
# Set up the configuration directory (defaults to ~/.gyp)
|
||||
if not options.config_dir:
|
||||
home = None
|
||||
home_dot_gyp = None
|
||||
if options.use_environment:
|
||||
home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None)
|
||||
if home_dot_gyp:
|
||||
home_dot_gyp = os.path.expanduser(home_dot_gyp)
|
||||
|
||||
if not home_dot_gyp:
|
||||
home_vars = ["HOME"]
|
||||
if sys.platform in ("cygwin", "win32"):
|
||||
home_vars.append("USERPROFILE")
|
||||
for home_var in home_vars:
|
||||
home = os.getenv(home_var)
|
||||
if home:
|
||||
home_dot_gyp = os.path.join(home, ".gyp")
|
||||
if not os.path.exists(home_dot_gyp):
|
||||
home_dot_gyp = None
|
||||
else:
|
||||
break
|
||||
else:
|
||||
home_dot_gyp = os.path.expanduser(options.config_dir)
|
||||
|
||||
if home_dot_gyp and not os.path.exists(home_dot_gyp):
|
||||
home_dot_gyp = None
|
||||
|
||||
if not options.formats:
|
||||
# If no format was given on the command line, then check the env variable.
|
||||
generate_formats = []
|
||||
if options.use_environment:
|
||||
generate_formats = os.environ.get("GYP_GENERATORS", [])
|
||||
if generate_formats:
|
||||
generate_formats = re.split(r"[\s,]", generate_formats)
|
||||
if generate_formats:
|
||||
options.formats = generate_formats
|
||||
else:
|
||||
# Nothing in the variable, default based on platform.
|
||||
if sys.platform == "darwin":
|
||||
options.formats = ["xcode"]
|
||||
elif sys.platform in ("win32", "cygwin"):
|
||||
options.formats = ["msvs"]
|
||||
else:
|
||||
options.formats = ["make"]
|
||||
|
||||
if not options.generator_output and options.use_environment:
|
||||
g_o = os.environ.get("GYP_GENERATOR_OUTPUT")
|
||||
if g_o:
|
||||
options.generator_output = g_o
|
||||
|
||||
options.parallel = not options.no_parallel
|
||||
|
||||
for mode in options.debug:
|
||||
gyp.debug[mode] = 1
|
||||
|
||||
# Do an extra check to avoid work when we're not debugging.
|
||||
if DEBUG_GENERAL in gyp.debug:
|
||||
DebugOutput(DEBUG_GENERAL, "running with these options:")
|
||||
for option, value in sorted(options.__dict__.items()):
|
||||
if option[0] == "_":
|
||||
continue
|
||||
if isinstance(value, str):
|
||||
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
|
||||
else:
|
||||
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
|
||||
|
||||
if not build_files:
|
||||
build_files = FindBuildFiles()
|
||||
if not build_files:
|
||||
raise GypError((usage + "\n\n%s: error: no build_file") % (my_name, my_name))
|
||||
|
||||
# TODO(mark): Chromium-specific hack!
|
||||
# For Chromium, the gyp "depth" variable should always be a relative path
|
||||
# to Chromium's top-level "src" directory. If no depth variable was set
|
||||
# on the command line, try to find a "src" directory by looking at the
|
||||
# absolute path to each build file's directory. The first "src" component
|
||||
# found will be treated as though it were the path used for --depth.
|
||||
if not options.depth:
|
||||
for build_file in build_files:
|
||||
build_file_dir = os.path.abspath(os.path.dirname(build_file))
|
||||
build_file_dir_components = build_file_dir.split(os.path.sep)
|
||||
components_len = len(build_file_dir_components)
|
||||
for index in range(components_len - 1, -1, -1):
|
||||
if build_file_dir_components[index] == "src":
|
||||
options.depth = os.path.sep.join(build_file_dir_components)
|
||||
break
|
||||
del build_file_dir_components[index]
|
||||
|
||||
# If the inner loop found something, break without advancing to another
|
||||
# build file.
|
||||
if options.depth:
|
||||
break
|
||||
|
||||
if not options.depth:
|
||||
raise GypError(
|
||||
"Could not automatically locate src directory. This is"
|
||||
"a temporary Chromium feature that will be removed. Use"
|
||||
"--depth as a workaround."
|
||||
)
|
||||
|
||||
# If toplevel-dir is not set, we assume that depth is the root of our source
|
||||
# tree.
|
||||
if not options.toplevel_dir:
|
||||
options.toplevel_dir = options.depth
|
||||
|
||||
# -D on the command line sets variable defaults - D isn't just for define,
|
||||
# it's for default. Perhaps there should be a way to force (-F?) a
|
||||
# variable's value so that it can't be overridden by anything else.
|
||||
cmdline_default_variables = {}
|
||||
defines = []
|
||||
if options.use_environment:
|
||||
defines += ShlexEnv("GYP_DEFINES")
|
||||
if options.defines:
|
||||
defines += options.defines
|
||||
cmdline_default_variables = NameValueListToDict(defines)
|
||||
if DEBUG_GENERAL in gyp.debug:
|
||||
DebugOutput(
|
||||
DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables
|
||||
)
|
||||
|
||||
# Set up includes.
|
||||
includes = []
|
||||
|
||||
# If ~/.gyp/include.gypi exists, it'll be forcibly included into every
|
||||
# .gyp file that's loaded, before anything else is included.
|
||||
if home_dot_gyp:
|
||||
default_include = os.path.join(home_dot_gyp, "include.gypi")
|
||||
if os.path.exists(default_include):
|
||||
print("Using overrides found in " + default_include)
|
||||
includes.append(default_include)
|
||||
|
||||
# Command-line --include files come after the default include.
|
||||
if options.includes:
|
||||
includes.extend(options.includes)
|
||||
|
||||
# Generator flags should be prefixed with the target generator since they
|
||||
# are global across all generator runs.
|
||||
gen_flags = []
|
||||
if options.use_environment:
|
||||
gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS")
|
||||
if options.generator_flags:
|
||||
gen_flags += options.generator_flags
|
||||
generator_flags = NameValueListToDict(gen_flags)
|
||||
if DEBUG_GENERAL in gyp.debug:
|
||||
DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
|
||||
|
||||
# Generate all requested formats (use a set in case we got one format request
|
||||
# twice)
|
||||
for format in set(options.formats):
|
||||
params = {
|
||||
"options": options,
|
||||
"build_files": build_files,
|
||||
"generator_flags": generator_flags,
|
||||
"cwd": os.getcwd(),
|
||||
"build_files_arg": build_files_arg,
|
||||
"gyp_binary": sys.argv[0],
|
||||
"home_dot_gyp": home_dot_gyp,
|
||||
"parallel": options.parallel,
|
||||
"root_targets": options.root_targets,
|
||||
"target_arch": cmdline_default_variables.get("target_arch", ""),
|
||||
}
|
||||
|
||||
# Start with the default variables from the command line.
|
||||
[generator, flat_list, targets, data] = Load(
|
||||
build_files,
|
||||
format,
|
||||
cmdline_default_variables,
|
||||
includes,
|
||||
options.depth,
|
||||
params,
|
||||
options.check,
|
||||
options.circular_check,
|
||||
)
|
||||
|
||||
# TODO(mark): Pass |data| for now because the generator needs a list of
|
||||
# build files that came in. In the future, maybe it should just accept
|
||||
# a list, and not the whole data dict.
|
||||
# NOTE: flat_list is the flattened dependency graph specifying the order
|
||||
# that targets may be built. Build systems that operate serially or that
|
||||
# need to have dependencies defined before dependents reference them should
|
||||
# generate targets in the order specified in flat_list.
|
||||
generator.GenerateOutput(flat_list, targets, data, params)
|
||||
|
||||
if options.configs:
|
||||
valid_configs = targets[flat_list[0]]["configurations"]
|
||||
for conf in options.configs:
|
||||
if conf not in valid_configs:
|
||||
raise GypError("Invalid config specified via --build: %s" % conf)
|
||||
generator.PerformBuild(data, options.configs, params)
|
||||
|
||||
# Done
|
||||
return 0
|
||||
|
||||
|
||||
def main(args):
|
||||
try:
|
||||
return gyp_main(args)
|
||||
except GypError as e:
|
||||
sys.stderr.write("gyp: %s\n" % e)
|
||||
return 1
|
||||
|
||||
|
||||
# NOTE: setuptools generated console_scripts calls function with no arguments
|
||||
def script_main():
|
||||
return main(sys.argv[1:])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(script_main())
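A minimal sketch of driving this entry point programmatically; the project path and generator are illustrative only.
```
import sys
import gyp

# Equivalent to: gyp --depth=. --format=make build/all.gyp
sys.exit(gyp.main(["--depth=.", "--format=make", "build/all.gyp"]))
```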
|
654
my-app/node_modules/node-gyp/gyp/pylib/gyp/common.py
generated
vendored
Executable file
@ -0,0 +1,654 @@
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import errno
|
||||
import filecmp
|
||||
import os.path
|
||||
import re
|
||||
import tempfile
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
from collections.abc import MutableSet
|
||||
|
||||
|
||||
# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
|
||||
# among other "problems".
|
||||
class memoize:
|
||||
def __init__(self, func):
|
||||
self.func = func
|
||||
self.cache = {}
|
||||
|
||||
def __call__(self, *args):
|
||||
try:
|
||||
return self.cache[args]
|
||||
except KeyError:
|
||||
result = self.func(*args)
|
||||
self.cache[args] = result
|
||||
return result
|
||||
|
||||
|
||||
class GypError(Exception):
|
||||
"""Error class representing an error, which is to be presented
|
||||
to the user. The main entry point will catch and display this.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def ExceptionAppend(e, msg):
|
||||
"""Append a message to the given exception's message."""
|
||||
if not e.args:
|
||||
e.args = (msg,)
|
||||
elif len(e.args) == 1:
|
||||
e.args = (str(e.args[0]) + " " + msg,)
|
||||
else:
|
||||
e.args = (str(e.args[0]) + " " + msg,) + e.args[1:]
|
||||
|
||||
|
||||
def FindQualifiedTargets(target, qualified_list):
|
||||
"""
|
||||
Given a list of qualified targets, return the qualified targets for the
|
||||
specified |target|.
|
||||
"""
|
||||
return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
|
||||
|
||||
|
||||
def ParseQualifiedTarget(target):
|
||||
# Splits a qualified target into a build file, target name and toolset.
|
||||
|
||||
# NOTE: rsplit is used to disambiguate the Windows drive letter separator.
|
||||
target_split = target.rsplit(":", 1)
|
||||
if len(target_split) == 2:
|
||||
[build_file, target] = target_split
|
||||
else:
|
||||
build_file = None
|
||||
|
||||
target_split = target.rsplit("#", 1)
|
||||
if len(target_split) == 2:
|
||||
[target, toolset] = target_split
|
||||
else:
|
||||
toolset = None
|
||||
|
||||
return [build_file, target, toolset]
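Two illustrative calls (not part of the module) showing how qualified target strings are decomposed:
```
print(ParseQualifiedTarget("chrome/chrome.gyp:chrome_dll#target"))
# -> ['chrome/chrome.gyp', 'chrome_dll', 'target']
print(ParseQualifiedTarget("base_unittests"))
# -> [None, 'base_unittests', None]
```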
|
||||
|
||||
|
||||
def ResolveTarget(build_file, target, toolset):
|
||||
# This function resolves a target into a canonical form:
|
||||
# - a fully defined build file, either absolute or relative to the current
|
||||
# directory
|
||||
# - a target name
|
||||
# - a toolset
|
||||
#
|
||||
# build_file is the file relative to which 'target' is defined.
|
||||
# target is the qualified target.
|
||||
# toolset is the default toolset for that target.
|
||||
[parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
|
||||
|
||||
if parsed_build_file:
|
||||
if build_file:
|
||||
# If a relative path, parsed_build_file is relative to the directory
|
||||
# containing build_file. If build_file is not in the current directory,
|
||||
# parsed_build_file is not a usable path as-is. Resolve it by
|
||||
# interpreting it as relative to build_file. If parsed_build_file is
|
||||
# absolute, it is usable as a path regardless of the current directory,
|
||||
# and os.path.join will return it as-is.
|
||||
build_file = os.path.normpath(
|
||||
os.path.join(os.path.dirname(build_file), parsed_build_file)
|
||||
)
|
||||
# Further (to handle cases like ../cwd), make it relative to cwd.
|
||||
if not os.path.isabs(build_file):
|
||||
build_file = RelativePath(build_file, ".")
|
||||
else:
|
||||
build_file = parsed_build_file
|
||||
|
||||
if parsed_toolset:
|
||||
toolset = parsed_toolset
|
||||
|
||||
return [build_file, target, toolset]
|
||||
|
||||
|
||||
def BuildFile(fully_qualified_target):
|
||||
# Extracts the build file from the fully qualified target.
|
||||
return ParseQualifiedTarget(fully_qualified_target)[0]
|
||||
|
||||
|
||||
def GetEnvironFallback(var_list, default):
|
||||
"""Look up a key in the environment, with fallback to secondary keys
|
||||
and finally falling back to a default value."""
|
||||
for var in var_list:
|
||||
if var in os.environ:
|
||||
return os.environ[var]
|
||||
return default
|
||||
|
||||
|
||||
def QualifiedTarget(build_file, target, toolset):
|
||||
# "Qualified" means the file that a target was defined in and the target
|
||||
# name, separated by a colon, suffixed by a # and the toolset name:
|
||||
# /path/to/file.gyp:target_name#toolset
|
||||
fully_qualified = build_file + ":" + target
|
||||
if toolset:
|
||||
fully_qualified = fully_qualified + "#" + toolset
|
||||
return fully_qualified
|
||||
|
||||
|
||||
@memoize
|
||||
def RelativePath(path, relative_to, follow_path_symlink=True):
|
||||
# Assuming both |path| and |relative_to| are relative to the current
|
||||
# directory, returns a relative path that identifies path relative to
|
||||
# relative_to.
|
||||
# If |follow_path_symlink| is true (default) and |path| is a symlink, then
|
||||
# this method returns a path to the real file represented by |path|. If it is
|
||||
# false, this method returns a path to the symlink. If |path| is not a
|
||||
# symlink, this option has no effect.
|
||||
|
||||
# Convert to normalized (and therefore absolute paths).
|
||||
path = os.path.realpath(path) if follow_path_symlink else os.path.abspath(path)
|
||||
relative_to = os.path.realpath(relative_to)
|
||||
|
||||
# On Windows, we can't create a relative path to a different drive, so just
|
||||
# use the absolute path.
|
||||
if sys.platform == "win32" and (
|
||||
os.path.splitdrive(path)[0].lower()
|
||||
!= os.path.splitdrive(relative_to)[0].lower()
|
||||
):
|
||||
return path
|
||||
|
||||
# Split the paths into components.
|
||||
path_split = path.split(os.path.sep)
|
||||
relative_to_split = relative_to.split(os.path.sep)
|
||||
|
||||
# Determine how much of the prefix the two paths share.
|
||||
prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
|
||||
|
||||
# Put enough ".." components to back up out of relative_to to the common
|
||||
# prefix, and then append the part of path_split after the common prefix.
|
||||
relative_split = [os.path.pardir] * (
|
||||
len(relative_to_split) - prefix_len
|
||||
) + path_split[prefix_len:]
|
||||
|
||||
if len(relative_split) == 0:
|
||||
# The paths were the same.
|
||||
return ""
|
||||
|
||||
# Turn it back into a string and we're done.
|
||||
return os.path.join(*relative_split)
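A small sketch of the path arithmetic above, assuming both arguments resolve under the same working directory and no symlinks are involved:
```
print(RelativePath("src/chrome/app.cc", "src/out"))
# -> ../chrome/app.cc
```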
|
||||
|
||||
|
||||
@memoize
|
||||
def InvertRelativePath(path, toplevel_dir=None):
|
||||
"""Given a path like foo/bar that is relative to toplevel_dir, return
|
||||
the inverse relative path back to the toplevel_dir.
|
||||
|
||||
E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
|
||||
should always produce the empty string, unless the path contains symlinks.
|
||||
"""
|
||||
if not path:
|
||||
return path
|
||||
toplevel_dir = "." if toplevel_dir is None else toplevel_dir
|
||||
return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
|
||||
|
||||
|
||||
def FixIfRelativePath(path, relative_to):
|
||||
# Like RelativePath but returns |path| unchanged if it is absolute.
|
||||
if os.path.isabs(path):
|
||||
return path
|
||||
return RelativePath(path, relative_to)
|
||||
|
||||
|
||||
def UnrelativePath(path, relative_to):
|
||||
# Assuming that |relative_to| is relative to the current directory, and |path|
|
||||
# is a path relative to the dirname of |relative_to|, returns a path that
|
||||
# identifies |path| relative to the current directory.
|
||||
rel_dir = os.path.dirname(relative_to)
|
||||
return os.path.normpath(os.path.join(rel_dir, path))
|
||||
|
||||
|
||||
# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
|
||||
# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
|
||||
# and the documentation for various shells.
|
||||
|
||||
# _quote is a pattern that should match any argument that needs to be quoted
|
||||
# with double-quotes by EncodePOSIXShellArgument. It matches the following
|
||||
# characters appearing anywhere in an argument:
|
||||
# \t, \n, space parameter separators
|
||||
# # comments
|
||||
# $ expansions (quoted to always expand within one argument)
|
||||
# % called out by IEEE 1003.1 XCU.2.2
|
||||
# & job control
|
||||
# ' quoting
|
||||
# (, ) subshell execution
|
||||
# *, ?, [ pathname expansion
|
||||
# ; command delimiter
|
||||
# <, >, | redirection
|
||||
# = assignment
|
||||
# {, } brace expansion (bash)
|
||||
# ~ tilde expansion
|
||||
# It also matches the empty string, because "" (or '') is the only way to
|
||||
# represent an empty string literal argument to a POSIX shell.
|
||||
#
|
||||
# This does not match the characters in _escape, because those need to be
|
||||
# backslash-escaped regardless of whether they appear in a double-quoted
|
||||
# string.
|
||||
_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$")
|
||||
|
||||
# _escape is a pattern that should match any character that needs to be
|
||||
# escaped with a backslash, whether or not the argument matched the _quote
|
||||
# pattern. _escape is used with re.sub to backslash anything in _escape's
|
||||
# first match group, hence the (parentheses) in the regular expression.
|
||||
#
|
||||
# _escape matches the following characters appearing anywhere in an argument:
|
||||
# " to prevent POSIX shells from interpreting this character for quoting
|
||||
# \ to prevent POSIX shells from interpreting this character for escaping
|
||||
# ` to prevent POSIX shells from interpreting this character for command
|
||||
# substitution
|
||||
# Missing from this list is $, because the desired behavior of
|
||||
# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
|
||||
#
|
||||
# Also missing from this list is !, which bash will interpret as the history
|
||||
# expansion character when history is enabled. bash does not enable history
|
||||
# by default in non-interactive shells, so this is not thought to be a problem.
|
||||
# ! was omitted from this list because bash interprets "\!" as a literal string
|
||||
# including the backslash character (avoiding history expansion but retaining
|
||||
# the backslash), which would not be correct for argument encoding. Handling
|
||||
# this case properly would also be problematic because bash allows the history
|
||||
# character to be changed with the histchars shell variable. Fortunately,
|
||||
# as history is not enabled in non-interactive shells and
|
||||
# EncodePOSIXShellArgument is only expected to encode for non-interactive
|
||||
# shells, there is no room for error here by ignoring !.
|
||||
_escape = re.compile(r'(["\\`])')
|
||||
|
||||
|
||||
def EncodePOSIXShellArgument(argument):
|
||||
"""Encodes |argument| suitably for consumption by POSIX shells.
|
||||
|
||||
argument may be quoted and escaped as necessary to ensure that POSIX shells
|
||||
treat the returned value as a literal representing the argument passed to
|
||||
this function. Parameter (variable) expansions beginning with $ are allowed
|
||||
to remain intact without escaping the $, to allow the argument to contain
|
||||
references to variables to be expanded by the shell.
|
||||
"""
|
||||
|
||||
if not isinstance(argument, str):
|
||||
argument = str(argument)
|
||||
|
||||
quote = '"' if _quote.search(argument) else ""
|
||||
|
||||
encoded = quote + re.sub(_escape, r"\\\1", argument) + quote
|
||||
|
||||
return encoded
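A few examples of the quoting and escaping rules implemented above:
```
print(EncodePOSIXShellArgument("plain"))        # -> plain
print(EncodePOSIXShellArgument("two words"))    # -> "two words"
print(EncodePOSIXShellArgument('say "$HOME"'))  # -> "say \"$HOME\""
```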
|
||||
|
||||
|
||||
def EncodePOSIXShellList(list):
|
||||
"""Encodes |list| suitably for consumption by POSIX shells.
|
||||
|
||||
Returns EncodePOSIXShellArgument for each item in list, and joins them
|
||||
together using the space character as an argument separator.
|
||||
"""
|
||||
|
||||
encoded_arguments = []
|
||||
for argument in list:
|
||||
encoded_arguments.append(EncodePOSIXShellArgument(argument))
|
||||
return " ".join(encoded_arguments)
|
||||
|
||||
|
||||
def DeepDependencyTargets(target_dicts, roots):
|
||||
"""Returns the recursive list of target dependencies."""
|
||||
dependencies = set()
|
||||
pending = set(roots)
|
||||
while pending:
|
||||
# Pluck out one.
|
||||
r = pending.pop()
|
||||
# Skip if visited already.
|
||||
if r in dependencies:
|
||||
continue
|
||||
# Add it.
|
||||
dependencies.add(r)
|
||||
# Add its children.
|
||||
spec = target_dicts[r]
|
||||
pending.update(set(spec.get("dependencies", [])))
|
||||
pending.update(set(spec.get("dependencies_original", [])))
|
||||
return list(dependencies - set(roots))
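An illustrative sketch of the transitive-dependency walk, using a made-up three-target graph:
```
target_dicts = {
    "app": {"dependencies": ["lib"]},
    "lib": {"dependencies": ["base"]},
    "base": {},
}
print(DeepDependencyTargets(target_dicts, ["app"]))
# -> ['lib', 'base'] (set-based, so order may vary; the roots themselves are excluded)
```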
|
||||
|
||||
|
||||
def BuildFileTargets(target_list, build_file):
|
||||
"""From a target_list, returns the subset from the specified build_file.
|
||||
"""
|
||||
return [p for p in target_list if BuildFile(p) == build_file]
|
||||
|
||||
|
||||
def AllTargets(target_list, target_dicts, build_file):
|
||||
"""Returns all targets (direct and dependencies) for the specified build_file.
|
||||
"""
|
||||
bftargets = BuildFileTargets(target_list, build_file)
|
||||
deptargets = DeepDependencyTargets(target_dicts, bftargets)
|
||||
return bftargets + deptargets
|
||||
|
||||
|
||||
def WriteOnDiff(filename):
|
||||
"""Write to a file only if the new contents differ.
|
||||
|
||||
Arguments:
|
||||
filename: name of the file to potentially write to.
|
||||
Returns:
|
||||
A file like object which will write to temporary file and only overwrite
|
||||
the target if it differs (on close).
|
||||
"""
|
||||
|
||||
class Writer:
|
||||
"""Wrapper around file which only covers the target if it differs."""
|
||||
|
||||
def __init__(self):
|
||||
# On Cygwin remove the "dir" argument
|
||||
# `C:` prefixed paths are treated as relative,
|
||||
# consequently ending up with current dir "/cygdrive/c/..."
|
||||
# being prefixed to those, which was
|
||||
# obviously a non-existent path,
|
||||
# for example: "/cygdrive/c/<some folder>/C:\<my win style abs path>".
|
||||
# For more details see:
|
||||
# https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp
|
||||
base_temp_dir = "" if IsCygwin() else os.path.dirname(filename)
|
||||
# Pick temporary file.
|
||||
tmp_fd, self.tmp_path = tempfile.mkstemp(
|
||||
suffix=".tmp",
|
||||
prefix=os.path.split(filename)[1] + ".gyp.",
|
||||
dir=base_temp_dir,
|
||||
)
|
||||
try:
|
||||
self.tmp_file = os.fdopen(tmp_fd, "wb")
|
||||
except Exception:
|
||||
# Don't leave turds behind.
|
||||
os.unlink(self.tmp_path)
|
||||
raise
|
||||
|
||||
def __getattr__(self, attrname):
|
||||
# Delegate everything else to self.tmp_file
|
||||
return getattr(self.tmp_file, attrname)
|
||||
|
||||
def close(self):
|
||||
try:
|
||||
# Close tmp file.
|
||||
self.tmp_file.close()
|
||||
# Determine if different.
|
||||
same = False
|
||||
try:
|
||||
same = filecmp.cmp(self.tmp_path, filename, False)
|
||||
except OSError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
|
||||
if same:
|
||||
# The new file is identical to the old one, just get rid of the new
|
||||
# one.
|
||||
os.unlink(self.tmp_path)
|
||||
else:
|
||||
# The new file is different from the old one,
|
||||
# or there is no old one.
|
||||
# Rename the new file to the permanent name.
|
||||
#
|
||||
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
|
||||
# file that can only be read by the owner, regardless of the umask.
|
||||
# There's no reason to not respect the umask here,
|
||||
# which means that an extra hoop is required
|
||||
# to fetch it and reset the new file's mode.
|
||||
#
|
||||
# No way to get the umask without setting a new one? Set a safe one
|
||||
# and then set it back to the old value.
|
||||
umask = os.umask(0o77)
|
||||
os.umask(umask)
|
||||
os.chmod(self.tmp_path, 0o666 & ~umask)
|
||||
if sys.platform == "win32" and os.path.exists(filename):
|
||||
# NOTE: on windows (but not cygwin) rename will not replace an
|
||||
# existing file, so it must be preceded with a remove.
|
||||
# Sadly there is no way to make the switch atomic.
|
||||
os.remove(filename)
|
||||
os.rename(self.tmp_path, filename)
|
||||
except Exception:
|
||||
# Don't leave turds behind.
|
||||
os.unlink(self.tmp_path)
|
||||
raise
|
||||
|
||||
def write(self, s):
|
||||
self.tmp_file.write(s.encode("utf-8"))
|
||||
|
||||
return Writer()
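Usage sketch for the writer returned above; the filename is an example, and the target file is replaced only when its contents actually change:
```
out = WriteOnDiff("build.ninja")
out.write("rule cc\n  command = cc -c $in -o $out\n")
out.close()
```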
|
||||
|
||||
|
||||
def EnsureDirExists(path):
|
||||
"""Make sure the directory for |path| exists."""
|
||||
try:
|
||||
os.makedirs(os.path.dirname(path))
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
def GetFlavor(params):
|
||||
"""Returns |params.flavor| if it's set, the system's default flavor else."""
|
||||
flavors = {
|
||||
"cygwin": "win",
|
||||
"win32": "win",
|
||||
"darwin": "mac",
|
||||
}
|
||||
|
||||
if "flavor" in params:
|
||||
return params["flavor"]
|
||||
if sys.platform in flavors:
|
||||
return flavors[sys.platform]
|
||||
if sys.platform.startswith("sunos"):
|
||||
return "solaris"
|
||||
if sys.platform.startswith(("dragonfly", "freebsd")):
|
||||
return "freebsd"
|
||||
if sys.platform.startswith("openbsd"):
|
||||
return "openbsd"
|
||||
if sys.platform.startswith("netbsd"):
|
||||
return "netbsd"
|
||||
if sys.platform.startswith("aix"):
|
||||
return "aix"
|
||||
if sys.platform.startswith(("os390", "zos")):
|
||||
return "zos"
|
||||
if sys.platform == "os400":
|
||||
return "os400"
|
||||
|
||||
return "linux"
|
||||
|
||||
|
||||
def CopyTool(flavor, out_path, generator_flags={}):
|
||||
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
|
||||
to |out_path|."""
|
||||
# aix and solaris just need flock emulation. mac and win use more complicated
|
||||
# support scripts.
|
||||
prefix = {
|
||||
"aix": "flock",
|
||||
"os400": "flock",
|
||||
"solaris": "flock",
|
||||
"mac": "mac",
|
||||
"ios": "mac",
|
||||
"win": "win",
|
||||
}.get(flavor, None)
|
||||
if not prefix:
|
||||
return
|
||||
|
||||
# Slurp input file.
|
||||
source_path = os.path.join(
|
||||
os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix
|
||||
)
|
||||
with open(source_path) as source_file:
|
||||
source = source_file.readlines()
|
||||
|
||||
# Set custom header flags.
|
||||
header = "# Generated by gyp. Do not edit.\n"
|
||||
mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
|
||||
if flavor == "mac" and mac_toolchain_dir:
|
||||
header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir
|
||||
|
||||
# Add header and write it out.
|
||||
tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix)
|
||||
with open(tool_path, "w") as tool_file:
|
||||
tool_file.write("".join([source[0], header] + source[1:]))
|
||||
|
||||
# Make file executable.
|
||||
os.chmod(tool_path, 0o755)
|
||||
|
||||
|
||||
# From Alex Martelli,
|
||||
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
|
||||
# ASPN: Python Cookbook: Remove duplicates from a sequence
|
||||
# First comment, dated 2001/10/13.
|
||||
# (Also in the printed Python Cookbook.)
|
||||
|
||||
|
||||
def uniquer(seq, idfun=lambda x: x):
|
||||
seen = {}
|
||||
result = []
|
||||
for item in seq:
|
||||
marker = idfun(item)
|
||||
if marker in seen:
|
||||
continue
|
||||
seen[marker] = 1
|
||||
result.append(item)
|
||||
return result
|
||||
|
||||
|
||||
# Based on http://code.activestate.com/recipes/576694/.
|
||||
class OrderedSet(MutableSet):
|
||||
def __init__(self, iterable=None):
|
||||
self.end = end = []
|
||||
end += [None, end, end] # sentinel node for doubly linked list
|
||||
self.map = {} # key --> [key, prev, next]
|
||||
if iterable is not None:
|
||||
self |= iterable
|
||||
|
||||
def __len__(self):
|
||||
return len(self.map)
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.map
|
||||
|
||||
def add(self, key):
|
||||
if key not in self.map:
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
curr[2] = end[1] = self.map[key] = [key, curr, end]
|
||||
|
||||
def discard(self, key):
|
||||
if key in self.map:
|
||||
key, prev_item, next_item = self.map.pop(key)
|
||||
prev_item[2] = next_item
|
||||
next_item[1] = prev_item
|
||||
|
||||
def __iter__(self):
|
||||
end = self.end
|
||||
curr = end[2]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[2]
|
||||
|
||||
def __reversed__(self):
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[1]
|
||||
|
||||
# The second argument is an addition that causes a pylint warning.
|
||||
def pop(self, last=True): # pylint: disable=W0221
|
||||
if not self:
|
||||
raise KeyError("set is empty")
|
||||
key = self.end[1][0] if last else self.end[2][0]
|
||||
self.discard(key)
|
||||
return key
|
||||
|
||||
def __repr__(self):
|
||||
if not self:
|
||||
return f"{self.__class__.__name__}()"
|
||||
return f"{self.__class__.__name__}({list(self)!r})"
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, OrderedSet):
|
||||
return len(self) == len(other) and list(self) == list(other)
|
||||
return set(self) == set(other)
|
||||
|
||||
# Extensions to the recipe.
|
||||
def update(self, iterable):
|
||||
for i in iterable:
|
||||
if i not in self:
|
||||
self.add(i)
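A short example of the ordered-set semantics (a sketch, not part of the file): iteration follows insertion order and duplicates are ignored.
```
s = OrderedSet(["b", "a", "b", "c"])
s.add("a")
print(list(s))  # -> ['b', 'a', 'c']
```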
|
||||
|
||||
|
||||
class CycleError(Exception):
|
||||
"""An exception raised when an unexpected cycle is detected."""
|
||||
|
||||
def __init__(self, nodes):
|
||||
self.nodes = nodes
|
||||
|
||||
def __str__(self):
|
||||
return "CycleError: cycle involving: " + str(self.nodes)
|
||||
|
||||
|
||||
def TopologicallySorted(graph, get_edges):
|
||||
r"""Topologically sort based on a user provided edge definition.
|
||||
|
||||
Args:
|
||||
graph: A list of node names.
|
||||
get_edges: A function mapping from node name to a hashable collection
|
||||
of node names which this node has outgoing edges to.
|
||||
Returns:
|
||||
A list containing all of the nodes in graph in topological order.
|
||||
It is assumed that calling get_edges once for each node and caching is
|
||||
cheaper than repeatedly calling get_edges.
|
||||
Raises:
|
||||
CycleError in the event of a cycle.
|
||||
Example:
|
||||
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
|
||||
def GetEdges(node):
|
||||
return re.findall(r'\$\(([^)]*)\)', graph[node])
print(TopologicallySorted(graph.keys(), GetEdges))
==>
['a', 'c', 'b']
|
||||
"""
|
||||
get_edges = memoize(get_edges)
|
||||
visited = set()
|
||||
visiting = set()
|
||||
ordered_nodes = []
|
||||
|
||||
def Visit(node):
|
||||
if node in visiting:
|
||||
raise CycleError(visiting)
|
||||
if node in visited:
|
||||
return
|
||||
visited.add(node)
|
||||
visiting.add(node)
|
||||
for neighbor in get_edges(node):
|
||||
Visit(neighbor)
|
||||
visiting.remove(node)
|
||||
ordered_nodes.insert(0, node)
|
||||
|
||||
for node in sorted(graph):
|
||||
Visit(node)
|
||||
return ordered_nodes
|
||||
|
||||
|
||||
def CrossCompileRequested():
|
||||
# TODO: figure out how to not build extra host objects in the
|
||||
# non-cross-compile case when this is enabled, and enable unconditionally.
|
||||
return (
|
||||
os.environ.get("GYP_CROSSCOMPILE")
|
||||
or os.environ.get("AR_host")
|
||||
or os.environ.get("CC_host")
|
||||
or os.environ.get("CXX_host")
|
||||
or os.environ.get("AR_target")
|
||||
or os.environ.get("CC_target")
|
||||
or os.environ.get("CXX_target")
|
||||
)
|
||||
|
||||
|
||||
def IsCygwin():
|
||||
try:
|
||||
out = subprocess.Popen(
|
||||
"uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
stdout = out.communicate()[0].decode("utf-8")
|
||||
return "CYGWIN" in str(stdout)
|
||||
except Exception:
|
||||
return False
|
78
my-app/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
generated
vendored
Executable file
@ -0,0 +1,78 @@
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Unit tests for the common.py file."""
|
||||
|
||||
import gyp.common
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
|
||||
class TestTopologicallySorted(unittest.TestCase):
|
||||
def test_Valid(self):
|
||||
"""Test that sorting works on a valid graph with one possible order."""
|
||||
graph = {
|
||||
"a": ["b", "c"],
|
||||
"b": [],
|
||||
"c": ["d"],
|
||||
"d": ["b"],
|
||||
}
|
||||
|
||||
def GetEdge(node):
|
||||
return tuple(graph[node])
|
||||
|
||||
self.assertEqual(
|
||||
gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"]
|
||||
)
|
||||
|
||||
def test_Cycle(self):
|
||||
"""Test that an exception is thrown on a cyclic graph."""
|
||||
graph = {
|
||||
"a": ["b"],
|
||||
"b": ["c"],
|
||||
"c": ["d"],
|
||||
"d": ["a"],
|
||||
}
|
||||
|
||||
def GetEdge(node):
|
||||
return tuple(graph[node])
|
||||
|
||||
self.assertRaises(
|
||||
gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge
|
||||
)
|
||||
|
||||
|
||||
class TestGetFlavor(unittest.TestCase):
|
||||
"""Test that gyp.common.GetFlavor works as intended"""
|
||||
|
||||
original_platform = ""
|
||||
|
||||
def setUp(self):
|
||||
self.original_platform = sys.platform
|
||||
|
||||
def tearDown(self):
|
||||
sys.platform = self.original_platform
|
||||
|
||||
def assertFlavor(self, expected, argument, param):
|
||||
sys.platform = argument
|
||||
self.assertEqual(expected, gyp.common.GetFlavor(param))
|
||||
|
||||
def test_platform_default(self):
|
||||
self.assertFlavor("freebsd", "freebsd9", {})
|
||||
self.assertFlavor("freebsd", "freebsd10", {})
|
||||
self.assertFlavor("openbsd", "openbsd5", {})
|
||||
self.assertFlavor("solaris", "sunos5", {})
|
||||
self.assertFlavor("solaris", "sunos", {})
|
||||
self.assertFlavor("linux", "linux2", {})
|
||||
self.assertFlavor("linux", "linux3", {})
|
||||
self.assertFlavor("linux", "linux", {})
|
||||
|
||||
def test_param(self):
|
||||
self.assertFlavor("foobar", "linux2", {"flavor": "foobar"})
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
169
my-app/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
generated
vendored
Executable file
|
@ -0,0 +1,169 @@
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
import re
|
||||
import os
|
||||
import locale
|
||||
from functools import reduce
|
||||
|
||||
|
||||
def XmlToString(content, encoding="utf-8", pretty=False):
|
||||
""" Writes the XML content to disk, touching the file only if it has changed.
|
||||
|
||||
Visual Studio files have a lot of pre-defined structures. This function makes
|
||||
it easy to represent these structures as Python data structures, instead of
|
||||
having to create a lot of function calls.
|
||||
|
||||
Each XML element of the content is represented as a list composed of:
|
||||
1. The name of the element, a string,
|
||||
2. The attributes of the element, a dictionary (optional), and
|
||||
3+. The content of the element, if any. Strings are simple text nodes and
|
||||
lists are child elements.
|
||||
|
||||
Example 1:
|
||||
<test/>
|
||||
becomes
|
||||
['test']
|
||||
|
||||
Example 2:
|
||||
<myelement a='value1' b='value2'>
|
||||
<childtype>This is</childtype>
|
||||
<childtype>it!</childtype>
|
||||
</myelement>
|
||||
|
||||
becomes
|
||||
['myelement', {'a':'value1', 'b':'value2'},
|
||||
['childtype', 'This is'],
|
||||
['childtype', 'it!'],
|
||||
]
|
||||
|
||||
Args:
|
||||
content: The structured content to be converted.
|
||||
encoding: The encoding to report on the first XML line.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
|
||||
Returns:
|
||||
The XML content as a string.
|
||||
"""
|
||||
# We create a huge list of all the elements of the file.
|
||||
xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
|
||||
if pretty:
|
||||
xml_parts.append("\n")
|
||||
_ConstructContentList(xml_parts, content, pretty)
|
||||
|
||||
# Convert it to a string
|
||||
return "".join(xml_parts)
|
||||
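An editorial sketch (not part of the vendored file) showing the docstring's "Example 2" structure passed through XmlToString:

import gyp.easy_xml as easy_xml

spec = [
    "myelement",
    {"a": "value1", "b": "value2"},
    ["childtype", "This is"],
    ["childtype", "it!"],
]
# Produces: <?xml version="1.0" encoding="utf-8"?><myelement a="value1" b="value2">...</myelement>
print(easy_xml.XmlToString(spec))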
|
||||
|
||||
def _ConstructContentList(xml_parts, specification, pretty, level=0):
|
||||
""" Appends the XML parts corresponding to the specification.
|
||||
|
||||
Args:
|
||||
xml_parts: A list of XML parts to be appended to.
|
||||
specification: The specification of the element. See EasyXml docs.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
level: Indentation level.
|
||||
"""
|
||||
# The first item in a specification is the name of the element.
|
||||
if pretty:
|
||||
indentation = " " * level
|
||||
new_line = "\n"
|
||||
else:
|
||||
indentation = ""
|
||||
new_line = ""
|
||||
name = specification[0]
|
||||
if not isinstance(name, str):
|
||||
raise Exception(
|
||||
"The first item of an EasyXml specification should be "
|
||||
"a string. Specification was " + str(specification)
|
||||
)
|
||||
xml_parts.append(indentation + "<" + name)
|
||||
|
||||
# Optionally in second position is a dictionary of the attributes.
|
||||
rest = specification[1:]
|
||||
if rest and isinstance(rest[0], dict):
|
||||
for at, val in sorted(rest[0].items()):
|
||||
xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
|
||||
rest = rest[1:]
|
||||
if rest:
|
||||
xml_parts.append(">")
|
||||
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
|
||||
multi_line = not all_strings
|
||||
if multi_line and new_line:
|
||||
xml_parts.append(new_line)
|
||||
for child_spec in rest:
|
||||
# If it's a string, append a text node.
|
||||
# Otherwise recurse over that child definition
|
||||
if isinstance(child_spec, str):
|
||||
xml_parts.append(_XmlEscape(child_spec))
|
||||
else:
|
||||
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
|
||||
if multi_line and indentation:
|
||||
xml_parts.append(indentation)
|
||||
xml_parts.append(f"</{name}>{new_line}")
|
||||
else:
|
||||
xml_parts.append("/>%s" % new_line)
|
||||
|
||||
|
||||
def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
|
||||
win32=(sys.platform == "win32")):
|
||||
""" Writes the XML content to disk, touching the file only if it has changed.
|
||||
|
||||
Args:
|
||||
content: The structured content to be written.
|
||||
path: Location of the file.
|
||||
encoding: The encoding to report on the first line of the XML file.
|
||||
pretty: True if we want pretty printing with indents and new lines.
|
||||
"""
|
||||
xml_string = XmlToString(content, encoding, pretty)
|
||||
if win32 and os.linesep != "\r\n":
|
||||
xml_string = xml_string.replace("\n", "\r\n")
|
||||
|
||||
try:  # getdefaultlocale() is deprecated since Python 3.11 and slated for removal
|
||||
default_encoding = locale.getdefaultlocale()[1]
|
||||
except AttributeError:
|
||||
default_encoding = locale.getencoding()
|
||||
|
||||
if default_encoding and default_encoding.upper() != encoding.upper():
|
||||
xml_string = xml_string.encode(encoding)
|
||||
|
||||
# Get the old content
|
||||
try:
|
||||
with open(path) as file:
|
||||
existing = file.read()
|
||||
except OSError:
|
||||
existing = None
|
||||
|
||||
# It has changed, write it
|
||||
if existing != xml_string:
|
||||
with open(path, "wb") as file:
|
||||
file.write(xml_string)
|
||||
|
||||
|
||||
_xml_escape_map = {
|
||||
'"': """,
|
||||
"'": "'",
|
||||
"<": "<",
|
||||
">": ">",
|
||||
"&": "&",
|
||||
"\n": "
",
|
||||
"\r": "
",
|
||||
}
|
||||
|
||||
|
||||
_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
|
||||
|
||||
|
||||
def _XmlEscape(value, attr=False):
|
||||
""" Escape a string for inclusion in XML."""
|
||||
|
||||
def replace(match):
|
||||
m = match.string[match.start() : match.end()]
|
||||
# don't replace single quotes in attrs
|
||||
if attr and m == "'":
|
||||
return m
|
||||
return _xml_escape_map[m]
|
||||
|
||||
return _xml_escape_re.sub(replace, value)
|
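An editorial sketch (not part of the vendored file) of the escaping behaviour above: markup characters are always replaced, while single quotes survive inside attribute values:

import gyp.easy_xml as easy_xml

print(easy_xml._XmlEscape("a < b & 'c'"))             # a &amp;lt; b &amp;amp; &amp;apos;c&amp;apos;
print(easy_xml._XmlEscape("a < b & 'c'", attr=True))  # a &amp;lt; b &amp;amp; 'c'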
113
my-app/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
generated
vendored
Executable file
|
@ -0,0 +1,113 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the easy_xml.py file. """
|
||||
|
||||
import gyp.easy_xml as easy_xml
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.stderr = StringIO()
|
||||
|
||||
def test_EasyXml_simple(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test"]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test/>',
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test"], encoding="Windows-1252"),
|
||||
'<?xml version="1.0" encoding="Windows-1252"?><test/>',
|
||||
)
|
||||
|
||||
def test_EasyXml_simple_with_attributes(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>',
|
||||
)
|
||||
|
||||
def test_EasyXml_escaping(self):
|
||||
original = "<test>'\"\r&\nfoo"
|
||||
converted = "<test>'"
&
foo"
|
||||
converted_apos = converted.replace("'", "&amp;apos;")
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(["test3", {"a": original}, original]),
|
||||
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>'
|
||||
% (converted, converted_apos),
|
||||
)
|
||||
|
||||
def test_EasyXml_pretty(self):
|
||||
self.assertEqual(
|
||||
easy_xml.XmlToString(
|
||||
["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]],
|
||||
pretty=True,
|
||||
),
|
||||
'<?xml version="1.0" encoding="utf-8"?>\n'
|
||||
"<test3>\n"
|
||||
" <GrandParent>\n"
|
||||
" <Parent1>\n"
|
||||
" <Child/>\n"
|
||||
" </Parent1>\n"
|
||||
" <Parent2/>\n"
|
||||
" </GrandParent>\n"
|
||||
"</test3>\n",
|
||||
)
|
||||
|
||||
def test_EasyXml_complex(self):
|
||||
# We want to create:
|
||||
target = (
|
||||
'<?xml version="1.0" encoding="utf-8"?>'
|
||||
"<Project>"
|
||||
'<PropertyGroup Label="Globals">'
|
||||
"<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>"
|
||||
"<Keyword>Win32Proj</Keyword>"
|
||||
"<RootNamespace>automated_ui_tests</RootNamespace>"
|
||||
"</PropertyGroup>"
|
||||
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
|
||||
"<PropertyGroup "
|
||||
"Condition=\"'$(Configuration)|$(Platform)'=="
|
||||
'\'Debug|Win32\'" Label="Configuration">'
|
||||
"<ConfigurationType>Application</ConfigurationType>"
|
||||
"<CharacterSet>Unicode</CharacterSet>"
|
||||
"<SpectreMitigation>SpectreLoadCF</SpectreMitigation>"
|
||||
"<VCToolsVersion>14.36.32532</VCToolsVersion>"
|
||||
"</PropertyGroup>"
|
||||
"</Project>"
|
||||
)
|
||||
|
||||
xml = easy_xml.XmlToString(
|
||||
[
|
||||
"Project",
|
||||
[
|
||||
"PropertyGroup",
|
||||
{"Label": "Globals"},
|
||||
["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"],
|
||||
["Keyword", "Win32Proj"],
|
||||
["RootNamespace", "automated_ui_tests"],
|
||||
],
|
||||
["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}],
|
||||
[
|
||||
"PropertyGroup",
|
||||
{
|
||||
"Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'",
|
||||
"Label": "Configuration",
|
||||
},
|
||||
["ConfigurationType", "Application"],
|
||||
["CharacterSet", "Unicode"],
|
||||
["SpectreMitigation", "SpectreLoadCF"],
|
||||
["VCToolsVersion", "14.36.32532"],
|
||||
],
|
||||
]
|
||||
)
|
||||
self.assertEqual(xml, target)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
55
my-app/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
generated
vendored
Executable file
|
@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""These functions are executed via gyp-flock-tool when using the Makefile
|
||||
generator. Used on systems that don't have a built-in flock."""
|
||||
|
||||
import fcntl
|
||||
import os
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = FlockTool()
|
||||
executor.Dispatch(args)
|
||||
|
||||
|
||||
class FlockTool:
|
||||
"""This class emulates the 'flock' command."""
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||
return name_string.title().replace("-", "")
|
||||
|
||||
def ExecFlock(self, lockfile, *cmd_list):
|
||||
"""Emulates the most basic behavior of Linux's flock(1)."""
|
||||
# Rely on exception handling to report errors.
|
||||
# Note that the stock python on SunOS has a bug
|
||||
# where fcntl.flock(fd, LOCK_EX) always fails
|
||||
# with EBADF, that's why we use this F_SETLK
|
||||
# hack instead.
|
||||
fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
|
||||
if sys.platform.startswith("aix") or sys.platform == "os400":
|
||||
# Python on AIX is compiled with LARGEFILE support, which changes the
|
||||
# struct size.
|
||||
op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||
else:
|
||||
op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
|
||||
fcntl.fcntl(fd, fcntl.F_SETLK, op)
|
||||
return subprocess.call(cmd_list)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
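The Makefile generator invokes this along the lines of "gyp-flock-tool flock <lockfile> <command...>", which Dispatch() routes to ExecFlock. An equivalent direct call is sketched below (editorial example for POSIX systems with fcntl, not part of the vendored file):

import gyp.flock_tool

# Take an exclusive lock on /tmp/build.lock, then run "touch stamp" while holding it.
gyp.flock_tool.FlockTool().ExecFlock("/tmp/build.lock", "touch", "stamp")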
0
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
generated
vendored
Executable file
804
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
generated
vendored
Executable file
|
@ -0,0 +1,804 @@
|
|||
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""
|
||||
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
|
||||
the generator flag config_path) the path of a json file that dictates the files
|
||||
and targets to search for. The following keys are supported:
|
||||
files: list of paths (relative) of the files to search for.
|
||||
test_targets: unqualified target names to search for. Any target in this list
|
||||
that depends upon a file in |files| is output regardless of the type of target
|
||||
or chain of dependencies.
|
||||
additional_compile_targets: Unqualified targets to search for in addition to
|
||||
test_targets. Targets in the combined list that depend upon a file in |files|
|
||||
are not necessarily output. For example, if the target is of type none then the
|
||||
target is not output (but one of the descendants of the target will be).
|
||||
|
||||
The following is output:
|
||||
error: only supplied if there is an error.
|
||||
compile_targets: minimal set of targets that directly or indirectly (for
|
||||
targets of type none) depend on the files in |files| and is one of the
|
||||
supplied targets or a target that one of the supplied targets depends on.
|
||||
The expectation is this set of targets is passed into a build step. This list
|
||||
always contains the output of test_targets as well.
|
||||
test_targets: set of targets from the supplied |test_targets| that either
|
||||
directly or indirectly depend upon a file in |files|. This list is useful
|
||||
if additional processing needs to be done for certain targets after the
|
||||
build, such as running tests.
|
||||
status: outputs one of three values: none of the supplied files were found,
|
||||
one of the include files changed so that it should be assumed everything
|
||||
changed (in this case test_targets and compile_targets are not output) or at
|
||||
least one file was found.
|
||||
invalid_targets: list of supplied targets that were not found.
|
||||
|
||||
Example:
|
||||
Consider a graph like the following:
|
||||
  A     D
 / \
B   C
|
||||
A depends upon both B and C, A is of type none and B and C are executables.
|
||||
D is an executable, has no dependencies and nothing depends on it.
|
||||
If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
|
||||
files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
|
||||
the following is output:
|
||||
|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
|
||||
and the supplied target A depends upon it. A is not output as a build_target
|
||||
as it is of type none with no rules and actions.
|
||||
|test_targets| = ["B"] B directly depends upon the change file b.cc.
|
||||
|
||||
Even though the file d.cc, which D depends upon, has changed D is not output
|
||||
as it was not supplied by way of |additional_compile_targets| or |test_targets|.
|
||||
|
||||
If the generator flag analyzer_output_path is specified, output is written
|
||||
there. Otherwise output is written to stdout.
|
||||
|
||||
In Gyp the "all" target is shorthand for the root targets in the files passed
|
||||
to gyp. For example, if file "a.gyp" contains targets "a1" and
|
||||
"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
|
||||
on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
|
||||
Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
|
||||
directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
|
||||
then the "all" target includes "b1" and "b2".
|
||||
"""
|
||||
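An editorial sketch (not part of the vendored file): writing a config file for the A/B/C/D example described above. Pointed at by the config_path generator flag, it asks the analyzer which supplied targets are affected by changes to b.cc and d.cc; per the docstring the expected result is test_targets == ["B"] and compile_targets == ["B"].

import json

config = {
    "files": ["b.cc", "d.cc"],
    "test_targets": ["B", "C"],
    "additional_compile_targets": ["A"],
}
with open("analyzer_config.json", "w") as f:
    json.dump(config, f)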
|
||||
|
||||
import gyp.common
|
||||
import json
|
||||
import os
|
||||
import posixpath
|
||||
|
||||
debug = False
|
||||
|
||||
found_dependency_string = "Found dependency"
|
||||
no_dependency_string = "No dependencies"
|
||||
# Status when it should be assumed that everything has changed.
|
||||
all_changed_string = "Found dependency (all)"
|
||||
|
||||
# MatchStatus is used to indicate if and how a target depends upon the supplied
|
||||
# sources.
|
||||
# The target's sources contain one of the supplied paths.
|
||||
MATCH_STATUS_MATCHES = 1
|
||||
# The target has a dependency on another target that contains one of the
|
||||
# supplied paths.
|
||||
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
|
||||
# The target's sources weren't in the supplied paths and none of the target's
|
||||
# dependencies depend upon a target that matched.
|
||||
MATCH_STATUS_DOESNT_MATCH = 3
|
||||
# The target doesn't contain the source, but the dependent targets have not yet
|
||||
# been visited to determine a more specific status.
|
||||
MATCH_STATUS_TBD = 4
|
||||
|
||||
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_default_variables = {}
|
||||
for dirname in [
|
||||
"INTERMEDIATE_DIR",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"LIB_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
]:
|
||||
generator_default_variables[dirname] = "!!!"
|
||||
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
|
||||
def _ToGypPath(path):
|
||||
"""Converts a path to the format used by gyp."""
|
||||
if os.sep == "\\" and os.altsep == "/":
|
||||
return path.replace("\\", "/")
|
||||
return path
|
||||
|
||||
|
||||
def _ResolveParent(path, base_path_components):
|
||||
"""Resolves |path|, which starts with at least one '../'. Returns an empty
|
||||
string if the path shouldn't be considered. See _AddSources() for a
|
||||
description of |base_path_components|."""
|
||||
depth = 0
|
||||
while path.startswith("../"):
|
||||
depth += 1
|
||||
path = path[3:]
|
||||
# Relative includes may go outside the source tree. For example, an action may
|
||||
# have inputs in /usr/include, which are not in the source tree.
|
||||
if depth > len(base_path_components):
|
||||
return ""
|
||||
if depth == len(base_path_components):
|
||||
return path
|
||||
return (
|
||||
"/".join(base_path_components[0 : len(base_path_components) - depth])
|
||||
+ "/"
|
||||
+ path
|
||||
)
|
||||
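An editorial sketch (not part of the vendored file) of how the "../" resolution above behaves:

import gyp.generator.analyzer as analyzer

# base_path_components for a source that lives under "a/b/c/":
print(analyzer._ResolveParent("../../x/y.cc", ["a", "b", "c"]))            # a/x/y.cc
print(analyzer._ResolveParent("../../../../outside.cc", ["a", "b", "c"]))  # "" (path is ignored)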
|
||||
|
||||
def _AddSources(sources, base_path, base_path_components, result):
|
||||
"""Extracts valid sources from |sources| and adds them to |result|. Each
|
||||
source file is relative to |base_path|, but may contain '..'. To make
|
||||
resolving '..' easier |base_path_components| contains each of the
|
||||
directories in |base_path|. Additionally each source may contain variables.
|
||||
Such sources are ignored as it is assumed dependencies on them are expressed
|
||||
and tracked in some other means."""
|
||||
# NOTE: gyp paths are always posix style.
|
||||
for source in sources:
|
||||
if not len(source) or source.startswith("!!!") or source.startswith("$"):
|
||||
continue
|
||||
# variable expansion may lead to //.
|
||||
org_source = source
|
||||
source = source[0] + source[1:].replace("//", "/")
|
||||
if source.startswith("../"):
|
||||
source = _ResolveParent(source, base_path_components)
|
||||
if len(source):
|
||||
result.append(source)
|
||||
continue
|
||||
result.append(base_path + source)
|
||||
if debug:
|
||||
print("AddSource", org_source, result[len(result) - 1])
|
||||
|
||||
|
||||
def _ExtractSourcesFromAction(action, base_path, base_path_components, results):
|
||||
if "inputs" in action:
|
||||
_AddSources(action["inputs"], base_path, base_path_components, results)
|
||||
|
||||
|
||||
def _ToLocalPath(toplevel_dir, path):
|
||||
"""Converts |path| to a path relative to |toplevel_dir|."""
|
||||
if path == toplevel_dir:
|
||||
return ""
|
||||
if path.startswith(toplevel_dir + "/"):
|
||||
return path[len(toplevel_dir) + len("/") :]
|
||||
return path
|
||||
|
||||
|
||||
def _ExtractSources(target, target_dict, toplevel_dir):
|
||||
# |target| is either absolute or relative and in the format of the OS. Gyp
|
||||
# source paths are always posix. Convert |target| to a posix path relative to
|
||||
# |toplevel_dir_|. This is done to make it easy to build source paths.
|
||||
base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
|
||||
base_path_components = base_path.split("/")
|
||||
|
||||
# Add a trailing '/' so that _AddSources() can easily build paths.
|
||||
if len(base_path):
|
||||
base_path += "/"
|
||||
|
||||
if debug:
|
||||
print("ExtractSources", target, base_path)
|
||||
|
||||
results = []
|
||||
if "sources" in target_dict:
|
||||
_AddSources(target_dict["sources"], base_path, base_path_components, results)
|
||||
# Include the inputs from any actions. Any changes to these affect the
|
||||
# resulting output.
|
||||
if "actions" in target_dict:
|
||||
for action in target_dict["actions"]:
|
||||
_ExtractSourcesFromAction(action, base_path, base_path_components, results)
|
||||
if "rules" in target_dict:
|
||||
for rule in target_dict["rules"]:
|
||||
_ExtractSourcesFromAction(rule, base_path, base_path_components, results)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
class Target:
|
||||
"""Holds information about a particular target:
|
||||
deps: set of Targets this Target depends upon. This is not recursive, only the
|
||||
direct dependent Targets.
|
||||
match_status: one of the MatchStatus values.
|
||||
back_deps: set of Targets that have a dependency on this Target.
|
||||
visited: used during iteration to indicate whether we've visited this target.
|
||||
This is used for two iterations, once in building the set of Targets and
|
||||
again in _GetBuildTargets().
|
||||
name: fully qualified name of the target.
|
||||
requires_build: True if the target type is such that it needs to be built.
|
||||
See _DoesTargetTypeRequireBuild for details.
|
||||
added_to_compile_targets: used when determining if the target was added to the
|
||||
set of targets that needs to be built.
|
||||
in_roots: true if this target is a descendant of one of the root nodes.
|
||||
is_executable: true if the type of target is executable.
|
||||
is_static_library: true if the type of target is static_library.
|
||||
is_or_has_linked_ancestor: true if the target does a link (eg executable), or
|
||||
if there is a target in back_deps that does a link."""
|
||||
|
||||
def __init__(self, name):
|
||||
self.deps = set()
|
||||
self.match_status = MATCH_STATUS_TBD
|
||||
self.back_deps = set()
|
||||
self.name = name
|
||||
# TODO(sky): I don't like hanging this off Target. This state is specific
|
||||
# to certain functions and should be isolated there.
|
||||
self.visited = False
|
||||
self.requires_build = False
|
||||
self.added_to_compile_targets = False
|
||||
self.in_roots = False
|
||||
self.is_executable = False
|
||||
self.is_static_library = False
|
||||
self.is_or_has_linked_ancestor = False
|
||||
|
||||
|
||||
class Config:
|
||||
"""Details what we're looking for
|
||||
files: set of files to search for
|
||||
targets: see file description for details."""
|
||||
|
||||
def __init__(self):
|
||||
self.files = []
|
||||
self.targets = set()
|
||||
self.additional_compile_target_names = set()
|
||||
self.test_target_names = set()
|
||||
|
||||
def Init(self, params):
|
||||
"""Initializes Config. This is a separate method as it raises an exception
|
||||
if there is a parse error."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
config_path = generator_flags.get("config_path", None)
|
||||
if not config_path:
|
||||
return
|
||||
try:
|
||||
f = open(config_path)
|
||||
config = json.load(f)
|
||||
f.close()
|
||||
except OSError:
|
||||
raise Exception("Unable to open file " + config_path)
|
||||
except ValueError as e:
|
||||
raise Exception("Unable to parse config file " + config_path + str(e))
|
||||
if not isinstance(config, dict):
|
||||
raise Exception("config_path must be a JSON file containing a dictionary")
|
||||
self.files = config.get("files", [])
|
||||
self.additional_compile_target_names = set(
|
||||
config.get("additional_compile_targets", [])
|
||||
)
|
||||
self.test_target_names = set(config.get("test_targets", []))
|
||||
|
||||
|
||||
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
|
||||
"""Returns true if the build file |build_file| is either in |files| or
|
||||
one of the files included by |build_file| is in |files|. |toplevel_dir| is
|
||||
the root of the source tree."""
|
||||
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
|
||||
if debug:
|
||||
print("gyp file modified", build_file)
|
||||
return True
|
||||
|
||||
# First element of included_files is the file itself.
|
||||
if len(data[build_file]["included_files"]) <= 1:
|
||||
return False
|
||||
|
||||
for include_file in data[build_file]["included_files"][1:]:
|
||||
# |included_files| are relative to the directory of the |build_file|.
|
||||
rel_include_file = _ToGypPath(
|
||||
gyp.common.UnrelativePath(include_file, build_file)
|
||||
)
|
||||
if _ToLocalPath(toplevel_dir, rel_include_file) in files:
|
||||
if debug:
|
||||
print(
|
||||
"included gyp file modified, gyp_file=",
|
||||
build_file,
|
||||
"included file=",
|
||||
rel_include_file,
|
||||
)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _GetOrCreateTargetByName(targets, target_name):
|
||||
"""Creates or returns the Target at targets[target_name]. If there is no
|
||||
Target for |target_name| one is created. Returns a tuple of whether a new
|
||||
Target was created and the Target."""
|
||||
if target_name in targets:
|
||||
return False, targets[target_name]
|
||||
target = Target(target_name)
|
||||
targets[target_name] = target
|
||||
return True, target
|
||||
|
||||
|
||||
def _DoesTargetTypeRequireBuild(target_dict):
|
||||
"""Returns true if the target type is such that it needs to be built."""
|
||||
# If a 'none' target has rules or actions we assume it requires a build.
|
||||
return bool(
|
||||
target_dict["type"] != "none"
|
||||
or target_dict.get("actions")
|
||||
or target_dict.get("rules")
|
||||
)
|
||||
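An editorial sketch (not part of the vendored file): only "none" targets without actions or rules are treated as build-free:

import gyp.generator.analyzer as analyzer

print(analyzer._DoesTargetTypeRequireBuild({"type": "executable"}))              # True
print(analyzer._DoesTargetTypeRequireBuild({"type": "none"}))                    # False
print(analyzer._DoesTargetTypeRequireBuild({"type": "none", "actions": ["a"]}))  # True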
|
||||
|
||||
def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
|
||||
"""Returns a tuple of the following:
|
||||
. A dictionary mapping from fully qualified name to Target.
|
||||
. A list of the targets that have a source file in |files|.
|
||||
. Targets that constitute the 'all' target. See description at top of file
|
||||
for details on the 'all' target.
|
||||
This sets the |match_status| of the targets that contain any of the source
|
||||
files in |files| to MATCH_STATUS_MATCHES.
|
||||
|toplevel_dir| is the root of the source tree."""
|
||||
# Maps from target name to Target.
|
||||
name_to_target = {}
|
||||
|
||||
# Targets that matched.
|
||||
matching_targets = []
|
||||
|
||||
# Queue of targets to visit.
|
||||
targets_to_visit = target_list[:]
|
||||
|
||||
# Maps from build file to a boolean indicating whether the build file is in
|
||||
# |files|.
|
||||
build_file_in_files = {}
|
||||
|
||||
# Root targets across all files.
|
||||
roots = set()
|
||||
|
||||
# Set of Targets in |build_files|.
|
||||
build_file_targets = set()
|
||||
|
||||
while len(targets_to_visit) > 0:
|
||||
target_name = targets_to_visit.pop()
|
||||
created_target, target = _GetOrCreateTargetByName(name_to_target, target_name)
|
||||
if created_target:
|
||||
roots.add(target)
|
||||
elif target.visited:
|
||||
continue
|
||||
|
||||
target.visited = True
|
||||
target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name])
|
||||
target_type = target_dicts[target_name]["type"]
|
||||
target.is_executable = target_type == "executable"
|
||||
target.is_static_library = target_type == "static_library"
|
||||
target.is_or_has_linked_ancestor = (
|
||||
target_type in {"executable", "shared_library"}
|
||||
)
|
||||
|
||||
build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
|
||||
if build_file not in build_file_in_files:
|
||||
build_file_in_files[build_file] = _WasBuildFileModified(
|
||||
build_file, data, files, toplevel_dir
|
||||
)
|
||||
|
||||
if build_file in build_files:
|
||||
build_file_targets.add(target)
|
||||
|
||||
# If a build file (or any of its included files) is modified we assume all
|
||||
# targets in the file are modified.
|
||||
if build_file_in_files[build_file]:
|
||||
print("matching target from modified build file", target_name)
|
||||
target.match_status = MATCH_STATUS_MATCHES
|
||||
matching_targets.append(target)
|
||||
else:
|
||||
sources = _ExtractSources(
|
||||
target_name, target_dicts[target_name], toplevel_dir
|
||||
)
|
||||
for source in sources:
|
||||
if _ToGypPath(os.path.normpath(source)) in files:
|
||||
print("target", target_name, "matches", source)
|
||||
target.match_status = MATCH_STATUS_MATCHES
|
||||
matching_targets.append(target)
|
||||
break
|
||||
|
||||
# Add dependencies to visit as well as updating back pointers for deps.
|
||||
for dep in target_dicts[target_name].get("dependencies", []):
|
||||
targets_to_visit.append(dep)
|
||||
|
||||
created_dep_target, dep_target = _GetOrCreateTargetByName(
|
||||
name_to_target, dep
|
||||
)
|
||||
if not created_dep_target:
|
||||
roots.discard(dep_target)
|
||||
|
||||
target.deps.add(dep_target)
|
||||
dep_target.back_deps.add(target)
|
||||
|
||||
return name_to_target, matching_targets, roots & build_file_targets
|
||||
|
||||
|
||||
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
|
||||
"""Returns a tuple of the following:
|
||||
. mapping (dictionary) from unqualified name to Target for all the
|
||||
Targets in |to_find|.
|
||||
. any target names not found. If this is empty all targets were found."""
|
||||
result = {}
|
||||
if not to_find:
|
||||
return {}, []
|
||||
to_find = set(to_find)
|
||||
for target_name in all_targets:
|
||||
extracted = gyp.common.ParseQualifiedTarget(target_name)
|
||||
if len(extracted) > 1 and extracted[1] in to_find:
|
||||
to_find.remove(extracted[1])
|
||||
result[extracted[1]] = all_targets[target_name]
|
||||
if not to_find:
|
||||
return result, []
|
||||
return result, list(to_find)
|
||||
|
||||
|
||||
def _DoesTargetDependOnMatchingTargets(target):
|
||||
"""Returns true if |target| or any of its dependencies is one of the
|
||||
targets containing the files supplied as input to analyzer. This updates
|
||||
|matches| of the Targets as it recurses.
|
||||
target: the Target to look for."""
|
||||
if target.match_status == MATCH_STATUS_DOESNT_MATCH:
|
||||
return False
|
||||
if (
|
||||
target.match_status in {MATCH_STATUS_MATCHES,
|
||||
MATCH_STATUS_MATCHES_BY_DEPENDENCY}
|
||||
):
|
||||
return True
|
||||
for dep in target.deps:
|
||||
if _DoesTargetDependOnMatchingTargets(dep):
|
||||
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
|
||||
print("\t", target.name, "matches by dep", dep.name)
|
||||
return True
|
||||
target.match_status = MATCH_STATUS_DOESNT_MATCH
|
||||
return False
|
||||
|
||||
|
||||
def _GetTargetsDependingOnMatchingTargets(possible_targets):
|
||||
"""Returns the list of Targets in |possible_targets| that depend (either
|
||||
directly or indirectly) on at least one of the targets containing the files
|
||||
supplied as input to analyzer.
|
||||
possible_targets: targets to search from."""
|
||||
found = []
|
||||
print("Targets that matched by dependency:")
|
||||
for target in possible_targets:
|
||||
if _DoesTargetDependOnMatchingTargets(target):
|
||||
found.append(target)
|
||||
return found
|
||||
|
||||
|
||||
def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
|
||||
"""Recurses through all targets that depend on |target|, adding all targets
|
||||
that need to be built (and are in |roots|) to |result|.
|
||||
roots: set of root targets.
|
||||
add_if_no_ancestor: If true and there are no ancestors of |target| then add
|
||||
|target| to |result|. |target| must still be in |roots|.
|
||||
result: targets that need to be built are added here."""
|
||||
if target.visited:
|
||||
return
|
||||
|
||||
target.visited = True
|
||||
target.in_roots = target in roots
|
||||
|
||||
for back_dep_target in target.back_deps:
|
||||
_AddCompileTargets(back_dep_target, roots, False, result)
|
||||
target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
|
||||
target.in_roots |= back_dep_target.in_roots
|
||||
target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor
|
||||
|
||||
# Always add 'executable' targets. Even though they may be built by other
|
||||
# targets that depend upon them it makes detection of what is going to be
|
||||
# built easier.
|
||||
# And always add static_libraries that have no dependencies on them from
|
||||
# linkables. This is necessary as the other dependencies on them may be
|
||||
# static libraries themselves, which are not compile time dependencies.
|
||||
if target.in_roots and (
|
||||
target.is_executable
|
||||
or (
|
||||
not target.added_to_compile_targets
|
||||
and (add_if_no_ancestor or target.requires_build)
|
||||
)
|
||||
or (
|
||||
target.is_static_library
|
||||
and add_if_no_ancestor
|
||||
and not target.is_or_has_linked_ancestor
|
||||
)
|
||||
):
|
||||
print(
|
||||
"\t\tadding to compile targets",
|
||||
target.name,
|
||||
"executable",
|
||||
target.is_executable,
|
||||
"added_to_compile_targets",
|
||||
target.added_to_compile_targets,
|
||||
"add_if_no_ancestor",
|
||||
add_if_no_ancestor,
|
||||
"requires_build",
|
||||
target.requires_build,
|
||||
"is_static_library",
|
||||
target.is_static_library,
|
||||
"is_or_has_linked_ancestor",
|
||||
target.is_or_has_linked_ancestor,
|
||||
)
|
||||
result.add(target)
|
||||
target.added_to_compile_targets = True
|
||||
|
||||
|
||||
def _GetCompileTargets(matching_targets, supplied_targets):
|
||||
"""Returns the set of Targets that require a build.
|
||||
matching_targets: targets that changed and need to be built.
|
||||
supplied_targets: set of targets supplied to analyzer to search from."""
|
||||
result = set()
|
||||
for target in matching_targets:
|
||||
print("finding compile targets for match", target.name)
|
||||
_AddCompileTargets(target, supplied_targets, True, result)
|
||||
return result
|
||||
|
||||
|
||||
def _WriteOutput(params, **values):
|
||||
"""Writes the output, either to stdout or a file is specified."""
|
||||
if "error" in values:
|
||||
print("Error:", values["error"])
|
||||
if "status" in values:
|
||||
print(values["status"])
|
||||
if "targets" in values:
|
||||
values["targets"].sort()
|
||||
print("Supplied targets that depend on changed files:")
|
||||
for target in values["targets"]:
|
||||
print("\t", target)
|
||||
if "invalid_targets" in values:
|
||||
values["invalid_targets"].sort()
|
||||
print("The following targets were not found:")
|
||||
for target in values["invalid_targets"]:
|
||||
print("\t", target)
|
||||
if "build_targets" in values:
|
||||
values["build_targets"].sort()
|
||||
print("Targets that require a build:")
|
||||
for target in values["build_targets"]:
|
||||
print("\t", target)
|
||||
if "compile_targets" in values:
|
||||
values["compile_targets"].sort()
|
||||
print("Targets that need to be built:")
|
||||
for target in values["compile_targets"]:
|
||||
print("\t", target)
|
||||
if "test_targets" in values:
|
||||
values["test_targets"].sort()
|
||||
print("Test targets:")
|
||||
for target in values["test_targets"]:
|
||||
print("\t", target)
|
||||
|
||||
output_path = params.get("generator_flags", {}).get("analyzer_output_path", None)
|
||||
if not output_path:
|
||||
print(json.dumps(values))
|
||||
return
|
||||
try:
|
||||
f = open(output_path, "w")
|
||||
f.write(json.dumps(values) + "\n")
|
||||
f.close()
|
||||
except OSError as e:
|
||||
print("Error writing to output file", output_path, str(e))
|
||||
|
||||
|
||||
def _WasGypIncludeFileModified(params, files):
|
||||
"""Returns true if one of the files in |files| is in the set of included
|
||||
files."""
|
||||
if params["options"].includes:
|
||||
for include in params["options"].includes:
|
||||
if _ToGypPath(os.path.normpath(include)) in files:
|
||||
print("Include file modified, assuming all changed", include)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _NamesNotIn(names, mapping):
|
||||
"""Returns a list of the values in |names| that are not in |mapping|."""
|
||||
return [name for name in names if name not in mapping]
|
||||
|
||||
|
||||
def _LookupTargets(names, mapping):
|
||||
"""Returns a list of the mapping[name] for each value in |names| that is in
|
||||
|mapping|."""
|
||||
return [mapping[name] for name in names if name in mapping]
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
"""Calculate additional variables for use in the build (called by gyp)."""
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "mac":
|
||||
default_variables.setdefault("OS", "mac")
|
||||
elif flavor == "win":
|
||||
default_variables.setdefault("OS", "win")
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
else:
|
||||
operating_system = flavor
|
||||
if flavor == "android":
|
||||
operating_system = "linux" # Keep this legacy behavior for now.
|
||||
default_variables.setdefault("OS", operating_system)
|
||||
|
||||
|
||||
class TargetCalculator:
|
||||
"""Calculates the matching test_targets and matching compile_targets."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
files,
|
||||
additional_compile_target_names,
|
||||
test_target_names,
|
||||
data,
|
||||
target_list,
|
||||
target_dicts,
|
||||
toplevel_dir,
|
||||
build_files,
|
||||
):
|
||||
self._additional_compile_target_names = set(additional_compile_target_names)
|
||||
self._test_target_names = set(test_target_names)
|
||||
(
|
||||
self._name_to_target,
|
||||
self._changed_targets,
|
||||
self._root_targets,
|
||||
) = _GenerateTargets(
|
||||
data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files
|
||||
)
|
||||
(
|
||||
self._unqualified_mapping,
|
||||
self.invalid_targets,
|
||||
) = _GetUnqualifiedToTargetMapping(
|
||||
self._name_to_target, self._supplied_target_names_no_all()
|
||||
)
|
||||
|
||||
def _supplied_target_names(self):
|
||||
return self._additional_compile_target_names | self._test_target_names
|
||||
|
||||
def _supplied_target_names_no_all(self):
|
||||
"""Returns the supplied test targets without 'all'."""
|
||||
result = self._supplied_target_names()
|
||||
result.discard("all")
|
||||
return result
|
||||
|
||||
def is_build_impacted(self):
|
||||
"""Returns true if the supplied files impact the build at all."""
|
||||
return self._changed_targets
|
||||
|
||||
def find_matching_test_target_names(self):
|
||||
"""Returns the set of output test targets."""
|
||||
assert self.is_build_impacted()
|
||||
# Find the test targets first. 'all' is special cased to mean all the
|
||||
# root targets. To handle 'all', the supplied |test_targets| are expanded
|
||||
# to include the root targets during lookup. If any of the root targets
|
||||
# match, we remove it and replace it with 'all'.
|
||||
test_target_names_no_all = set(self._test_target_names)
|
||||
test_target_names_no_all.discard("all")
|
||||
test_targets_no_all = _LookupTargets(
|
||||
test_target_names_no_all, self._unqualified_mapping
|
||||
)
|
||||
test_target_names_contains_all = "all" in self._test_target_names
|
||||
if test_target_names_contains_all:
|
||||
test_targets = list(set(test_targets_no_all) | set(self._root_targets))
|
||||
else:
|
||||
test_targets = list(test_targets_no_all)
|
||||
print("supplied test_targets")
|
||||
for target_name in self._test_target_names:
|
||||
print("\t", target_name)
|
||||
print("found test_targets")
|
||||
for target in test_targets:
|
||||
print("\t", target.name)
|
||||
print("searching for matching test targets")
|
||||
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
|
||||
matching_test_targets_contains_all = test_target_names_contains_all and set(
|
||||
matching_test_targets
|
||||
) & set(self._root_targets)
|
||||
if matching_test_targets_contains_all:
|
||||
# Remove any of the targets for all that were not explicitly supplied,
|
||||
# 'all' is subsequently added to the matching names below.
|
||||
matching_test_targets = list(
|
||||
set(matching_test_targets) & set(test_targets_no_all)
|
||||
)
|
||||
print("matched test_targets")
|
||||
for target in matching_test_targets:
|
||||
print("\t", target.name)
|
||||
matching_target_names = [
|
||||
gyp.common.ParseQualifiedTarget(target.name)[1]
|
||||
for target in matching_test_targets
|
||||
]
|
||||
if matching_test_targets_contains_all:
|
||||
matching_target_names.append("all")
|
||||
print("\tall")
|
||||
return matching_target_names
|
||||
|
||||
def find_matching_compile_target_names(self):
|
||||
"""Returns the set of output compile targets."""
|
||||
assert self.is_build_impacted()
|
||||
# Compile targets are found by searching up from changed targets.
|
||||
# Reset the visited status for _GetBuildTargets.
|
||||
for target in self._name_to_target.values():
|
||||
target.visited = False
|
||||
|
||||
supplied_targets = _LookupTargets(
|
||||
self._supplied_target_names_no_all(), self._unqualified_mapping
|
||||
)
|
||||
if "all" in self._supplied_target_names():
|
||||
supplied_targets = list(set(supplied_targets) | set(self._root_targets))
|
||||
print("Supplied test_targets & compile_targets")
|
||||
for target in supplied_targets:
|
||||
print("\t", target.name)
|
||||
print("Finding compile targets")
|
||||
compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets)
|
||||
return [
|
||||
gyp.common.ParseQualifiedTarget(target.name)[1]
|
||||
for target in compile_targets
|
||||
]
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
"""Called by gyp as the final stage. Outputs results."""
|
||||
config = Config()
|
||||
try:
|
||||
config.Init(params)
|
||||
|
||||
if not config.files:
|
||||
raise Exception(
|
||||
"Must specify files to analyze via config_path generator " "flag"
|
||||
)
|
||||
|
||||
toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir))
|
||||
if debug:
|
||||
print("toplevel_dir", toplevel_dir)
|
||||
|
||||
if _WasGypIncludeFileModified(params, config.files):
|
||||
result_dict = {
|
||||
"status": all_changed_string,
|
||||
"test_targets": list(config.test_target_names),
|
||||
"compile_targets": list(
|
||||
config.additional_compile_target_names | config.test_target_names
|
||||
),
|
||||
}
|
||||
_WriteOutput(params, **result_dict)
|
||||
return
|
||||
|
||||
calculator = TargetCalculator(
|
||||
config.files,
|
||||
config.additional_compile_target_names,
|
||||
config.test_target_names,
|
||||
data,
|
||||
target_list,
|
||||
target_dicts,
|
||||
toplevel_dir,
|
||||
params["build_files"],
|
||||
)
|
||||
if not calculator.is_build_impacted():
|
||||
result_dict = {
|
||||
"status": no_dependency_string,
|
||||
"test_targets": [],
|
||||
"compile_targets": [],
|
||||
}
|
||||
if calculator.invalid_targets:
|
||||
result_dict["invalid_targets"] = calculator.invalid_targets
|
||||
_WriteOutput(params, **result_dict)
|
||||
return
|
||||
|
||||
test_target_names = calculator.find_matching_test_target_names()
|
||||
compile_target_names = calculator.find_matching_compile_target_names()
|
||||
found_at_least_one_target = compile_target_names or test_target_names
|
||||
result_dict = {
|
||||
"test_targets": test_target_names,
|
||||
"status": found_dependency_string
|
||||
if found_at_least_one_target
|
||||
else no_dependency_string,
|
||||
"compile_targets": list(set(compile_target_names) | set(test_target_names)),
|
||||
}
|
||||
if calculator.invalid_targets:
|
||||
result_dict["invalid_targets"] = calculator.invalid_targets
|
||||
_WriteOutput(params, **result_dict)
|
||||
|
||||
except Exception as e:
|
||||
_WriteOutput(params, error=str(e))
|
1173
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
generated
vendored
Executable file
File diff suppressed because it is too large
1318
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
generated
vendored
Executable file
File diff suppressed because it is too large
123
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
generated
vendored
Executable file
|
@ -0,0 +1,123 @@
|
|||
# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import gyp.common
|
||||
import gyp.xcode_emulation
|
||||
import json
|
||||
import os
|
||||
|
||||
generator_additional_non_configuration_keys = []
|
||||
generator_additional_path_sections = []
|
||||
generator_extra_sources_for_rules = []
|
||||
generator_filelist_paths = None
|
||||
generator_supports_multiple_toolsets = True
|
||||
generator_wants_sorted_dependencies = False
|
||||
|
||||
# Lifted from make.py. The actual values don't matter much.
|
||||
generator_default_variables = {
|
||||
"CONFIGURATION_NAME": "$(BUILDTYPE)",
|
||||
"EXECUTABLE_PREFIX": "",
|
||||
"EXECUTABLE_SUFFIX": "",
|
||||
"INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni",
|
||||
"PRODUCT_DIR": "$(builddir)",
|
||||
"RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s",
|
||||
"RULE_INPUT_EXT": "$(suffix $<)",
|
||||
"RULE_INPUT_NAME": "$(notdir $<)",
|
||||
"RULE_INPUT_PATH": "$(abspath $<)",
|
||||
"RULE_INPUT_ROOT": "%(INPUT_ROOT)s",
|
||||
"SHARED_INTERMEDIATE_DIR": "$(obj)/gen",
|
||||
"SHARED_LIB_PREFIX": "lib",
|
||||
"STATIC_LIB_PREFIX": "lib",
|
||||
"STATIC_LIB_SUFFIX": ".a",
|
||||
}
|
||||
|
||||
|
||||
def IsMac(params):
|
||||
return gyp.common.GetFlavor(params) == "mac"
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
|
||||
|
||||
|
||||
def AddCommandsForTarget(cwd, target, params, per_config_commands):
|
||||
output_dir = params["generator_flags"].get("output_dir", "out")
|
||||
for configuration_name, configuration in target["configurations"].items():
|
||||
if IsMac(params):
|
||||
xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
|
||||
cflags = xcode_settings.GetCflags(configuration_name)
|
||||
cflags_c = xcode_settings.GetCflagsC(configuration_name)
|
||||
cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
|
||||
else:
|
||||
cflags = configuration.get("cflags", [])
|
||||
cflags_c = configuration.get("cflags_c", [])
|
||||
cflags_cc = configuration.get("cflags_cc", [])
|
||||
|
||||
cflags_c = cflags + cflags_c
|
||||
cflags_cc = cflags + cflags_cc
|
||||
|
||||
defines = configuration.get("defines", [])
|
||||
defines = ["-D" + s for s in defines]
|
||||
|
||||
# TODO(bnoordhuis) Handle generated source files.
|
||||
extensions = (".c", ".cc", ".cpp", ".cxx")
|
||||
sources = [s for s in target.get("sources", []) if s.endswith(extensions)]
|
||||
|
||||
def resolve(filename):
|
||||
return os.path.abspath(os.path.join(cwd, filename))
|
||||
|
||||
# TODO(bnoordhuis) Handle generated header files.
|
||||
include_dirs = configuration.get("include_dirs", [])
|
||||
include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")]
|
||||
includes = ["-I" + resolve(s) for s in include_dirs]
|
||||
|
||||
defines = gyp.common.EncodePOSIXShellList(defines)
|
||||
includes = gyp.common.EncodePOSIXShellList(includes)
|
||||
cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
|
||||
cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
|
||||
|
||||
commands = per_config_commands.setdefault(configuration_name, [])
|
||||
for source in sources:
|
||||
file = resolve(source)
|
||||
isc = source.endswith(".c")
|
||||
cc = "cc" if isc else "c++"
|
||||
cflags = cflags_c if isc else cflags_cc
|
||||
command = " ".join(
|
||||
(
|
||||
cc,
|
||||
defines,
|
||||
includes,
|
||||
cflags,
|
||||
"-c",
|
||||
gyp.common.EncodePOSIXShellArgument(file),
|
||||
)
|
||||
)
|
||||
commands.append({"command": command, "directory": output_dir, "file": file})
|
||||
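Each entry appended above follows the standard compile_commands.json shape with "command", "directory" and "file" keys. An editorial sketch of reading the generated database (not part of the vendored file; the "out/Default" path assumes the default output directory and a configuration named "Default"):

import json

with open("out/Default/compile_commands.json") as fp:
    for entry in json.load(fp):
        print(entry["file"], "->", entry["command"])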
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
per_config_commands = {}
|
||||
for qualified_target, target in target_dicts.items():
|
||||
build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
|
||||
qualified_target
|
||||
)
|
||||
if IsMac(params):
|
||||
settings = data[build_file]
|
||||
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
|
||||
cwd = os.path.dirname(build_file)
|
||||
AddCommandsForTarget(cwd, target, params, per_config_commands)
|
||||
|
||||
try:
|
||||
output_dir = params["options"].generator_output
|
||||
except (AttributeError, KeyError):
|
||||
output_dir = params["generator_flags"].get("output_dir", "out")
|
||||
for configuration_name, commands in per_config_commands.items():
|
||||
filename = os.path.join(output_dir, configuration_name, "compile_commands.json")
|
||||
gyp.common.EnsureDirExists(filename)
|
||||
fp = open(filename, "w")
|
||||
json.dump(commands, fp=fp, indent=0, check_circular=False)
|
||||
|
||||
|
||||
def PerformBuild(data, configurations, params):
|
||||
pass
|
103
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
generated
vendored
Executable file
|
@ -0,0 +1,103 @@
|
|||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
import os
|
||||
import gyp
|
||||
import gyp.common
|
||||
import gyp.msvs_emulation
|
||||
import json
|
||||
|
||||
generator_supports_multiple_toolsets = True
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_filelist_paths = {}
|
||||
|
||||
generator_default_variables = {}
|
||||
for dirname in [
|
||||
"INTERMEDIATE_DIR",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"LIB_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
]:
|
||||
# Some gyp steps fail if these are empty(!).
|
||||
generator_default_variables[dirname] = "dir"
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for key, val in generator_flags.items():
|
||||
default_variables.setdefault(key, val)
|
||||
default_variables.setdefault("OS", gyp.common.GetFlavor(params))
|
||||
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
|
||||
|
||||
def CalculateGeneratorInputInfo(params):
|
||||
"""Calculate the generator specific info that gets fed to input (called by
|
||||
gyp)."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
if generator_flags.get("adjust_static_libraries", False):
|
||||
global generator_wants_static_library_dependencies_adjusted
|
||||
generator_wants_static_library_dependencies_adjusted = True
|
||||
|
||||
toplevel = params["options"].toplevel_dir
|
||||
generator_dir = os.path.relpath(params["options"].generator_output or ".")
|
||||
# output_dir: relative path from generator_dir to the build directory.
|
||||
output_dir = generator_flags.get("output_dir", "out")
|
||||
qualified_out_dir = os.path.normpath(
|
||||
os.path.join(toplevel, generator_dir, output_dir, "gypfiles")
|
||||
)
|
||||
global generator_filelist_paths
|
||||
generator_filelist_paths = {
|
||||
"toplevel": toplevel,
|
||||
"qualified_out_dir": qualified_out_dir,
|
||||
}
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
# Map of target -> list of targets it depends on.
|
||||
edges = {}
|
||||
|
||||
# Queue of targets to visit.
|
||||
targets_to_visit = target_list[:]
|
||||
|
||||
while len(targets_to_visit) > 0:
|
||||
target = targets_to_visit.pop()
|
||||
if target in edges:
|
||||
continue
|
||||
edges[target] = []
|
||||
|
||||
for dep in target_dicts[target].get("dependencies", []):
|
||||
edges[target].append(dep)
|
||||
targets_to_visit.append(dep)
|
||||
|
||||
try:
|
||||
filepath = params["generator_flags"]["output_dir"]
|
||||
except KeyError:
|
||||
filepath = "."
|
||||
filename = os.path.join(filepath, "dump.json")
|
||||
f = open(filename, "w")
|
||||
json.dump(edges, f)
|
||||
f.close()
|
||||
print("Wrote json to %s." % filename)
|
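An editorial sketch (not part of the vendored file): dump.json maps each fully qualified target (for example "a.gyp:a1#target") to the list of targets it depends on, so it can be consumed like this:

import json

with open("dump.json") as f:
    edges = json.load(f)
for target, deps in edges.items():
    print(target, "depends on", deps or "nothing")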
461
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
generated
vendored
Executable file
|
@ -0,0 +1,461 @@
|
|||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""GYP backend that generates Eclipse CDT settings files.
|
||||
|
||||
This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
|
||||
files that can be imported into an Eclipse CDT project. The XML file contains a
|
||||
list of include paths and symbols (i.e. defines).
|
||||
|
||||
Because a full .cproject definition is not created by this generator, it's not
|
||||
possible to properly define the include dirs and symbols for each file
|
||||
individually. Instead, one set of includes/symbols is generated for the entire
|
||||
project. This works fairly well (and is a vast improvement in general), but may
|
||||
still result in a few indexer issues here and there.
|
||||
|
||||
This generator has no automated tests, so expect it to be broken.
|
||||
"""
|
||||
|
||||
from xml.sax.saxutils import escape
|
||||
import os.path
|
||||
import subprocess
|
||||
import gyp
|
||||
import gyp.common
|
||||
import gyp.msvs_emulation
|
||||
import shlex
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
generator_wants_static_library_dependencies_adjusted = False
|
||||
|
||||
generator_default_variables = {}
|
||||
|
||||
for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]:
|
||||
# Some gyp steps fail if these are empty(!), so we convert them to variables
|
||||
generator_default_variables[dirname] = "$" + dirname
|
||||
|
||||
for unused in [
|
||||
"RULE_INPUT_PATH",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"CONFIGURATION_NAME",
|
||||
]:
|
||||
generator_default_variables[unused] = ""
|
||||
|
||||
# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
|
||||
# part of the path when dealing with generated headers. This value will be
|
||||
# replaced dynamically for each configuration.
|
||||
generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR"
|
||||
|
||||
|
||||
def CalculateVariables(default_variables, params):
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for key, val in generator_flags.items():
|
||||
default_variables.setdefault(key, val)
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
default_variables.setdefault("OS", flavor)
|
||||
if flavor == "win":
|
||||
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
|
||||
|
||||
|
||||
def CalculateGeneratorInputInfo(params):
|
||||
"""Calculate the generator specific info that gets fed to input (called by
|
||||
gyp)."""
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
if generator_flags.get("adjust_static_libraries", False):
|
||||
global generator_wants_static_library_dependencies_adjusted
|
||||
generator_wants_static_library_dependencies_adjusted = True
|
||||
|
||||
|
||||
def GetAllIncludeDirectories(
|
||||
target_list,
|
||||
target_dicts,
|
||||
shared_intermediate_dirs,
|
||||
config_name,
|
||||
params,
|
||||
compiler_path,
|
||||
):
|
||||
"""Calculate the set of include directories to be used.
|
||||
|
||||
Returns:
|
||||
A list including all the include_dir's specified for every target followed
|
||||
by any include directories that were added as cflag compiler options.
|
||||
"""
|
||||
|
||||
gyp_includes_set = set()
|
||||
compiler_includes_list = []
|
||||
|
||||
# Find compiler's default include dirs.
|
||||
if compiler_path:
|
||||
command = shlex.split(compiler_path)
|
||||
command.extend(["-E", "-xc++", "-v", "-"])
|
||||
proc = subprocess.Popen(
|
||||
args=command,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
)
|
||||
output = proc.communicate()[1].decode("utf-8")
|
||||
# Extract the list of include dirs from the output, which has this format:
|
||||
# ...
|
||||
# #include "..." search starts here:
|
||||
# #include <...> search starts here:
|
||||
# /usr/include/c++/4.6
|
||||
# /usr/local/include
|
||||
# End of search list.
|
||||
# ...
|
||||
in_include_list = False
|
||||
for line in output.splitlines():
|
||||
if line.startswith("#include"):
|
||||
in_include_list = True
|
||||
continue
|
||||
if line.startswith("End of search list."):
|
||||
break
|
||||
if in_include_list:
|
||||
include_dir = line.strip()
|
||||
if include_dir not in compiler_includes_list:
|
||||
compiler_includes_list.append(include_dir)
|
||||
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
if config_name in target["configurations"]:
|
||||
config = target["configurations"][config_name]
|
||||
|
||||
# Look for any include dirs that were explicitly added via cflags. This
|
||||
# may be done in gyp files to force certain includes to come at the end.
|
||||
# TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
|
||||
# remove this.
|
||||
if flavor == "win":
|
||||
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
|
||||
cflags = msvs_settings.GetCflags(config_name)
|
||||
else:
|
||||
cflags = config["cflags"]
|
||||
for cflag in cflags:
|
||||
if cflag.startswith("-I"):
|
||||
include_dir = cflag[2:]
|
||||
if include_dir not in compiler_includes_list:
|
||||
compiler_includes_list.append(include_dir)
|
||||
|
||||
# Find standard gyp include dirs.
|
||||
if "include_dirs" in config:
|
||||
include_dirs = config["include_dirs"]
|
||||
for shared_intermediate_dir in shared_intermediate_dirs:
|
||||
for include_dir in include_dirs:
|
||||
include_dir = include_dir.replace(
|
||||
"$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir
|
||||
)
|
||||
if not os.path.isabs(include_dir):
|
||||
base_dir = os.path.dirname(target_name)
|
||||
|
||||
include_dir = base_dir + "/" + include_dir
|
||||
include_dir = os.path.abspath(include_dir)
|
||||
|
||||
gyp_includes_set.add(include_dir)
|
||||
|
||||
# Generate a list that has all the include dirs.
|
||||
all_includes_list = list(gyp_includes_set)
|
||||
all_includes_list.sort()
|
||||
for compiler_include in compiler_includes_list:
|
||||
if compiler_include not in gyp_includes_set:
|
||||
all_includes_list.append(compiler_include)
|
||||
|
||||
# All done.
|
||||
return all_includes_list
|
||||
|
||||
|
||||
def GetCompilerPath(target_list, data, options):
|
||||
"""Determine a command that can be used to invoke the compiler.
|
||||
|
||||
Returns:
|
||||
If this is a gyp project that has explicit make settings, try to determine
|
||||
the compiler from that. Otherwise, see if a compiler was specified via the
|
||||
CC_target environment variable.
|
||||
"""
|
||||
# First, see if the compiler is configured in make's settings.
|
||||
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
|
||||
make_global_settings_dict = data[build_file].get("make_global_settings", {})
|
||||
for key, value in make_global_settings_dict:
|
||||
if key in ["CC", "CXX"]:
|
||||
return os.path.join(options.toplevel_dir, value)
|
||||
|
||||
# Check to see if the compiler was specified as an environment variable.
|
||||
for key in ["CC_target", "CC", "CXX"]:
|
||||
compiler = os.environ.get(key)
|
||||
if compiler:
|
||||
return compiler
|
||||
|
||||
return "gcc"
|
||||
|
||||
|
||||
def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
|
||||
"""Calculate the defines for a project.
|
||||
|
||||
Returns:
|
||||
A dict that includes explicit defines declared in gyp files along with all
|
||||
of the default defines that the compiler uses.
|
||||
"""
|
||||
|
||||
# Get defines declared in the gyp files.
|
||||
all_defines = {}
|
||||
flavor = gyp.common.GetFlavor(params)
|
||||
if flavor == "win":
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
|
||||
if flavor == "win":
|
||||
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
|
||||
extra_defines = msvs_settings.GetComputedDefines(config_name)
|
||||
else:
|
||||
extra_defines = []
|
||||
if config_name in target["configurations"]:
|
||||
config = target["configurations"][config_name]
|
||||
target_defines = config["defines"]
|
||||
else:
|
||||
target_defines = []
|
||||
for define in target_defines + extra_defines:
|
||||
split_define = define.split("=", 1)
|
||||
if len(split_define) == 1:
|
||||
split_define.append("1")
|
||||
if split_define[0].strip() in all_defines:
|
||||
# Already defined
|
||||
continue
|
||||
all_defines[split_define[0].strip()] = split_define[1].strip()
|
||||
# Get default compiler defines (if possible).
|
||||
if flavor == "win":
|
||||
return all_defines # Default defines already processed in the loop above.
|
||||
if compiler_path:
|
||||
command = shlex.split(compiler_path)
|
||||
command.extend(["-E", "-dM", "-"])
|
||||
cpp_proc = subprocess.Popen(
|
||||
args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
|
||||
)
|
||||
cpp_output = cpp_proc.communicate()[0].decode("utf-8")
|
||||
cpp_lines = cpp_output.split("\n")
|
||||
for cpp_line in cpp_lines:
|
||||
if not cpp_line.strip():
|
||||
continue
|
||||
cpp_line_parts = cpp_line.split(" ", 2)
|
||||
key = cpp_line_parts[1]
|
||||
val = cpp_line_parts[2] if len(cpp_line_parts) >= 3 else "1"
|
||||
all_defines[key] = val
|
||||
|
||||
return all_defines
|
||||
|
||||
|
||||
def WriteIncludePaths(out, eclipse_langs, include_dirs):
|
||||
"""Write the includes section of a CDT settings export file."""
|
||||
|
||||
out.write(
|
||||
' <section name="org.eclipse.cdt.internal.ui.wizards.'
|
||||
'settingswizards.IncludePaths">\n'
|
||||
)
|
||||
out.write(' <language name="holder for library settings"></language>\n')
|
||||
for lang in eclipse_langs:
|
||||
out.write(' <language name="%s">\n' % lang)
|
||||
for include_dir in include_dirs:
|
||||
out.write(
|
||||
' <includepath workspace_path="false">%s</includepath>\n'
|
||||
% include_dir
|
||||
)
|
||||
out.write(" </language>\n")
|
||||
out.write(" </section>\n")
|
||||
|
||||
|
||||
def WriteMacros(out, eclipse_langs, defines):
|
||||
"""Write the macros section of a CDT settings export file."""
|
||||
|
||||
out.write(
|
||||
' <section name="org.eclipse.cdt.internal.ui.wizards.'
|
||||
'settingswizards.Macros">\n'
|
||||
)
|
||||
out.write(' <language name="holder for library settings"></language>\n')
|
||||
for lang in eclipse_langs:
|
||||
out.write(' <language name="%s">\n' % lang)
|
||||
for key in sorted(defines):
|
||||
out.write(
|
||||
" <macro><name>%s</name><value>%s</value></macro>\n"
|
||||
% (escape(key), escape(defines[key]))
|
||||
)
|
||||
out.write(" </language>\n")
|
||||
out.write(" </section>\n")
|
||||
|
||||
|
||||
def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
|
||||
options = params["options"]
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
|
||||
# build_dir: relative path from source root to our output files.
|
||||
# e.g. "out/Debug"
|
||||
build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name)
|
||||
|
||||
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
|
||||
# Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
|
||||
# SHARED_INTERMEDIATE_DIR. Include both possible locations.
|
||||
shared_intermediate_dirs = [
|
||||
os.path.join(toplevel_build, "obj", "gen"),
|
||||
os.path.join(toplevel_build, "gen"),
|
||||
]
|
||||
|
||||
GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
os.path.join(toplevel_build, "eclipse-cdt-settings.xml"),
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
)
|
||||
GenerateClasspathFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
options.toplevel_dir,
|
||||
toplevel_build,
|
||||
os.path.join(toplevel_build, "eclipse-classpath.xml"),
|
||||
)
|
||||
|
||||
|
||||
def GenerateCdtSettingsFile(
|
||||
target_list,
|
||||
target_dicts,
|
||||
data,
|
||||
params,
|
||||
config_name,
|
||||
out_name,
|
||||
options,
|
||||
shared_intermediate_dirs,
|
||||
):
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
with open(out_name, "w") as out:
|
||||
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
out.write("<cdtprojectproperties>\n")
|
||||
|
||||
eclipse_langs = [
|
||||
"C++ Source File",
|
||||
"C Source File",
|
||||
"Assembly Source File",
|
||||
"GNU C++",
|
||||
"GNU C",
|
||||
"Assembly",
|
||||
]
|
||||
compiler_path = GetCompilerPath(target_list, data, options)
|
||||
include_dirs = GetAllIncludeDirectories(
|
||||
target_list,
|
||||
target_dicts,
|
||||
shared_intermediate_dirs,
|
||||
config_name,
|
||||
params,
|
||||
compiler_path,
|
||||
)
|
||||
WriteIncludePaths(out, eclipse_langs, include_dirs)
|
||||
defines = GetAllDefines(
|
||||
target_list, target_dicts, data, config_name, params, compiler_path
|
||||
)
|
||||
WriteMacros(out, eclipse_langs, defines)
|
||||
|
||||
out.write("</cdtprojectproperties>\n")
|
||||
|
||||
|
||||
def GenerateClasspathFile(
|
||||
target_list, target_dicts, toplevel_dir, toplevel_build, out_name
|
||||
):
|
||||
"""Generates a classpath file suitable for symbol navigation and code
|
||||
completion of Java code (such as in Android projects) by finding all
|
||||
.java and .jar files used as action inputs."""
|
||||
gyp.common.EnsureDirExists(out_name)
|
||||
result = ET.Element("classpath")
|
||||
|
||||
def AddElements(kind, paths):
|
||||
# First, we need to normalize the paths so they are all relative to the
|
||||
# toplevel dir.
|
||||
rel_paths = set()
|
||||
for path in paths:
|
||||
if os.path.isabs(path):
|
||||
rel_paths.add(os.path.relpath(path, toplevel_dir))
|
||||
else:
|
||||
rel_paths.add(path)
|
||||
|
||||
for path in sorted(rel_paths):
|
||||
entry_element = ET.SubElement(result, "classpathentry")
|
||||
entry_element.set("kind", kind)
|
||||
entry_element.set("path", path)
|
||||
|
||||
AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir))
|
||||
AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
|
||||
# Include the standard JRE container and a dummy out folder
|
||||
AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"])
|
||||
# Include a dummy out folder so that Eclipse doesn't use the default /bin
|
||||
# folder in the root of the project.
|
||||
AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")])
|
||||
|
||||
ET.ElementTree(result).write(out_name)
|
||||
|
||||
|
||||
def GetJavaJars(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all .jars used as inputs."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"):
|
||||
if os.path.isabs(input_):
|
||||
yield input_
|
||||
else:
|
||||
yield os.path.join(os.path.dirname(target_name), input_)
|
||||
|
||||
|
||||
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
|
||||
"""Generates a sequence of all likely java package root directories."""
|
||||
for target_name in target_list:
|
||||
target = target_dicts[target_name]
|
||||
for action in target.get("actions", []):
|
||||
for input_ in action["inputs"]:
|
||||
if os.path.splitext(input_)[1] == ".java" and not input_.startswith(
|
||||
"$"
|
||||
):
|
||||
dir_ = os.path.dirname(
|
||||
os.path.join(os.path.dirname(target_name), input_)
|
||||
)
|
||||
# If there is a parent 'src' or 'java' folder, navigate up to it -
|
||||
# these are canonical package root names in Chromium. This will
|
||||
# break if 'src' or 'java' exists in the package structure. This
|
||||
# could be further improved by inspecting the java file for the
|
||||
# package name if this proves to be too fragile in practice.
|
||||
parent_search = dir_
|
||||
while os.path.basename(parent_search) not in ["src", "java"]:
|
||||
parent_search, _ = os.path.split(parent_search)
|
||||
if not parent_search or parent_search == toplevel_dir:
|
||||
# Didn't find a known root, just return the original path
|
||||
yield dir_
|
||||
break
|
||||
else:
|
||||
yield parent_search
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
"""Generate an XML settings file that can be imported into a CDT project."""
|
||||
|
||||
if params["options"].generator_output:
|
||||
raise NotImplementedError("--generator_output not implemented for eclipse")
|
||||
|
||||
user_config = params.get("generator_flags", {}).get("config", None)
|
||||
if user_config:
|
||||
GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
|
||||
else:
|
||||
config_names = target_dicts[target_list[0]]["configurations"]
|
||||
for config_name in config_names:
|
||||
GenerateOutputForConfig(
|
||||
target_list, target_dicts, data, params, config_name
|
||||
)
|
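GetAllDefines above asks the compiler itself for its builtin macros by preprocessing empty input with `-E -dM`. A standalone sketch of the same technique, assuming a gcc/clang-compatible driver named `cc` on the PATH (this helper is not part of gyp):

```python
import shlex
import subprocess


def builtin_defines(compiler="cc"):
    """Return the compiler's predefined macros as a dict, as GetAllDefines does."""
    command = shlex.split(compiler) + ["-E", "-dM", "-"]
    proc = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    output = proc.communicate(input=b"")[0].decode("utf-8")
    defines = {}
    for line in output.splitlines():
        if not line.strip():
            continue
        # Each line looks like "#define NAME VALUE"; VALUE may be absent.
        parts = line.split(" ", 2)
        defines[parts[1]] = parts[2] if len(parts) >= 3 else "1"
    return defines


if __name__ == "__main__":
    for name, value in sorted(builtin_defines().items())[:5]:
        print(f"{name} = {value}")
```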
89
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
generated
vendored
Executable file
|
@ -0,0 +1,89 @@
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypd output module
|
||||
|
||||
This module produces gyp input as its output. Output files are given the
|
||||
.gypd extension to avoid overwriting the .gyp files that they are generated
|
||||
from. Internal references to .gyp files (such as those found in
|
||||
"dependencies" sections) are not adjusted to point to .gypd files instead;
|
||||
unlike other paths, which are relative to the .gyp or .gypd file, such paths
|
||||
are relative to the directory from which gyp was run to create the .gypd file.
|
||||
|
||||
This generator module is intended to be a sample and a debugging aid, hence
|
||||
the "d" for "debug" in .gypd. It is useful to inspect the results of the
|
||||
various merges, expansions, and conditional evaluations performed by gyp
|
||||
and to see a representation of what would be fed to a generator module.
|
||||
|
||||
It's not advisable to rename .gypd files produced by this module to .gyp,
|
||||
because they will have all merges, expansions, and evaluations already
|
||||
performed and the relevant constructs not present in the output; paths to
|
||||
dependencies may be wrong; and various sections that do not belong in .gyp
|
||||
files, such as "included_files" and "*_excluded", will be present.
|
||||
Output will also be stripped of comments. This is not intended to be a
|
||||
general-purpose gyp pretty-printer; for that, you probably just want to
|
||||
run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
|
||||
comments but won't do all of the other things done to this module's output.
|
||||
|
||||
The specific formatting of the output generated by this module is subject
|
||||
to change.
|
||||
"""
|
||||
|
||||
|
||||
import gyp.common
|
||||
import pprint
|
||||
|
||||
|
||||
# These variables should just be spit back out as variable references.
|
||||
_generator_identity_variables = [
|
||||
"CONFIGURATION_NAME",
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"LIB_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
"SHARED_LIB_DIR",
|
||||
"SHARED_LIB_PREFIX",
|
||||
"SHARED_LIB_SUFFIX",
|
||||
"STATIC_LIB_PREFIX",
|
||||
"STATIC_LIB_SUFFIX",
|
||||
]
|
||||
|
||||
# gypd doesn't define a default value for OS like many other generator
|
||||
# modules. Specify "-D OS=whatever" on the command line to provide a value.
|
||||
generator_default_variables = {}
|
||||
|
||||
# gypd supports multiple toolsets
|
||||
generator_supports_multiple_toolsets = True
|
||||
|
||||
# TODO(mark): This always uses <, which isn't right. The input module should
|
||||
# notify the generator to tell it which phase it is operating in, and this
|
||||
# module should use < for the early phase and then switch to > for the late
|
||||
# phase. Bonus points for carrying @ back into the output too.
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
output_files = {}
|
||||
for qualified_target in target_list:
|
||||
[input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
|
||||
|
||||
if input_file[-4:] != ".gyp":
|
||||
continue
|
||||
input_file_stem = input_file[:-4]
|
||||
output_file = input_file_stem + params["options"].suffix + ".gypd"
|
||||
|
||||
output_files[output_file] = output_files.get(output_file, input_file)
|
||||
|
||||
for output_file, input_file in output_files.items():
|
||||
output = open(output_file, "w")
|
||||
pprint.pprint(data[input_file], output)
|
||||
output.close()
|
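The gypd module docstring above points at pprint as a quick way to inspect a raw .gyp file without running gyp at all. A minimal sketch of that one-liner as a script (the source.gyp path is illustrative; as the docstring implies, this evaluates the file, so only use it on gyp files you trust):

```python
import pprint

# Mirrors the one-liner quoted in the gypd module docstring; "source.gyp" is
# an illustrative path. A .gyp file is a Python dictionary literal, so eval()
# on its text yields the unprocessed data structure.
with open("source.gyp") as f:
    pprint.pprint(eval(f.read()))
```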
58
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
generated
vendored
Executable file
|
@ -0,0 +1,58 @@
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""gypsh output module
|
||||
|
||||
gypsh is a GYP shell. It's not really a generator per se. All it does is
|
||||
fire up an interactive Python session with a few local variables set to the
|
||||
variables passed to the generator. Like gypd, it's intended as a debugging
|
||||
aid, to facilitate the exploration of .gyp structures after being processed
|
||||
by the input module.
|
||||
|
||||
The expected usage is "gyp -f gypsh -D OS=desired_os".
|
||||
"""
|
||||
|
||||
|
||||
import code
|
||||
import sys
|
||||
|
||||
|
||||
# All of this stuff about generator variables was lovingly ripped from gypd.py.
|
||||
# That module has a much better description of what's going on and why.
|
||||
_generator_identity_variables = [
|
||||
"EXECUTABLE_PREFIX",
|
||||
"EXECUTABLE_SUFFIX",
|
||||
"INTERMEDIATE_DIR",
|
||||
"PRODUCT_DIR",
|
||||
"RULE_INPUT_ROOT",
|
||||
"RULE_INPUT_DIRNAME",
|
||||
"RULE_INPUT_EXT",
|
||||
"RULE_INPUT_NAME",
|
||||
"RULE_INPUT_PATH",
|
||||
"SHARED_INTERMEDIATE_DIR",
|
||||
]
|
||||
|
||||
generator_default_variables = {}
|
||||
|
||||
for v in _generator_identity_variables:
|
||||
generator_default_variables[v] = "<(%s)" % v
|
||||
|
||||
|
||||
def GenerateOutput(target_list, target_dicts, data, params):
|
||||
locals = {
|
||||
"target_list": target_list,
|
||||
"target_dicts": target_dicts,
|
||||
"data": data,
|
||||
}
|
||||
|
||||
# Use a banner that looks like the stock Python one and like what
|
||||
# code.interact uses by default, but tack on something to indicate what
|
||||
# locals are available, and identify gypsh.
|
||||
banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format(
|
||||
sys.version,
|
||||
sys.platform,
|
||||
repr(sorted(locals.keys())),
|
||||
)
|
||||
|
||||
code.interact(banner, local=locals)
|
2711
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
generated
vendored
Executable file
File diff suppressed because it is too large
3981
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
generated
vendored
Executable file
File diff suppressed because it is too large
44
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
generated
vendored
Executable file
|
@ -0,0 +1,44 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the msvs.py file. """
|
||||
|
||||
import gyp.generator.msvs as msvs
|
||||
import unittest
|
||||
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class TestSequenceFunctions(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.stderr = StringIO()
|
||||
|
||||
def test_GetLibraries(self):
|
||||
self.assertEqual(msvs._GetLibraries({}), [])
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": []}), [])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"]
|
||||
)
|
||||
self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"])
|
||||
self.assertEqual(
|
||||
msvs._GetLibraries(
|
||||
{
|
||||
"libraries": [
|
||||
"a.lib",
|
||||
"b.lib",
|
||||
"c.lib",
|
||||
"-lb.lib",
|
||||
"-lb.lib",
|
||||
"d.lib",
|
||||
"a.lib",
|
||||
]
|
||||
}
|
||||
),
|
||||
["c.lib", "b.lib", "d.lib", "a.lib"],
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
2933
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
generated
vendored
Executable file
File diff suppressed because it is too large
55
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
generated
vendored
Executable file
|
@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the ninja.py file. """
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
import gyp.generator.ninja as ninja
|
||||
|
||||
|
||||
class TestPrefixesAndSuffixes(unittest.TestCase):
|
||||
def test_BinaryNamesWindows(self):
|
||||
# These cannot run on non-Windows as they require a VS installation to
|
||||
# correctly handle variable expansion.
|
||||
if sys.platform.startswith("win"):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "executable").endswith(".exe")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".lib")
|
||||
)
|
||||
|
||||
def test_BinaryNamesLinux(self):
|
||||
writer = ninja.NinjaWriter(
|
||||
"foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux"
|
||||
)
|
||||
spec = {"target_name": "wee"}
|
||||
self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable"))
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").startswith("lib")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "shared_library").endswith(".so")
|
||||
)
|
||||
self.assertTrue(
|
||||
writer.ComputeOutputFileName(spec, "static_library").endswith(".a")
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
1391
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
generated
vendored
Executable file
File diff suppressed because it is too large
25
my-app/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
generated
vendored
Executable file
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
""" Unit tests for the xcode.py file. """
|
||||
|
||||
import gyp.generator.xcode as xcode
|
||||
import unittest
|
||||
import sys
|
||||
|
||||
|
||||
class TestEscapeXcodeDefine(unittest.TestCase):
|
||||
if sys.platform == "darwin":
|
||||
|
||||
def test_InheritedRemainsUnescaped(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)")
|
||||
|
||||
def test_Escaping(self):
|
||||
self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
3115
my-app/node_modules/node-gyp/gyp/pylib/gyp/input.py
generated
vendored
Executable file
File diff suppressed because it is too large
98
my-app/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
generated
vendored
Executable file
|
@ -0,0 +1,98 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright 2013 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Unit tests for the input.py file."""
|
||||
|
||||
import gyp.input
|
||||
import unittest
|
||||
|
||||
|
||||
class TestFindCycles(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.nodes = {}
|
||||
for x in ("a", "b", "c", "d", "e"):
|
||||
self.nodes[x] = gyp.input.DependencyGraphNode(x)
|
||||
|
||||
def _create_dependency(self, dependent, dependency):
|
||||
dependent.dependencies.append(dependency)
|
||||
dependency.dependents.append(dependent)
|
||||
|
||||
def test_no_cycle_empty_graph(self):
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_line(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_no_cycle_dag(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["a"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
|
||||
for label, node in self.nodes.items():
|
||||
self.assertEqual([], node.FindCycles())
|
||||
|
||||
def test_cycle_self_reference(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles()
|
||||
)
|
||||
|
||||
def test_cycle_two_nodes(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[[self.nodes["a"], self.nodes["b"], self.nodes["a"]]],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
self.assertEqual(
|
||||
[[self.nodes["b"], self.nodes["a"], self.nodes["b"]]],
|
||||
self.nodes["b"].FindCycles(),
|
||||
)
|
||||
|
||||
def test_two_cycles(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["a"])
|
||||
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["b"])
|
||||
|
||||
cycles = self.nodes["a"].FindCycles()
|
||||
self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles)
|
||||
self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles)
|
||||
self.assertEqual(2, len(cycles))
|
||||
|
||||
def test_big_cycle(self):
|
||||
self._create_dependency(self.nodes["a"], self.nodes["b"])
|
||||
self._create_dependency(self.nodes["b"], self.nodes["c"])
|
||||
self._create_dependency(self.nodes["c"], self.nodes["d"])
|
||||
self._create_dependency(self.nodes["d"], self.nodes["e"])
|
||||
self._create_dependency(self.nodes["e"], self.nodes["a"])
|
||||
|
||||
self.assertEqual(
|
||||
[
|
||||
[
|
||||
self.nodes["a"],
|
||||
self.nodes["b"],
|
||||
self.nodes["c"],
|
||||
self.nodes["d"],
|
||||
self.nodes["e"],
|
||||
self.nodes["a"],
|
||||
]
|
||||
],
|
||||
self.nodes["a"].FindCycles(),
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
771
my-app/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
generated
vendored
Executable file
|
@ -0,0 +1,771 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions to perform Xcode-style build steps.
|
||||
|
||||
These functions are executed via gyp-mac-tool when using the Makefile generator.
|
||||
"""
|
||||
|
||||
|
||||
import fcntl
|
||||
import fnmatch
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import plistlib
|
||||
import re
|
||||
import shutil
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = MacTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class MacTool:
|
||||
"""This class performs all the Mac tooling steps. The methods can either be
|
||||
executed directly, or dispatched from an argument list."""
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
return getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
|
||||
return name_string.title().replace("-", "")
|
||||
|
||||
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
|
||||
"""Copies a resource file to the bundle/Resources directory, performing any
|
||||
necessary compilation on each resource."""
|
||||
convert_to_binary = convert_to_binary == "True"
|
||||
extension = os.path.splitext(source)[1].lower()
|
||||
if os.path.isdir(source):
|
||||
# Copy tree.
|
||||
# TODO(thakis): This copies file attributes like mtime, while the
|
||||
# single-file branch below doesn't. This should probably be changed to
|
||||
# be consistent with the single-file branch.
|
||||
if os.path.exists(dest):
|
||||
shutil.rmtree(dest)
|
||||
shutil.copytree(source, dest)
|
||||
elif extension == ".xib":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".storyboard":
|
||||
return self._CopyXIBFile(source, dest)
|
||||
elif extension == ".strings" and not convert_to_binary:
|
||||
self._CopyStringsFile(source, dest)
|
||||
else:
|
||||
if os.path.exists(dest):
|
||||
os.unlink(dest)
|
||||
shutil.copy(source, dest)
|
||||
|
||||
if convert_to_binary and extension in (".plist", ".strings"):
|
||||
self._ConvertToBinary(dest)
|
||||
|
||||
def _CopyXIBFile(self, source, dest):
|
||||
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
|
||||
|
||||
# ibtool sometimes crashes with relative paths. See crbug.com/314728.
|
||||
base = os.path.dirname(os.path.realpath(__file__))
|
||||
if os.path.relpath(source):
|
||||
source = os.path.join(base, source)
|
||||
if os.path.relpath(dest):
|
||||
dest = os.path.join(base, dest)
|
||||
|
||||
args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"]
|
||||
|
||||
if os.environ["XCODE_VERSION_ACTUAL"] > "0700":
|
||||
args.extend(["--auto-activate-custom-fonts"])
|
||||
if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
else:
|
||||
args.extend(
|
||||
[
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
]
|
||||
)
|
||||
|
||||
args.extend(
|
||||
["--output-format", "human-readable-text", "--compile", dest, source]
|
||||
)
|
||||
|
||||
ibtool_section_re = re.compile(r"/\*.*\*/")
|
||||
ibtool_re = re.compile(r".*note:.*is clipping its content")
|
||||
try:
|
||||
stdout = subprocess.check_output(args)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(e.output)
|
||||
raise
|
||||
current_section_header = None
|
||||
for line in stdout.splitlines():
|
||||
if ibtool_section_re.match(line):
|
||||
current_section_header = line
|
||||
elif not ibtool_re.match(line):
|
||||
if current_section_header:
|
||||
print(current_section_header)
|
||||
current_section_header = None
|
||||
print(line)
|
||||
return 0
|
||||
|
||||
def _ConvertToBinary(self, dest):
|
||||
subprocess.check_call(
|
||||
["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest]
|
||||
)
|
||||
|
||||
def _CopyStringsFile(self, source, dest):
|
||||
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
|
||||
input_code = self._DetectInputEncoding(source) or "UTF-8"
|
||||
|
||||
# Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
|
||||
# CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
|
||||
# CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
|
||||
# semicolon in dictionary.
|
||||
# on invalid files. Do the same kind of validation.
|
||||
import CoreFoundation
|
||||
|
||||
with open(source, "rb") as in_file:
|
||||
s = in_file.read()
|
||||
d = CoreFoundation.CFDataCreate(None, s, len(s))
|
||||
_, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
|
||||
if error:
|
||||
return
|
||||
|
||||
with open(dest, "wb") as fp:
|
||||
fp.write(s.decode(input_code).encode("UTF-16"))
|
||||
|
||||
def _DetectInputEncoding(self, file_name):
|
||||
"""Reads the first few bytes from file_name and tries to guess the text
|
||||
encoding. Returns None as a guess if it can't detect it."""
|
||||
with open(file_name, "rb") as fp:
|
||||
try:
|
||||
header = fp.read(3)
|
||||
except Exception:
|
||||
return None
|
||||
if header.startswith(b"\xFE\xFF"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xFF\xFE"):
|
||||
return "UTF-16"
|
||||
elif header.startswith(b"\xEF\xBB\xBF"):
|
||||
return "UTF-8"
|
||||
else:
|
||||
return None
|
||||
|
||||
def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
|
||||
"""Copies the |source| Info.plist to the destination directory |dest|."""
|
||||
# Read the source Info.plist into memory.
|
||||
with open(source) as fd:
|
||||
lines = fd.read()
|
||||
|
||||
# Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
|
||||
plist = plistlib.readPlistFromString(lines)
|
||||
if keys:
|
||||
plist.update(json.loads(keys[0]))
|
||||
lines = plistlib.writePlistToString(plist)
|
||||
|
||||
# Go through all the environment variables and replace them as variables in
|
||||
# the file.
|
||||
IDENT_RE = re.compile(r"[_/\s]")
|
||||
for key in os.environ:
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
evar = "${%s}" % key
|
||||
evalue = os.environ[key]
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
# Xcode supports various suffixes on environment variables, which are
|
||||
# all undocumented. :rfc1034identifier is used in the standard project
|
||||
# template these days, and :identifier was used earlier. They are used to
|
||||
# convert non-url characters into things that look like valid urls --
|
||||
# except that the replacement character for :identifier, '_' isn't valid
|
||||
# in a URL either -- oops, hence :rfc1034identifier was born.
|
||||
evar = "${%s:identifier}" % key
|
||||
evalue = IDENT_RE.sub("_", os.environ[key])
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
evar = "${%s:rfc1034identifier}" % key
|
||||
evalue = IDENT_RE.sub("-", os.environ[key])
|
||||
lines = lines.replace(evar, evalue)
|
||||
|
||||
# Remove any keys with values that haven't been replaced.
|
||||
lines = lines.splitlines()
|
||||
for i in range(len(lines)):
|
||||
if lines[i].strip().startswith("<string>${"):
|
||||
lines[i] = None
|
||||
lines[i - 1] = None
|
||||
lines = "\n".join(line for line in lines if line is not None)
|
||||
|
||||
# Write out the file with variables replaced.
|
||||
with open(dest, "w") as fd:
|
||||
fd.write(lines)
|
||||
|
||||
# Now write out PkgInfo file now that the Info.plist file has been
|
||||
# "compiled".
|
||||
self._WritePkgInfo(dest)
|
||||
|
||||
if convert_to_binary == "True":
|
||||
self._ConvertToBinary(dest)
|
||||
|
||||
def _WritePkgInfo(self, info_plist):
|
||||
"""This writes the PkgInfo file from the data stored in Info.plist."""
|
||||
plist = plistlib.readPlist(info_plist)
|
||||
if not plist:
|
||||
return
|
||||
|
||||
# Only create PkgInfo for executable types.
|
||||
package_type = plist["CFBundlePackageType"]
|
||||
if package_type != "APPL":
|
||||
return
|
||||
|
||||
# The format of PkgInfo is eight characters, representing the bundle type
|
||||
# and bundle signature, each four characters. If that is missing, four
|
||||
# '?' characters are used instead.
|
||||
signature_code = plist.get("CFBundleSignature", "????")
|
||||
if len(signature_code) != 4: # Wrong length resets everything, too.
|
||||
signature_code = "?" * 4
|
||||
|
||||
dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
|
||||
with open(dest, "w") as fp:
|
||||
fp.write(f"{package_type}{signature_code}")
|
||||
|
||||
def ExecFlock(self, lockfile, *cmd_list):
|
||||
"""Emulates the most basic behavior of Linux's flock(1)."""
|
||||
# Rely on exception handling to report errors.
|
||||
fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
|
||||
fcntl.flock(fd, fcntl.LOCK_EX)
|
||||
return subprocess.call(cmd_list)
|
||||
|
||||
def ExecFilterLibtool(self, *cmd_list):
|
||||
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
|
||||
symbols'."""
|
||||
libtool_re = re.compile(
|
||||
r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
|
||||
)
|
||||
libtool_re5 = re.compile(
|
||||
r"^.*libtool: warning for library: "
|
||||
+ r".* the table of contents is empty "
|
||||
+ r"\(no object file members in the library define global symbols\)$"
|
||||
)
|
||||
env = os.environ.copy()
|
||||
# Ref:
|
||||
# http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
|
||||
# The problem with this flag is that it resets the file mtime on the file to
|
||||
# epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
|
||||
env["ZERO_AR_DATE"] = "1"
|
||||
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
|
||||
err = libtoolout.communicate()[1].decode("utf-8")
|
||||
for line in err.splitlines():
|
||||
if not libtool_re.match(line) and not libtool_re5.match(line):
|
||||
print(line, file=sys.stderr)
|
||||
# Unconditionally touch the output .a file on the command line if present
|
||||
# and the command succeeded. A bit hacky.
|
||||
if not libtoolout.returncode:
|
||||
for i in range(len(cmd_list) - 1):
|
||||
if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"):
|
||||
os.utime(cmd_list[i + 1], None)
|
||||
break
|
||||
return libtoolout.returncode
|
||||
|
||||
def ExecPackageIosFramework(self, framework):
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
module_path = os.path.join(framework, "Modules")
|
||||
if not os.path.exists(module_path):
|
||||
os.mkdir(module_path)
|
||||
module_template = (
|
||||
"framework module %s {\n"
|
||||
' umbrella header "%s.h"\n'
|
||||
"\n"
|
||||
" export *\n"
|
||||
" module * { export * }\n"
|
||||
"}\n" % (binary, binary)
|
||||
)
|
||||
|
||||
with open(os.path.join(module_path, "module.modulemap"), "w") as module_file:
|
||||
module_file.write(module_template)
|
||||
|
||||
def ExecPackageFramework(self, framework, version):
|
||||
"""Takes a path to Something.framework and the Current version of that and
|
||||
sets up all the symlinks."""
|
||||
# Find the name of the binary based on the part before the ".framework".
|
||||
binary = os.path.basename(framework).split(".")[0]
|
||||
|
||||
CURRENT = "Current"
|
||||
RESOURCES = "Resources"
|
||||
VERSIONS = "Versions"
|
||||
|
||||
if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
|
||||
# Binary-less frameworks don't seem to contain symlinks (see e.g.
|
||||
# chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
|
||||
return
|
||||
|
||||
# Move into the framework directory to set the symlinks correctly.
|
||||
pwd = os.getcwd()
|
||||
os.chdir(framework)
|
||||
|
||||
# Set up the Current version.
|
||||
self._Relink(version, os.path.join(VERSIONS, CURRENT))
|
||||
|
||||
# Set up the root symlinks.
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
|
||||
self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
|
||||
|
||||
# Back to where we were before!
|
||||
os.chdir(pwd)
|
||||
|
||||
def _Relink(self, dest, link):
|
||||
"""Creates a symlink to |dest| named |link|. If |link| already exists,
|
||||
it is overwritten."""
|
||||
if os.path.lexists(link):
|
||||
os.remove(link)
|
||||
os.symlink(dest, link)
|
||||
|
||||
def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
|
||||
framework_name = os.path.basename(framework).split(".")[0]
|
||||
all_headers = [os.path.abspath(header) for header in all_headers]
|
||||
filelist = {}
|
||||
for header in all_headers:
|
||||
filename = os.path.basename(header)
|
||||
filelist[filename] = header
|
||||
filelist[os.path.join(framework_name, filename)] = header
|
||||
WriteHmap(out, filelist)
|
||||
|
||||
def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
|
||||
header_path = os.path.join(framework, "Headers")
|
||||
if not os.path.exists(header_path):
|
||||
os.makedirs(header_path)
|
||||
for header in copy_headers:
|
||||
shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
|
||||
|
||||
def ExecCompileXcassets(self, keys, *inputs):
|
||||
"""Compiles multiple .xcassets files into a single .car file.
|
||||
|
||||
This invokes 'actool' to compile all the inputs .xcassets files. The
|
||||
|keys| argument is a json-encoded dictionary of extra arguments to
|
||||
pass to 'actool' when the asset catalogs contain an application icon
|
||||
or a launch image.
|
||||
|
||||
Note that 'actool' does not create the Assets.car file if the asset
|
||||
catalogs do not contain an imageset.
|
||||
"""
|
||||
command_line = [
|
||||
"xcrun",
|
||||
"actool",
|
||||
"--output-format",
|
||||
"human-readable-text",
|
||||
"--compress-pngs",
|
||||
"--notices",
|
||||
"--warnings",
|
||||
"--errors",
|
||||
]
|
||||
is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ
|
||||
if is_iphone_target:
|
||||
platform = os.environ["CONFIGURATION"].split("-")[-1]
|
||||
if platform not in ("iphoneos", "iphonesimulator"):
|
||||
platform = "iphonesimulator"
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
platform,
|
||||
"--target-device",
|
||||
"iphone",
|
||||
"--target-device",
|
||||
"ipad",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
else:
|
||||
command_line.extend(
|
||||
[
|
||||
"--platform",
|
||||
"macosx",
|
||||
"--target-device",
|
||||
"mac",
|
||||
"--minimum-deployment-target",
|
||||
os.environ["MACOSX_DEPLOYMENT_TARGET"],
|
||||
"--compile",
|
||||
os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]),
|
||||
]
|
||||
)
|
||||
if keys:
|
||||
keys = json.loads(keys)
|
||||
for key, value in keys.items():
|
||||
arg_name = "--" + key
|
||||
if isinstance(value, bool):
|
||||
if value:
|
||||
command_line.append(arg_name)
|
||||
elif isinstance(value, list):
|
||||
for v in value:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(v))
|
||||
else:
|
||||
command_line.append(arg_name)
|
||||
command_line.append(str(value))
|
||||
# Note: actool crashes if inputs path are relative, so use os.path.abspath
|
||||
# to get absolute path name for inputs.
|
||||
command_line.extend(map(os.path.abspath, inputs))
|
||||
subprocess.check_call(command_line)
|
||||
|
||||
def ExecMergeInfoPlist(self, output, *inputs):
|
||||
"""Merge multiple .plist files into a single .plist file."""
|
||||
merged_plist = {}
|
||||
for path in inputs:
|
||||
plist = self._LoadPlistMaybeBinary(path)
|
||||
self._MergePlist(merged_plist, plist)
|
||||
plistlib.writePlist(merged_plist, output)
|
||||
|
||||
def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
|
||||
"""Code sign a bundle.
|
||||
|
||||
This function tries to code sign an iOS bundle, following the same
|
||||
algorithm as Xcode:
|
||||
1. pick the provisioning profile that best match the bundle identifier,
|
||||
and copy it into the bundle as embedded.mobileprovision,
|
||||
2. copy Entitlements.plist from user or SDK next to the bundle,
|
||||
3. code sign the bundle.
|
||||
"""
|
||||
substitutions, overrides = self._InstallProvisioningProfile(
|
||||
provisioning, self._GetCFBundleIdentifier()
|
||||
)
|
||||
entitlements_path = self._InstallEntitlements(
|
||||
entitlements, substitutions, overrides
|
||||
)
|
||||
|
||||
args = ["codesign", "--force", "--sign", key]
|
||||
if preserve == "True":
|
||||
args.extend(["--deep", "--preserve-metadata=identifier,entitlements"])
|
||||
else:
|
||||
args.extend(["--entitlements", entitlements_path])
|
||||
args.extend(["--timestamp=none", path])
|
||||
subprocess.check_call(args)
|
||||
|
||||
def _InstallProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Installs embedded.mobileprovision into the bundle.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use, if empty or the file is missing, the best file installed
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
A tuple containing two dictionaries: variable substitutions and values
|
||||
to override when generating the entitlements file.
|
||||
"""
|
||||
source_path, provisioning_data, team_id = self._FindProvisioningProfile(
|
||||
profile, bundle_identifier
|
||||
)
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"],
|
||||
os.environ["CONTENTS_FOLDER_PATH"],
|
||||
"embedded.mobileprovision",
|
||||
)
|
||||
shutil.copy2(source_path, target_path)
|
||||
substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".")
|
||||
return substitutions, provisioning_data["Entitlements"]
|
||||
|
||||
def _FindProvisioningProfile(self, profile, bundle_identifier):
|
||||
"""Finds the .mobileprovision file to use for signing the bundle.
|
||||
|
||||
Checks all the installed provisioning profiles (or if the user specified
|
||||
the PROVISIONING_PROFILE variable, only consults it) and selects the most
|
||||
specific one that corresponds to the bundle identifier.
|
||||
|
||||
Args:
|
||||
profile: string, optional, short name of the .mobileprovision file
|
||||
to use, if empty or the file is missing, the best file installed
|
||||
will be used
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
|
||||
Returns:
|
||||
A tuple of the path to the selected provisioning profile, the data of
|
||||
the embedded plist in the provisioning profile and the team identifier
|
||||
to use for code signing.
|
||||
|
||||
Raises:
|
||||
SystemExit: if no .mobileprovision can be used to sign the bundle.
|
||||
"""
|
||||
profiles_dir = os.path.join(
|
||||
os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
|
||||
)
|
||||
if not os.path.isdir(profiles_dir):
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
provisioning_profiles = None
|
||||
if profile:
|
||||
profile_path = os.path.join(profiles_dir, profile + ".mobileprovision")
|
||||
if os.path.exists(profile_path):
|
||||
provisioning_profiles = [profile_path]
|
||||
if not provisioning_profiles:
|
||||
provisioning_profiles = glob.glob(
|
||||
os.path.join(profiles_dir, "*.mobileprovision")
|
||||
)
|
||||
valid_provisioning_profiles = {}
|
||||
for profile_path in provisioning_profiles:
|
||||
profile_data = self._LoadProvisioningProfile(profile_path)
|
||||
app_id_pattern = profile_data.get("Entitlements", {}).get(
|
||||
"application-identifier", ""
|
||||
)
|
||||
for team_identifier in profile_data.get("TeamIdentifier", []):
|
||||
app_id = f"{team_identifier}.{bundle_identifier}"
|
||||
if fnmatch.fnmatch(app_id, app_id_pattern):
|
||||
valid_provisioning_profiles[app_id_pattern] = (
|
||||
profile_path,
|
||||
profile_data,
|
||||
team_identifier,
|
||||
)
|
||||
if not valid_provisioning_profiles:
|
||||
print(
|
||||
"cannot find mobile provisioning for %s" % (bundle_identifier),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
# If the user has multiple provisioning profiles installed that can be
|
||||
# used for ${bundle_identifier}, pick the most specific one (ie. the
|
||||
# provisioning profile whose pattern is the longest).
|
||||
selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
|
||||
return valid_provisioning_profiles[selected_key]
|
||||
|
||||
def _LoadProvisioningProfile(self, profile_path):
|
||||
"""Extracts the plist embedded in a provisioning profile.
|
||||
|
||||
Args:
|
||||
profile_path: string, path to the .mobileprovision file
|
||||
|
||||
Returns:
|
||||
Content of the plist embedded in the provisioning profile as a dictionary.
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
subprocess.check_call(
|
||||
["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
|
||||
)
|
||||
return self._LoadPlistMaybeBinary(temp.name)
|
||||
|
||||
def _MergePlist(self, merged_plist, plist):
|
||||
"""Merge |plist| into |merged_plist|."""
|
||||
for key, value in plist.items():
|
||||
if isinstance(value, dict):
|
||||
merged_value = merged_plist.get(key, {})
|
||||
if isinstance(merged_value, dict):
|
||||
self._MergePlist(merged_value, value)
|
||||
merged_plist[key] = merged_value
|
||||
else:
|
||||
merged_plist[key] = value
|
||||
else:
|
||||
merged_plist[key] = value
|
||||
|
||||
def _LoadPlistMaybeBinary(self, plist_path):
|
||||
"""Loads into a memory a plist possibly encoded in binary format.
|
||||
|
||||
This is a wrapper around plistlib.readPlist that tries to convert the
|
||||
plist to the XML format if it can't be parsed (assuming that it is in
|
||||
the binary format).
|
||||
|
||||
Args:
|
||||
plist_path: string, path to a plist file, in XML or binary format
|
||||
|
||||
Returns:
|
||||
Content of the plist as a dictionary.
|
||||
"""
|
||||
try:
|
||||
# First, try to read the file using plistlib that only supports XML,
|
||||
# and if an exception is raised, convert a temporary copy to XML and
|
||||
# load that copy.
|
||||
return plistlib.readPlist(plist_path)
|
||||
except Exception:
|
||||
pass
|
||||
with tempfile.NamedTemporaryFile() as temp:
|
||||
shutil.copy2(plist_path, temp.name)
|
||||
subprocess.check_call(["plutil", "-convert", "xml1", temp.name])
|
||||
return plistlib.readPlist(temp.name)
|
||||
|
||||
def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
|
||||
"""Constructs a dictionary of variable substitutions for Entitlements.plist.
|
||||
|
||||
Args:
|
||||
bundle_identifier: string, value of CFBundleIdentifier from Info.plist
|
||||
app_identifier_prefix: string, value for AppIdentifierPrefix
|
||||
|
||||
Returns:
|
||||
Dictionary of substitutions to apply when generating Entitlements.plist.
|
||||
"""
|
||||
return {
|
||||
"CFBundleIdentifier": bundle_identifier,
|
||||
"AppIdentifierPrefix": app_identifier_prefix,
|
||||
}
|
||||
|
||||
def _GetCFBundleIdentifier(self):
|
||||
"""Extracts CFBundleIdentifier value from Info.plist in the bundle.
|
||||
|
||||
Returns:
|
||||
Value of CFBundleIdentifier in the Info.plist located in the bundle.
|
||||
"""
|
||||
info_plist_path = os.path.join(
|
||||
os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
|
||||
)
|
||||
info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
|
||||
return info_plist_data["CFBundleIdentifier"]
|
||||
|
||||
def _InstallEntitlements(self, entitlements, substitutions, overrides):
|
||||
"""Generates and install the ${BundleName}.xcent entitlements file.
|
||||
|
||||
Expands the "$(variable)" patterns in the source entitlements file,
|
||||
adds extra entitlements defined in the .mobileprovision file and copies
|
||||
the generated plist to "${BundlePath}.xcent".
|
||||
|
||||
Args:
|
||||
entitlements: string, optional, path to the Entitlements.plist template
|
||||
to use, defaults to "${SDKROOT}/Entitlements.plist"
|
||||
substitutions: dictionary, variable substitutions
|
||||
overrides: dictionary, values to add to the entitlements
|
||||
|
||||
Returns:
|
||||
Path to the generated entitlements file.
|
||||
"""
|
||||
source_path = entitlements
|
||||
target_path = os.path.join(
|
||||
os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
|
||||
)
|
||||
if not source_path:
|
||||
source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist")
|
||||
shutil.copy2(source_path, target_path)
|
||||
data = self._LoadPlistMaybeBinary(target_path)
|
||||
data = self._ExpandVariables(data, substitutions)
|
||||
if overrides:
|
||||
for key in overrides:
|
||||
if key not in data:
|
||||
data[key] = overrides[key]
|
||||
plistlib.writePlist(data, target_path)
|
||||
return target_path
|
||||
|
||||
def _ExpandVariables(self, data, substitutions):
|
||||
"""Expands variables "$(variable)" in data.
|
||||
|
||||
Args:
|
||||
data: object, can be either string, list or dictionary
|
||||
substitutions: dictionary, variable substitutions to perform
|
||||
|
||||
Returns:
|
||||
Copy of data where each reference to "$(variable)" has been replaced
|
||||
by the corresponding value found in substitutions, or left intact if
|
||||
the key was not found.
|
||||
"""
|
||||
if isinstance(data, str):
|
||||
for key, value in substitutions.items():
|
||||
data = data.replace("$(%s)" % key, value)
|
||||
return data
|
||||
if isinstance(data, list):
|
||||
return [self._ExpandVariables(v, substitutions) for v in data]
|
||||
if isinstance(data, dict):
|
||||
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
|
||||
return data
|
||||
|
||||
|
||||
def NextGreaterPowerOf2(x):
|
||||
return 2 ** (x).bit_length()
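# Illustrative behavior (sketch, not part of the vendored module):
# NextGreaterPowerOf2(3) == 4 and NextGreaterPowerOf2(4) == 8, i.e. the result
# is always strictly greater than x; WriteHmap uses it to size its bucket table.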
|
||||
|
||||
|
||||
def WriteHmap(output_name, filelist):
|
||||
"""Generates a header map based on |filelist|.
|
||||
|
||||
Per Mark Mentovai:
|
||||
A header map is structured essentially as a hash table, keyed by names used
|
||||
in #includes, and providing pathnames to the actual files.
|
||||
|
||||
The implementation below and the comment above come from inspecting:
|
||||
http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
|
||||
while also looking at the implementation in clang in:
|
||||
https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
|
||||
"""
|
||||
magic = 1751998832
|
||||
version = 1
|
||||
_reserved = 0
|
||||
count = len(filelist)
|
||||
capacity = NextGreaterPowerOf2(count)
|
||||
strings_offset = 24 + (12 * capacity)
|
||||
max_value_length = max(len(value) for value in filelist.values())
|
||||
|
||||
out = open(output_name, "wb")
|
||||
out.write(
|
||||
struct.pack(
|
||||
"<LHHLLLL",
|
||||
magic,
|
||||
version,
|
||||
_reserved,
|
||||
strings_offset,
|
||||
count,
|
||||
capacity,
|
||||
max_value_length,
|
||||
)
|
||||
)
|
||||
|
||||
# Create empty hashmap buckets.
|
||||
buckets = [None] * capacity
|
||||
for file, path in filelist.items():
|
||||
key = 0
|
||||
for c in file:
|
||||
key += ord(c.lower()) * 13
|
||||
|
||||
# Fill next empty bucket.
|
||||
while buckets[key & capacity - 1] is not None:
|
||||
key = key + 1
|
||||
buckets[key & capacity - 1] = (file, path)
|
||||
|
||||
next_offset = 1
|
||||
for bucket in buckets:
|
||||
if bucket is None:
|
||||
out.write(struct.pack("<LLL", 0, 0, 0))
|
||||
else:
|
||||
(file, path) = bucket
|
||||
key_offset = next_offset
|
||||
prefix_offset = key_offset + len(file) + 1
|
||||
suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
|
||||
next_offset = suffix_offset + len(os.path.basename(path)) + 1
|
||||
out.write(struct.pack("<LLL", key_offset, prefix_offset, suffix_offset))
|
||||
|
||||
# Pad byte since next offset starts at 1.
|
||||
out.write(struct.pack("<x"))
|
||||
|
||||
for bucket in buckets:
|
||||
if bucket is not None:
|
||||
(file, path) = bucket
|
||||
out.write(struct.pack("<%ds" % len(file), file))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
base = os.path.dirname(path) + os.sep
|
||||
out.write(struct.pack("<%ds" % len(base), base))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
path = os.path.basename(path)
|
||||
out.write(struct.pack("<%ds" % len(path), path))
|
||||
out.write(struct.pack("<s", "\0"))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
1262
my-app/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
generated
vendored
Executable file
1262
my-app/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
generated
vendored
Executable file
File diff suppressed because it is too large
Load diff
174
my-app/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
generated
vendored
Executable file
174
my-app/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
generated
vendored
Executable file
|
@ -0,0 +1,174 @@
|
|||
# This file comes from
|
||||
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
|
||||
# Do not edit! Edit the upstream one instead.
|
||||
|
||||
"""Python module for generating .ninja files.
|
||||
|
||||
Note that this is emphatically not a required piece of Ninja; it's
|
||||
just a helpful utility for build-file-generation systems that already
|
||||
use Python.
|
||||
"""
|
||||
|
||||
import textwrap
|
||||
|
||||
|
||||
def escape_path(word):
|
||||
return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:")
|
||||
|
||||
|
||||
class Writer:
|
||||
def __init__(self, output, width=78):
|
||||
self.output = output
|
||||
self.width = width
|
||||
|
||||
def newline(self):
|
||||
self.output.write("\n")
|
||||
|
||||
def comment(self, text):
|
||||
for line in textwrap.wrap(text, self.width - 2):
|
||||
self.output.write("# " + line + "\n")
|
||||
|
||||
def variable(self, key, value, indent=0):
|
||||
if value is None:
|
||||
return
|
||||
if isinstance(value, list):
|
||||
value = " ".join(filter(None, value)) # Filter out empty strings.
|
||||
self._line(f"{key} = {value}", indent)
|
||||
|
||||
def pool(self, name, depth):
|
||||
self._line("pool %s" % name)
|
||||
self.variable("depth", depth, indent=1)
|
||||
|
||||
def rule(
|
||||
self,
|
||||
name,
|
||||
command,
|
||||
description=None,
|
||||
depfile=None,
|
||||
generator=False,
|
||||
pool=None,
|
||||
restat=False,
|
||||
rspfile=None,
|
||||
rspfile_content=None,
|
||||
deps=None,
|
||||
):
|
||||
self._line("rule %s" % name)
|
||||
self.variable("command", command, indent=1)
|
||||
if description:
|
||||
self.variable("description", description, indent=1)
|
||||
if depfile:
|
||||
self.variable("depfile", depfile, indent=1)
|
||||
if generator:
|
||||
self.variable("generator", "1", indent=1)
|
||||
if pool:
|
||||
self.variable("pool", pool, indent=1)
|
||||
if restat:
|
||||
self.variable("restat", "1", indent=1)
|
||||
if rspfile:
|
||||
self.variable("rspfile", rspfile, indent=1)
|
||||
if rspfile_content:
|
||||
self.variable("rspfile_content", rspfile_content, indent=1)
|
||||
if deps:
|
||||
self.variable("deps", deps, indent=1)
|
||||
|
||||
def build(
|
||||
self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None
|
||||
):
|
||||
outputs = self._as_list(outputs)
|
||||
all_inputs = self._as_list(inputs)[:]
|
||||
out_outputs = list(map(escape_path, outputs))
|
||||
all_inputs = list(map(escape_path, all_inputs))
|
||||
|
||||
if implicit:
|
||||
implicit = map(escape_path, self._as_list(implicit))
|
||||
all_inputs.append("|")
|
||||
all_inputs.extend(implicit)
|
||||
if order_only:
|
||||
order_only = map(escape_path, self._as_list(order_only))
|
||||
all_inputs.append("||")
|
||||
all_inputs.extend(order_only)
|
||||
|
||||
self._line(
|
||||
"build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs))
|
||||
)
|
||||
|
||||
if variables:
|
||||
if isinstance(variables, dict):
|
||||
iterator = iter(variables.items())
|
||||
else:
|
||||
iterator = iter(variables)
|
||||
|
||||
for key, val in iterator:
|
||||
self.variable(key, val, indent=1)
|
||||
|
||||
return outputs
|
||||
|
||||
def include(self, path):
|
||||
self._line("include %s" % path)
|
||||
|
||||
def subninja(self, path):
|
||||
self._line("subninja %s" % path)
|
||||
|
||||
def default(self, paths):
|
||||
self._line("default %s" % " ".join(self._as_list(paths)))
|
||||
|
||||
def _count_dollars_before_index(self, s, i):
|
||||
"""Returns the number of '$' characters right in front of s[i]."""
|
||||
dollar_count = 0
|
||||
dollar_index = i - 1
|
||||
while dollar_index > 0 and s[dollar_index] == "$":
|
||||
dollar_count += 1
|
||||
dollar_index -= 1
|
||||
return dollar_count
|
||||
|
||||
def _line(self, text, indent=0):
|
||||
"""Write 'text' word-wrapped at self.width characters."""
|
||||
leading_space = " " * indent
|
||||
while len(leading_space) + len(text) > self.width:
|
||||
# The text is too wide; wrap if possible.
|
||||
|
||||
# Find the rightmost space that would obey our width constraint and
|
||||
# that's not an escaped space.
|
||||
available_space = self.width - len(leading_space) - len(" $")
|
||||
space = available_space
|
||||
while True:
|
||||
space = text.rfind(" ", 0, space)
|
||||
if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0:
|
||||
break
|
||||
|
||||
if space < 0:
|
||||
# No such space; just use the first unescaped space we can find.
|
||||
space = available_space - 1
|
||||
while True:
|
||||
space = text.find(" ", space + 1)
|
||||
if (
|
||||
space < 0
|
||||
or self._count_dollars_before_index(text, space) % 2 == 0
|
||||
):
|
||||
break
|
||||
if space < 0:
|
||||
# Give up on breaking.
|
||||
break
|
||||
|
||||
self.output.write(leading_space + text[0:space] + " $\n")
|
||||
text = text[space + 1 :]
|
||||
|
||||
# Subsequent lines are continuations, so indent them.
|
||||
leading_space = " " * (indent + 2)
|
||||
|
||||
self.output.write(leading_space + text + "\n")
|
||||
|
||||
def _as_list(self, input):
|
||||
if input is None:
|
||||
return []
|
||||
if isinstance(input, list):
|
||||
return input
|
||||
return [input]
|
||||
|
||||
|
||||
def escape(string):
|
||||
"""Escape a string such that it can be embedded into a Ninja file without
|
||||
further interpretation."""
|
||||
assert "\n" not in string, "Ninja syntax does not allow newlines"
|
||||
# We only have one special metacharacter: '$'.
|
||||
return string.replace("$", "$$")
|
61
my-app/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
generated
vendored
Executable file
61
my-app/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
generated
vendored
Executable file
|
@ -0,0 +1,61 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""A clone of the default copy.deepcopy that doesn't handle cyclic
|
||||
structures or complex types except for dicts and lists. This is
|
||||
because gyp copies such large structures that small copy overhead ends up
|
||||
taking seconds in a project the size of Chromium."""
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
pass
|
||||
|
||||
|
||||
__all__ = ["Error", "deepcopy"]
|
||||
|
||||
|
||||
def deepcopy(x):
|
||||
"""Deep copy operation on gyp objects such as strings, ints, dicts
|
||||
and lists. More than twice as fast as copy.deepcopy but much less
|
||||
generic."""
|
||||
|
||||
try:
|
||||
return _deepcopy_dispatch[type(x)](x)
|
||||
except KeyError:
|
||||
raise Error(
|
||||
"Unsupported type %s for deepcopy. Use copy.deepcopy "
|
||||
+ "or expand simple_copy support." % type(x)
|
||||
)
|
||||
|
||||
|
||||
_deepcopy_dispatch = d = {}
|
||||
|
||||
|
||||
def _deepcopy_atomic(x):
|
||||
return x
|
||||
|
||||
|
||||
types = bool, float, int, str, type, type(None)
|
||||
|
||||
for x in types:
|
||||
d[x] = _deepcopy_atomic
|
||||
|
||||
|
||||
def _deepcopy_list(x):
|
||||
return [deepcopy(a) for a in x]
|
||||
|
||||
|
||||
d[list] = _deepcopy_list
|
||||
|
||||
|
||||
def _deepcopy_dict(x):
|
||||
y = {}
|
||||
for key, value in x.items():
|
||||
y[deepcopy(key)] = deepcopy(value)
|
||||
return y
|
||||
|
||||
|
||||
d[dict] = _deepcopy_dict
|
||||
|
||||
del d
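# Illustrative behavior (not part of the vendored module): deepcopy() handles
# nested dicts/lists of atomic values, e.g. deepcopy({"a": [1, "b"]}) returns an
# independent copy, while deepcopy(set()) raises Error because sets are not in
# the dispatch table.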
|
373
my-app/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
generated
vendored
Executable file
373
my-app/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
generated
vendored
Executable file
|
@ -0,0 +1,373 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Copyright (c) 2012 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Utility functions for Windows builds.
|
||||
|
||||
These functions are executed via gyp-win-tool when using the ninja generator.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import stat
|
||||
import string
|
||||
import sys
|
||||
|
||||
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
# A regex matching an argument corresponding to the output filename passed to
|
||||
# link.exe.
|
||||
_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)
|
||||
|
||||
|
||||
def main(args):
|
||||
executor = WinTool()
|
||||
exit_code = executor.Dispatch(args)
|
||||
if exit_code is not None:
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
class WinTool:
|
||||
"""This class performs all the Windows tooling steps. The methods can either
|
||||
be executed directly, or dispatched from an argument list."""
|
||||
|
||||
def _UseSeparateMspdbsrv(self, env, args):
|
||||
"""Allows to use a unique instance of mspdbsrv.exe per linker instead of a
|
||||
shared one."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
if args[0] != "link.exe":
|
||||
return
|
||||
|
||||
# Use the output filename passed to the linker to generate an endpoint name
|
||||
# for mspdbsrv.exe.
|
||||
endpoint_name = None
|
||||
for arg in args:
|
||||
m = _LINK_EXE_OUT_ARG.match(arg)
|
||||
if m:
|
||||
endpoint_name = re.sub(
|
||||
r"\W+", "", "%s_%d" % (m.group("out"), os.getpid())
|
||||
)
|
||||
break
|
||||
|
||||
if endpoint_name is None:
|
||||
return
|
||||
|
||||
# Adds the appropriate environment variable. This will be read by link.exe
|
||||
# to know which instance of mspdbsrv.exe it should connect to (if it's
|
||||
# not set then the default endpoint is used).
|
||||
env["_MSPDBSRV_ENDPOINT_"] = endpoint_name
|
||||
|
||||
def Dispatch(self, args):
|
||||
"""Dispatches a string command to a method."""
|
||||
if len(args) < 1:
|
||||
raise Exception("Not enough arguments")
|
||||
|
||||
method = "Exec%s" % self._CommandifyName(args[0])
|
||||
return getattr(self, method)(*args[1:])
|
||||
|
||||
def _CommandifyName(self, name_string):
|
||||
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
|
||||
return name_string.title().replace("-", "")
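# Illustrative dispatch (hypothetical arguments): Dispatch(["recursive-mirror",
# "src", "dst"]) resolves to ExecRecursiveMirror("src", "dst"), and
# Dispatch(["stamp", "out.stamp"]) to ExecStamp("out.stamp").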
|
||||
|
||||
def _GetEnv(self, arch):
|
||||
"""Gets the saved environment from a file for a given architecture."""
|
||||
# The environment is saved as an "environment block" (see CreateProcess
|
||||
# and msvs_emulation for details). We convert to a dict here.
|
||||
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
|
||||
pairs = open(arch).read()[:-2].split("\0")
|
||||
kvs = [item.split("=", 1) for item in pairs]
|
||||
return dict(kvs)
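# Illustrative block contents (hypothetical paths): a file holding
# "PATH=C:\\tools\x00TEMP=C:\\tmp\x00\x00" parses to
# {"PATH": "C:\\tools", "TEMP": "C:\\tmp"}.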
|
||||
|
||||
def ExecStamp(self, path):
|
||||
"""Simple stamp command."""
|
||||
open(path, "w").close()
|
||||
|
||||
def ExecRecursiveMirror(self, source, dest):
|
||||
"""Emulation of rm -rf out && cp -af in out."""
|
||||
if os.path.exists(dest):
|
||||
if os.path.isdir(dest):
|
||||
|
||||
def _on_error(fn, path, excinfo):
|
||||
# The operation failed, possibly because the file is set to
|
||||
# read-only. If that's why, make it writable and try the op again.
|
||||
if not os.access(path, os.W_OK):
|
||||
os.chmod(path, stat.S_IWRITE)
|
||||
fn(path)
|
||||
|
||||
shutil.rmtree(dest, onerror=_on_error)
|
||||
else:
|
||||
if not os.access(dest, os.W_OK):
|
||||
# Attempt to make the file writable before deleting it.
|
||||
os.chmod(dest, stat.S_IWRITE)
|
||||
os.unlink(dest)
|
||||
|
||||
if os.path.isdir(source):
|
||||
shutil.copytree(source, dest)
|
||||
else:
|
||||
shutil.copy2(source, dest)
|
||||
|
||||
def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
|
||||
"""Filter diagnostic output from link that looks like:
|
||||
' Creating library ui.dll.lib and object ui.dll.exp'
|
||||
This happens when there are exports from the dll or exe.
|
||||
"""
|
||||
env = self._GetEnv(arch)
|
||||
if use_separate_mspdbsrv == "True":
|
||||
self._UseSeparateMspdbsrv(env, args)
|
||||
if sys.platform == "win32":
|
||||
args = list(args) # *args is a tuple by default, which is read-only.
|
||||
args[0] = args[0].replace("/", "\\")
|
||||
# https://docs.python.org/2/library/subprocess.html:
|
||||
# "On Unix with shell=True [...] if args is a sequence, the first item
|
||||
# specifies the command string, and any additional items will be treated as
|
||||
# additional arguments to the shell itself. That is to say, Popen does the
|
||||
# equivalent of:
|
||||
# Popen(['/bin/sh', '-c', args[0], args[1], ...])"
|
||||
# For that reason, since going through the shell doesn't seem necessary on
|
||||
# non-Windows, don't do that there.
|
||||
link = subprocess.Popen(
|
||||
args,
|
||||
shell=sys.platform == "win32",
|
||||
env=env,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
out = link.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith(" Creating library ")
|
||||
and not line.startswith("Generating code")
|
||||
and not line.startswith("Finished generating code")
|
||||
):
|
||||
print(line)
|
||||
return link.returncode
|
||||
|
||||
def ExecLinkWithManifests(
|
||||
self,
|
||||
arch,
|
||||
embed_manifest,
|
||||
out,
|
||||
ldcmd,
|
||||
resname,
|
||||
mt,
|
||||
rc,
|
||||
intermediate_manifest,
|
||||
*manifests
|
||||
):
|
||||
"""A wrapper for handling creating a manifest resource and then executing
|
||||
a link command."""
|
||||
# The 'normal' way to do manifests is to have link generate a manifest
|
||||
# based on gathering dependencies from the object files, then merge that
|
||||
# manifest with other manifests supplied as sources, convert the merged
|
||||
# manifest to a resource, and then *relink*, including the compiled
|
||||
# version of the manifest resource. This breaks incremental linking, and
|
||||
# is generally overly complicated. Instead, we merge all the manifests
|
||||
# provided (along with one that includes what would normally be in the
|
||||
# linker-generated one, see msvs_emulation.py), and include that into the
|
||||
# first and only link. We still tell link to generate a manifest, but we
|
||||
# only use that to assert that our simpler process did not miss anything.
|
||||
variables = {
|
||||
"python": sys.executable,
|
||||
"arch": arch,
|
||||
"out": out,
|
||||
"ldcmd": ldcmd,
|
||||
"resname": resname,
|
||||
"mt": mt,
|
||||
"rc": rc,
|
||||
"intermediate_manifest": intermediate_manifest,
|
||||
"manifests": " ".join(manifests),
|
||||
}
|
||||
add_to_ld = ""
|
||||
if manifests:
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
|
||||
"-manifest %(manifests)s -out:%(out)s.manifest" % variables
|
||||
)
|
||||
if embed_manifest == "True":
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest"
|
||||
" %(out)s.manifest.rc %(resname)s" % variables
|
||||
)
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s "
|
||||
"%(out)s.manifest.rc" % variables
|
||||
)
|
||||
add_to_ld = " %(out)s.manifest.res" % variables
|
||||
subprocess.check_call(ldcmd + add_to_ld)
|
||||
|
||||
# Run mt.exe on the theoretically complete manifest we generated, merging
|
||||
# it with the one the linker generated to confirm that the linker
|
||||
# generated one does not add anything. This is strictly unnecessary for
|
||||
# correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
|
||||
# used in a #pragma comment.
|
||||
if manifests:
|
||||
# Merge the intermediate one with ours to .assert.manifest, then check
|
||||
# that .assert.manifest is identical to ours.
|
||||
subprocess.check_call(
|
||||
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
|
||||
"-manifest %(out)s.manifest %(intermediate_manifest)s "
|
||||
"-out:%(out)s.assert.manifest" % variables
|
||||
)
|
||||
assert_manifest = "%(out)s.assert.manifest" % variables
|
||||
our_manifest = "%(out)s.manifest" % variables
|
||||
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
|
||||
# and sometimes doesn't unfortunately.
|
||||
with open(our_manifest) as our_f, open(assert_manifest) as assert_f:
|
||||
translator = str.maketrans("", "", string.whitespace)
|
||||
our_data = our_f.read().translate(translator)
|
||||
assert_data = assert_f.read().translate(translator)
|
||||
if our_data != assert_data:
|
||||
os.unlink(out)
|
||||
|
||||
def dump(filename):
|
||||
print(filename, file=sys.stderr)
|
||||
print("-----", file=sys.stderr)
|
||||
with open(filename) as f:
|
||||
print(f.read(), file=sys.stderr)
|
||||
print("-----", file=sys.stderr)
|
||||
|
||||
dump(intermediate_manifest)
|
||||
dump(our_manifest)
|
||||
dump(assert_manifest)
|
||||
sys.stderr.write(
|
||||
'Linker generated manifest "%s" added to final manifest "%s" '
|
||||
'(result in "%s"). '
|
||||
"Were /MANIFEST switches used in #pragma statements? "
|
||||
% (intermediate_manifest, our_manifest, assert_manifest)
|
||||
)
|
||||
return 1
|
||||
|
||||
def ExecManifestWrapper(self, arch, *args):
|
||||
"""Run manifest tool with environment set. Strip out undesirable warning
|
||||
(some XML blocks are recognized by the OS loader, but not the manifest
|
||||
tool)."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if line and "manifest authoring warning 81010002" not in line:
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecManifestToRc(self, arch, *args):
|
||||
"""Creates a resource file pointing a SxS assembly manifest.
|
||||
|args| is a tuple containing the path to the resource file, the path to the manifest file
|
||||
and resource name which can be "1" (for executables) or "2" (for DLLs)."""
|
||||
manifest_path, resource_path, resource_name = args
|
||||
with open(resource_path, "w") as output:
|
||||
output.write(
|
||||
'#include <windows.h>\n%s RT_MANIFEST "%s"'
|
||||
% (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
|
||||
)
|
||||
|
||||
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
|
||||
"""Filter noisy filenames output from MIDL compile step that isn't
|
||||
quietable via command line flags.
|
||||
"""
|
||||
args = (
|
||||
["midl", "/nologo"]
|
||||
+ list(flags)
|
||||
+ [
|
||||
"/out",
|
||||
outdir,
|
||||
"/tlb",
|
||||
tlb,
|
||||
"/h",
|
||||
h,
|
||||
"/dlldata",
|
||||
dlldata,
|
||||
"/iid",
|
||||
iid,
|
||||
"/proxy",
|
||||
proxy,
|
||||
idl,
|
||||
]
|
||||
)
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
# Filter junk out of stdout, and write filtered versions. Output we want
|
||||
# to filter is pairs of lines that look like this:
|
||||
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
|
||||
# objidl.idl
|
||||
lines = out.splitlines()
|
||||
prefixes = ("Processing ", "64 bit Processing ")
|
||||
processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
|
||||
for line in lines:
|
||||
if not line.startswith(prefixes) and line not in processing:
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecAsmWrapper(self, arch, *args):
|
||||
"""Filter logo banner from invocations of asm.exe."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith("Copyright (C) Microsoft Corporation")
|
||||
and not line.startswith("Microsoft (R) Macro Assembler")
|
||||
and not line.startswith(" Assembling: ")
|
||||
and line
|
||||
):
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecRcWrapper(self, arch, *args):
|
||||
"""Filter logo banner from invocations of rc.exe. Older versions of RC
|
||||
don't support the /nologo flag."""
|
||||
env = self._GetEnv(arch)
|
||||
popen = subprocess.Popen(
|
||||
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
|
||||
)
|
||||
out = popen.communicate()[0].decode("utf-8")
|
||||
for line in out.splitlines():
|
||||
if (
|
||||
not line.startswith("Microsoft (R) Windows (R) Resource Compiler")
|
||||
and not line.startswith("Copyright (C) Microsoft Corporation")
|
||||
and line
|
||||
):
|
||||
print(line)
|
||||
return popen.returncode
|
||||
|
||||
def ExecActionWrapper(self, arch, rspfile, *dir):
|
||||
"""Runs an action command line from a response file using the environment
|
||||
for |arch|. If |dir| is supplied, use that as the working directory."""
|
||||
env = self._GetEnv(arch)
|
||||
# TODO(scottmg): This is a temporary hack to get some specific variables
|
||||
# through to actions that are set after gyp-time. http://crbug.com/333738.
|
||||
for k, v in os.environ.items():
|
||||
if k not in env:
|
||||
env[k] = v
|
||||
args = open(rspfile).read()
|
||||
dir = dir[0] if dir else None
|
||||
return subprocess.call(args, shell=True, env=env, cwd=dir)
|
||||
|
||||
def ExecClCompile(self, project_dir, selected_files):
|
||||
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
|
||||
build selected C/C++ files."""
|
||||
project_dir = os.path.relpath(project_dir, BASE_DIR)
|
||||
selected_files = selected_files.split(";")
|
||||
ninja_targets = [
|
||||
os.path.join(project_dir, filename) + "^^" for filename in selected_files
|
||||
]
|
||||
cmd = ["ninja.exe"]
|
||||
cmd.extend(ninja_targets)
|
||||
return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv[1:]))
|
1933
my-app/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
generated
vendored
Executable file
1933
my-app/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
generated
vendored
Executable file
File diff suppressed because it is too large
Load diff
302
my-app/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
generated
vendored
Executable file
302
my-app/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
generated
vendored
Executable file
|
@ -0,0 +1,302 @@
|
|||
# Copyright (c) 2014 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Xcode-ninja wrapper project file generator.
|
||||
|
||||
This updates the data structures passed to the Xcode gyp generator to build
|
||||
with ninja instead. The Xcode project itself is transformed into a list of
|
||||
executable targets, each with a build step to build with ninja, and a target
|
||||
with every source and resource file. This appears to sidestep some of the
|
||||
major performance headaches experienced using complex projects and large numbers
|
||||
of targets within Xcode.
|
||||
"""
|
||||
|
||||
import errno
|
||||
import gyp.generator.ninja
|
||||
import os
|
||||
import re
|
||||
import xml.sax.saxutils
|
||||
|
||||
|
||||
def _WriteWorkspace(main_gyp, sources_gyp, params):
|
||||
""" Create a workspace to wrap main and sources gyp paths. """
|
||||
(build_file_root, build_file_ext) = os.path.splitext(main_gyp)
|
||||
workspace_path = build_file_root + ".xcworkspace"
|
||||
options = params["options"]
|
||||
if options.generator_output:
|
||||
workspace_path = os.path.join(options.generator_output, workspace_path)
|
||||
try:
|
||||
os.makedirs(workspace_path)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
output_string = (
|
||||
'<?xml version="1.0" encoding="UTF-8"?>\n' + '<Workspace version = "1.0">\n'
|
||||
)
|
||||
for gyp_name in [main_gyp, sources_gyp]:
|
||||
name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj"
|
||||
name = xml.sax.saxutils.quoteattr("group:" + name)
|
||||
output_string += " <FileRef location = %s></FileRef>\n" % name
|
||||
output_string += "</Workspace>\n"
|
||||
|
||||
workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
|
||||
|
||||
try:
|
||||
with open(workspace_file) as input_file:
|
||||
input_string = input_file.read()
|
||||
if input_string == output_string:
|
||||
return
|
||||
except OSError:
|
||||
# Ignore errors if the file doesn't exist.
|
||||
pass
|
||||
|
||||
with open(workspace_file, "w") as output_file:
|
||||
output_file.write(output_string)
|
||||
|
||||
|
||||
def _TargetFromSpec(old_spec, params):
|
||||
""" Create fake target for xcode-ninja wrapper. """
|
||||
# Determine ninja top level build dir (e.g. /path/to/out).
|
||||
ninja_toplevel = None
|
||||
jobs = 0
|
||||
if params:
|
||||
options = params["options"]
|
||||
ninja_toplevel = os.path.join(
|
||||
options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params)
|
||||
)
|
||||
jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0)
|
||||
|
||||
target_name = old_spec.get("target_name")
|
||||
product_name = old_spec.get("product_name", target_name)
|
||||
product_extension = old_spec.get("product_extension")
|
||||
|
||||
ninja_target = {}
|
||||
ninja_target["target_name"] = target_name
|
||||
ninja_target["product_name"] = product_name
|
||||
if product_extension:
|
||||
ninja_target["product_extension"] = product_extension
|
||||
ninja_target["toolset"] = old_spec.get("toolset")
|
||||
ninja_target["default_configuration"] = old_spec.get("default_configuration")
|
||||
ninja_target["configurations"] = {}
|
||||
|
||||
# Tell Xcode to look in |ninja_toplevel| for build products.
|
||||
new_xcode_settings = {}
|
||||
if ninja_toplevel:
|
||||
new_xcode_settings["CONFIGURATION_BUILD_DIR"] = (
|
||||
"%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
|
||||
)
|
||||
|
||||
if "configurations" in old_spec:
|
||||
for config in old_spec["configurations"]:
|
||||
old_xcode_settings = old_spec["configurations"][config].get(
|
||||
"xcode_settings", {}
|
||||
)
|
||||
if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings:
|
||||
new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO"
|
||||
new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[
|
||||
"IPHONEOS_DEPLOYMENT_TARGET"
|
||||
]
|
||||
for key in ["BUNDLE_LOADER", "TEST_HOST"]:
|
||||
if key in old_xcode_settings:
|
||||
new_xcode_settings[key] = old_xcode_settings[key]
|
||||
|
||||
ninja_target["configurations"][config] = {}
|
||||
ninja_target["configurations"][config][
|
||||
"xcode_settings"
|
||||
] = new_xcode_settings
|
||||
|
||||
ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0)
|
||||
ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0)
|
||||
ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0)
|
||||
ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0)
|
||||
ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0)
|
||||
ninja_target["type"] = old_spec["type"]
|
||||
if ninja_toplevel:
|
||||
ninja_target["actions"] = [
|
||||
{
|
||||
"action_name": "Compile and copy %s via ninja" % target_name,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"action": [
|
||||
"env",
|
||||
"PATH=%s" % os.environ["PATH"],
|
||||
"ninja",
|
||||
"-C",
|
||||
new_xcode_settings["CONFIGURATION_BUILD_DIR"],
|
||||
target_name,
|
||||
],
|
||||
"message": "Compile and copy %s via ninja" % target_name,
|
||||
},
|
||||
]
|
||||
if jobs > 0:
|
||||
ninja_target["actions"][0]["action"].extend(("-j", jobs))
|
||||
return ninja_target
|
||||
|
||||
|
||||
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||
"""Limit targets for Xcode wrapper.
|
||||
|
||||
Xcode sometimes performs poorly with too many targets, so only include
|
||||
proper executable targets, with filters to customize.
|
||||
Arguments:
|
||||
target_extras: Regular expression to always add, matching any target.
|
||||
executable_target_pattern: Regular expression limiting executable targets.
|
||||
spec: Specifications for target.
|
||||
"""
|
||||
target_name = spec.get("target_name")
|
||||
# Always include targets matching target_extras.
|
||||
if target_extras is not None and re.search(target_extras, target_name):
|
||||
return True
|
||||
|
||||
# Otherwise just show executable targets and xc_tests.
|
||||
if int(spec.get("mac_xctest_bundle", 0)) != 0 or (
|
||||
spec.get("type", "") == "executable"
|
||||
and spec.get("product_extension", "") != "bundle"
|
||||
):
|
||||
|
||||
# If there is a filter and the target does not match, exclude the target.
|
||||
if executable_target_pattern is not None:
|
||||
if not re.search(executable_target_pattern, target_name):
|
||||
return False
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def CreateWrapper(target_list, target_dicts, data, params):
|
||||
"""Initialize targets for the ninja wrapper.
|
||||
|
||||
This sets up the necessary variables in the targets to generate Xcode projects
|
||||
that use ninja as an external builder.
|
||||
Arguments:
|
||||
target_list: List of target pairs: 'base/base.gyp:base'.
|
||||
target_dicts: Dict of target properties keyed on target pair.
|
||||
data: Dict of flattened build files keyed on gyp path.
|
||||
params: Dict of global options for gyp.
|
||||
"""
|
||||
orig_gyp = params["build_files"][0]
|
||||
for gyp_name, gyp_dict in data.items():
|
||||
if gyp_name == orig_gyp:
|
||||
depth = gyp_dict["_DEPTH"]
|
||||
|
||||
# Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
|
||||
# and prepend .ninja before the .gyp extension.
|
||||
generator_flags = params.get("generator_flags", {})
|
||||
main_gyp = generator_flags.get("xcode_ninja_main_gyp", None)
|
||||
if main_gyp is None:
|
||||
(build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
|
||||
main_gyp = build_file_root + ".ninja" + build_file_ext
|
||||
|
||||
# Create new |target_list|, |target_dicts| and |data| data structures.
|
||||
new_target_list = []
|
||||
new_target_dicts = {}
|
||||
new_data = {}
|
||||
|
||||
# Set base keys needed for |data|.
|
||||
new_data[main_gyp] = {}
|
||||
new_data[main_gyp]["included_files"] = []
|
||||
new_data[main_gyp]["targets"] = []
|
||||
new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
|
||||
|
||||
# Normally the xcode-ninja generator includes only valid executable targets.
|
||||
# If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
|
||||
# executable targets that match the pattern. (Default all)
|
||||
executable_target_pattern = generator_flags.get(
|
||||
"xcode_ninja_executable_target_pattern", None
|
||||
)
|
||||
|
||||
# For including other non-executable targets, add the matching target name
|
||||
# to the |xcode_ninja_target_pattern| regular expression. (Default none)
|
||||
target_extras = generator_flags.get("xcode_ninja_target_pattern", None)
|
||||
|
||||
for old_qualified_target in target_list:
|
||||
spec = target_dicts[old_qualified_target]
|
||||
if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
|
||||
# Add to new_target_list.
|
||||
target_name = spec.get("target_name")
|
||||
new_target_name = f"{main_gyp}:{target_name}#target"
|
||||
new_target_list.append(new_target_name)
|
||||
|
||||
# Add to new_target_dicts.
|
||||
new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
|
||||
|
||||
# Add to new_data.
|
||||
for old_target in data[old_qualified_target.split(":")[0]]["targets"]:
|
||||
if old_target["target_name"] == target_name:
|
||||
new_data_target = {}
|
||||
new_data_target["target_name"] = old_target["target_name"]
|
||||
new_data_target["toolset"] = old_target["toolset"]
|
||||
new_data[main_gyp]["targets"].append(new_data_target)
|
||||
|
||||
# Create sources target.
|
||||
sources_target_name = "sources_for_indexing"
|
||||
sources_target = _TargetFromSpec(
|
||||
{
|
||||
"target_name": sources_target_name,
|
||||
"toolset": "target",
|
||||
"default_configuration": "Default",
|
||||
"mac_bundle": "0",
|
||||
"type": "executable",
|
||||
},
|
||||
None,
|
||||
)
|
||||
|
||||
# Tell Xcode to look everywhere for headers.
|
||||
sources_target["configurations"] = {"Default": {"include_dirs": [depth]}}
|
||||
|
||||
# Put excluded files into the sources target so they can be opened in Xcode.
|
||||
skip_excluded_files = not generator_flags.get(
|
||||
"xcode_ninja_list_excluded_files", True
|
||||
)
|
||||
|
||||
sources = []
|
||||
for target, target_dict in target_dicts.items():
|
||||
base = os.path.dirname(target)
|
||||
files = target_dict.get("sources", []) + target_dict.get(
|
||||
"mac_bundle_resources", []
|
||||
)
|
||||
|
||||
if not skip_excluded_files:
|
||||
files.extend(
|
||||
target_dict.get("sources_excluded", [])
|
||||
+ target_dict.get("mac_bundle_resources_excluded", [])
|
||||
)
|
||||
|
||||
for action in target_dict.get("actions", []):
|
||||
files.extend(action.get("inputs", []))
|
||||
|
||||
if not skip_excluded_files:
|
||||
files.extend(action.get("inputs_excluded", []))
|
||||
|
||||
# Remove files starting with $. These are mostly intermediate files for the
|
||||
# build system.
|
||||
files = [file for file in files if not file.startswith("$")]
|
||||
|
||||
# Make sources relative to root build file.
|
||||
relative_path = os.path.dirname(main_gyp)
|
||||
sources += [
|
||||
os.path.relpath(os.path.join(base, file), relative_path) for file in files
|
||||
]
|
||||
|
||||
sources_target["sources"] = sorted(set(sources))
|
||||
|
||||
# Put sources_to_index in its own gyp.
|
||||
sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
|
||||
fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target"
|
||||
|
||||
# Add to new_target_list, new_target_dicts and new_data.
|
||||
new_target_list.append(fully_qualified_target_name)
|
||||
new_target_dicts[fully_qualified_target_name] = sources_target
|
||||
new_data_target = {}
|
||||
new_data_target["target_name"] = sources_target["target_name"]
|
||||
new_data_target["_DEPTH"] = depth
|
||||
new_data_target["toolset"] = "target"
|
||||
new_data[sources_gyp] = {}
|
||||
new_data[sources_gyp]["targets"] = []
|
||||
new_data[sources_gyp]["included_files"] = []
|
||||
new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
|
||||
new_data[sources_gyp]["targets"].append(new_data_target)
|
||||
|
||||
# Write workspace to file.
|
||||
_WriteWorkspace(main_gyp, sources_gyp, params)
|
||||
return (new_target_list, new_target_dicts, new_data)
|
3198
my-app/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
generated
vendored
Executable file
3198
my-app/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
generated
vendored
Executable file
File diff suppressed because it is too large
Load diff
65
my-app/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
generated
vendored
Executable file
65
my-app/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
generated
vendored
Executable file
|
@ -0,0 +1,65 @@
|
|||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Applies a fix to CR LF TAB handling in xml.dom.
|
||||
|
||||
Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
|
||||
Working around this: http://bugs.python.org/issue5752
|
||||
TODO(bradnelson): Consider dropping this when we drop XP support.
|
||||
"""
|
||||
|
||||
|
||||
import xml.dom.minidom
|
||||
|
||||
|
||||
def _Replacement_write_data(writer, data, is_attrib=False):
|
||||
"""Writes datachars to writer."""
|
||||
data = data.replace("&", "&").replace("<", "<")
|
||||
data = data.replace('"', """).replace(">", ">")
|
||||
if is_attrib:
|
||||
data = data.replace("\r", "
").replace("\n", "
").replace("\t", "	")
|
||||
writer.write(data)
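# Illustrative escaping (based on the replacements above): the text
# 'a < b & "c"' is written as 'a &lt; b &amp; &quot;c&quot;', and inside
# attribute values CR, LF and TAB become numeric character references.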
|
||||
|
||||
|
||||
def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
|
||||
# indent = current indentation
|
||||
# addindent = indentation to add to higher levels
|
||||
# newl = newline string
|
||||
writer.write(indent + "<" + self.tagName)
|
||||
|
||||
attrs = self._get_attributes()
|
||||
a_names = sorted(attrs.keys())
|
||||
|
||||
for a_name in a_names:
|
||||
writer.write(' %s="' % a_name)
|
||||
_Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
|
||||
writer.write('"')
|
||||
if self.childNodes:
|
||||
writer.write(">%s" % newl)
|
||||
for node in self.childNodes:
|
||||
node.writexml(writer, indent + addindent, addindent, newl)
|
||||
writer.write(f"{indent}</{self.tagName}>{newl}")
|
||||
else:
|
||||
writer.write("/>%s" % newl)
|
||||
|
||||
|
||||
class XmlFix:
|
||||
"""Object to manage temporary patching of xml.dom.minidom."""
|
||||
|
||||
def __init__(self):
|
||||
# Preserve current xml.dom.minidom functions.
|
||||
self.write_data = xml.dom.minidom._write_data
|
||||
self.writexml = xml.dom.minidom.Element.writexml
|
||||
# Inject replacement versions of a function and a method.
|
||||
xml.dom.minidom._write_data = _Replacement_write_data
|
||||
xml.dom.minidom.Element.writexml = _Replacement_writexml
|
||||
|
||||
def Cleanup(self):
|
||||
if self.write_data:
|
||||
xml.dom.minidom._write_data = self.write_data
|
||||
xml.dom.minidom.Element.writexml = self.writexml
|
||||
self.write_data = None
|
||||
|
||||
def __del__(self):
|
||||
self.Cleanup()
|
3
my-app/node_modules/node-gyp/gyp/pylib/packaging/LICENSE
generated
vendored
Executable file
3
my-app/node_modules/node-gyp/gyp/pylib/packaging/LICENSE
generated
vendored
Executable file
|
@ -0,0 +1,3 @@
|
|||
This software is made available under the terms of *either* of the licenses
|
||||
found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
|
||||
under the terms of *both* these licenses.
|
177
my-app/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.APACHE
generated
vendored
Executable file
177
my-app/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.APACHE
generated
vendored
Executable file
|
@ -0,0 +1,177 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
23
my-app/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.BSD
generated
vendored
Executable file
23
my-app/node_modules/node-gyp/gyp/pylib/packaging/LICENSE.BSD
generated
vendored
Executable file
|
@ -0,0 +1,23 @@
|
|||
Copyright (c) Donald Stufft and individual contributors.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
15
my-app/node_modules/node-gyp/gyp/pylib/packaging/__init__.py
generated
vendored
Executable file
@ -0,0 +1,15 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "23.3.dev0"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = "2014 %s" % __author__
108
my-app/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
generated
vendored
Executable file
@ -0,0 +1,108 @@
"""
ELF file parser.

This provides a class ``ELFFile`` that parses an ELF executable in a similar
interface to ``ZipFile``. Only the read interface is implemented.

Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
"""

import enum
import os
import struct
from typing import IO, Optional, Tuple


class ELFInvalid(ValueError):
    pass


class EIClass(enum.IntEnum):
    C32 = 1
    C64 = 2


class EIData(enum.IntEnum):
    Lsb = 1
    Msb = 2


class EMachine(enum.IntEnum):
    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183


class ELFFile:
    """
    Representation of an ELF executable.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
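
Note on the file above: ``ELFFile`` deliberately exposes only what the manylinux/musllinux checks need — the ELF class, byte order, machine type, flags, and the ``PT_INTERP`` interpreter path. Below is a minimal usage sketch; the ``packaging._elffile`` import path is an assumption (the vendored copy lives under ``gyp/pylib/packaging``), and the output is only meaningful for ELF binaries on Linux.

```python
# A sketch only: assumes the vendored module is importable as
# "packaging._elffile"; inside node-gyp it actually sits under
# gyp/pylib/packaging, so the import path would differ there.
import sys

from packaging._elffile import EIClass, EIData, ELFFile, ELFInvalid, EMachine


def describe_elf(path: str) -> None:
    """Print the ELF properties that the manylinux/musllinux checks rely on."""
    with open(path, "rb") as f:
        try:
            elf = ELFFile(f)
        except ELFInvalid as exc:
            print(f"{path} is not a readable ELF file: {exc}")
            return
        # The enum does not list every architecture, so fall back to the
        # raw e_machine integer when the lookup fails.
        try:
            machine = EMachine(elf.machine).name
        except ValueError:
            machine = str(elf.machine)
        print("class:     ", EIClass(elf.capacity).name)  # C32 / C64
        print("byte order:", EIData(elf.encoding).name)   # Lsb / Msb
        print("machine:   ", machine)                     # e.g. X8664
        print("flags:     ", hex(elf.flags))
        print("interp:    ", elf.interpreter)             # PT_INTERP path or None


if __name__ == "__main__":
    # On a statically linked or non-ELF interpreter this reports the failure
    # instead of raising, mirroring how _manylinux/_musllinux swallow errors.
    describe_elf(sys.executable)
```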
252
my-app/node_modules/node-gyp/gyp/pylib/packaging/_manylinux.py
generated
vendored
Executable file
@ -0,0 +1,252 @@
import collections
|
||||
import contextlib
|
||||
import functools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
|
||||
|
||||
from ._elffile import EIClass, EIData, ELFFile, EMachine
|
||||
|
||||
EF_ARM_ABIMASK = 0xFF000000
|
||||
EF_ARM_ABI_VER5 = 0x05000000
|
||||
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
||||
|
||||
|
||||
# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
|
||||
# as the type for `path` until then.
|
||||
@contextlib.contextmanager
|
||||
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
|
||||
try:
|
||||
with open(path, "rb") as f:
|
||||
yield ELFFile(f)
|
||||
except (OSError, TypeError, ValueError):
|
||||
yield None
|
||||
|
||||
|
||||
def _is_linux_armhf(executable: str) -> bool:
|
||||
# hard-float ABI can be detected from the ELF header of the running
|
||||
# process
|
||||
# https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
|
||||
with _parse_elf(executable) as f:
|
||||
return (
|
||||
f is not None
|
||||
and f.capacity == EIClass.C32
|
||||
and f.encoding == EIData.Lsb
|
||||
and f.machine == EMachine.Arm
|
||||
and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
|
||||
and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
|
||||
)
|
||||
|
||||
|
||||
def _is_linux_i686(executable: str) -> bool:
|
||||
with _parse_elf(executable) as f:
|
||||
return (
|
||||
f is not None
|
||||
and f.capacity == EIClass.C32
|
||||
and f.encoding == EIData.Lsb
|
||||
and f.machine == EMachine.I386
|
||||
)
|
||||
|
||||
|
||||
def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
|
||||
if "armv7l" in archs:
|
||||
return _is_linux_armhf(executable)
|
||||
if "i686" in archs:
|
||||
return _is_linux_i686(executable)
|
||||
allowed_archs = {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x", "loongarch64"}
|
||||
return any(arch in allowed_archs for arch in archs)
|
||||
|
||||
|
||||
# If glibc ever changes its major version, we need to know what the last
|
||||
# minor version was, so we can build the complete list of all versions.
|
||||
# For now, guess what the highest minor version might be, assume it will
|
||||
# be 50 for testing. Once this actually happens, update the dictionary
|
||||
# with the actual value.
|
||||
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
|
||||
|
||||
|
||||
class _GLibCVersion(NamedTuple):
|
||||
major: int
|
||||
minor: int
|
||||
|
||||
|
||||
def _glibc_version_string_confstr() -> Optional[str]:
|
||||
"""
|
||||
Primary implementation of glibc_version_string using os.confstr.
|
||||
"""
|
||||
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
||||
# to be broken or missing. This strategy is used in the standard library
|
||||
# platform module.
|
||||
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
|
||||
try:
|
||||
# Should be a string like "glibc 2.17".
|
||||
version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
|
||||
assert version_string is not None
|
||||
_, version = version_string.rsplit()
|
||||
except (AssertionError, AttributeError, OSError, ValueError):
|
||||
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
||||
return None
|
||||
return version
|
||||
|
||||
|
||||
def _glibc_version_string_ctypes() -> Optional[str]:
|
||||
"""
|
||||
Fallback implementation of glibc_version_string using ctypes.
|
||||
"""
|
||||
try:
|
||||
import ctypes
|
||||
except ImportError:
|
||||
return None
|
||||
|
||||
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
|
||||
# manpage says, "If filename is NULL, then the returned handle is for the
|
||||
# main program". This way we can let the linker do the work to figure out
|
||||
# which libc our process is actually using.
|
||||
#
|
||||
# We must also handle the special case where the executable is not a
|
||||
# dynamically linked executable. This can occur when using musl libc,
|
||||
# for example. In this situation, dlopen() will error, leading to an
|
||||
# OSError. Interestingly, at least in the case of musl, there is no
|
||||
# errno set on the OSError. The single string argument used to construct
|
||||
# OSError comes from libc itself and is therefore not portable to
|
||||
# hard code here. In any case, failure to call dlopen() means we
|
||||
# can proceed, so we bail on our attempt.
|
||||
try:
|
||||
process_namespace = ctypes.CDLL(None)
|
||||
except OSError:
|
||||
return None
|
||||
|
||||
try:
|
||||
gnu_get_libc_version = process_namespace.gnu_get_libc_version
|
||||
except AttributeError:
|
||||
# Symbol doesn't exist -> therefore, we are not linked to
|
||||
# glibc.
|
||||
return None
|
||||
|
||||
# Call gnu_get_libc_version, which returns a string like "2.5"
|
||||
gnu_get_libc_version.restype = ctypes.c_char_p
|
||||
version_str: str = gnu_get_libc_version()
|
||||
# py2 / py3 compatibility:
|
||||
if not isinstance(version_str, str):
|
||||
version_str = version_str.decode("ascii")
|
||||
|
||||
return version_str
|
||||
|
||||
|
||||
def _glibc_version_string() -> Optional[str]:
|
||||
"""Returns glibc version string, or None if not using glibc."""
|
||||
return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
|
||||
|
||||
|
||||
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
|
||||
"""Parse glibc version.
|
||||
|
||||
We use a regexp instead of str.split because we want to discard any
|
||||
random junk that might come after the minor version -- this might happen
|
||||
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
||||
uses version strings like "2.20-2014.11"). See gh-3588.
|
||||
"""
|
||||
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
||||
if not m:
|
||||
warnings.warn(
|
||||
f"Expected glibc version with 2 components major.minor,"
|
||||
f" got: {version_str}",
|
||||
RuntimeWarning,
|
||||
)
|
||||
return -1, -1
|
||||
return int(m.group("major")), int(m.group("minor"))
|
||||
|
||||
|
||||
@functools.lru_cache()
|
||||
def _get_glibc_version() -> Tuple[int, int]:
|
||||
version_str = _glibc_version_string()
|
||||
if version_str is None:
|
||||
return (-1, -1)
|
||||
return _parse_glibc_version(version_str)
|
||||
|
||||
|
||||
# From PEP 513, PEP 600
|
||||
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
|
||||
sys_glibc = _get_glibc_version()
|
||||
if sys_glibc < version:
|
||||
return False
|
||||
# Check for presence of _manylinux module.
|
||||
try:
|
||||
import _manylinux # noqa
|
||||
except ImportError:
|
||||
return True
|
||||
if hasattr(_manylinux, "manylinux_compatible"):
|
||||
result = _manylinux.manylinux_compatible(version[0], version[1], arch)
|
||||
if result is not None:
|
||||
return bool(result)
|
||||
return True
|
||||
if version == _GLibCVersion(2, 5):
|
||||
if hasattr(_manylinux, "manylinux1_compatible"):
|
||||
return bool(_manylinux.manylinux1_compatible)
|
||||
if version == _GLibCVersion(2, 12):
|
||||
if hasattr(_manylinux, "manylinux2010_compatible"):
|
||||
return bool(_manylinux.manylinux2010_compatible)
|
||||
if version == _GLibCVersion(2, 17):
|
||||
if hasattr(_manylinux, "manylinux2014_compatible"):
|
||||
return bool(_manylinux.manylinux2014_compatible)
|
||||
return True
|
||||
|
||||
|
||||
_LEGACY_MANYLINUX_MAP = {
|
||||
# CentOS 7 w/ glibc 2.17 (PEP 599)
|
||||
(2, 17): "manylinux2014",
|
||||
# CentOS 6 w/ glibc 2.12 (PEP 571)
|
||||
(2, 12): "manylinux2010",
|
||||
# CentOS 5 w/ glibc 2.5 (PEP 513)
|
||||
(2, 5): "manylinux1",
|
||||
}
|
||||
|
||||
|
||||
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
|
||||
"""Generate manylinux tags compatible to the current platform.
|
||||
|
||||
:param archs: Sequence of compatible architectures.
|
||||
The first one shall be the closest to the actual architecture and be the part of
|
||||
platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
|
||||
The ``linux_`` prefix is assumed as a prerequisite for the current platform to
|
||||
be manylinux-compatible.
|
||||
|
||||
:returns: An iterator of compatible manylinux tags.
|
||||
"""
|
||||
if not _have_compatible_abi(sys.executable, archs):
|
||||
return
|
||||
# Oldest glibc to be supported regardless of architecture is (2, 17).
|
||||
too_old_glibc2 = _GLibCVersion(2, 16)
|
||||
if set(archs) & {"x86_64", "i686"}:
|
||||
# On x86/i686 also oldest glibc to be supported is (2, 5).
|
||||
too_old_glibc2 = _GLibCVersion(2, 4)
|
||||
current_glibc = _GLibCVersion(*_get_glibc_version())
|
||||
glibc_max_list = [current_glibc]
|
||||
# We can assume compatibility across glibc major versions.
|
||||
# https://sourceware.org/bugzilla/show_bug.cgi?id=24636
|
||||
#
|
||||
# Build a list of maximum glibc versions so that we can
|
||||
# output the canonical list of all glibc from current_glibc
|
||||
# down to too_old_glibc2, including all intermediary versions.
|
||||
for glibc_major in range(current_glibc.major - 1, 1, -1):
|
||||
glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
|
||||
glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
|
||||
for arch in archs:
|
||||
for glibc_max in glibc_max_list:
|
||||
if glibc_max.major == too_old_glibc2.major:
|
||||
min_minor = too_old_glibc2.minor
|
||||
else:
|
||||
# For other glibc major versions oldest supported is (x, 0).
|
||||
min_minor = -1
|
||||
for glibc_minor in range(glibc_max.minor, min_minor, -1):
|
||||
glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
|
||||
tag = "manylinux_{}_{}".format(*glibc_version)
|
||||
if _is_compatible(arch, glibc_version):
|
||||
yield f"{tag}_{arch}"
|
||||
# Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
|
||||
if glibc_version in _LEGACY_MANYLINUX_MAP:
|
||||
legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
|
||||
if _is_compatible(arch, glibc_version):
|
||||
yield f"{legacy_tag}_{arch}"
|
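
The ``platform_tags`` generator above walks glibc versions downward from the detected one and emits ``manylinux_X_Y_<arch>`` tags, plus the legacy ``manylinux1``/``manylinux2010``/``manylinux2014`` aliases where they apply. A small sketch of driving it, assuming the module is importable as ``packaging._manylinux``; on non-glibc platforms it yields nothing.

```python
# A sketch only: assumes the vendored module is importable as
# "packaging._manylinux" (inside node-gyp it lives under gyp/pylib/packaging).
# The generator yields nothing on musl-based distros, macOS, or Windows.
import platform
from typing import List

from packaging._manylinux import platform_tags


def compatible_manylinux_tags() -> List[str]:
    """Collect the manylinux tags the running interpreter could install."""
    # The first entry must be the suffix that follows "linux_" in the
    # platform tag, e.g. "x86_64" or "aarch64".
    arch = platform.machine()
    return list(platform_tags([arch]))


if __name__ == "__main__":
    for tag in compatible_manylinux_tags():
        print(tag)  # e.g. manylinux_2_35_x86_64 ... manylinux2014_x86_64
```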
83
my-app/node_modules/node-gyp/gyp/pylib/packaging/_musllinux.py
generated
vendored
Executable file
@ -0,0 +1,83 @@
"""PEP 656 support.

This module implements logic to detect if the currently running Python is
linked against musl, and what musl version is used.
"""

import functools
import re
import subprocess
import sys
from typing import Iterator, NamedTuple, Optional, Sequence

from ._elffile import ELFFile


class _MuslVersion(NamedTuple):
    major: int
    minor: int


def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
    lines = [n for n in (n.strip() for n in output.splitlines()) if n]
    if len(lines) < 2 or lines[0][:4] != "musl":
        return None
    m = re.match(r"Version (\d+)\.(\d+)", lines[1])
    if not m:
        return None
    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))


@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader
    """
    try:
        with open(executable, "rb") as f:
            ld = ELFFile(f).interpreter
    except (OSError, TypeError, ValueError):
        return None
    if ld is None or "musl" not in ld:
        return None
    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)


def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:  # Python not dynamically linked against musl.
        return
    for arch in archs:
        for minor in range(sys_musl.minor, -1, -1):
            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"


if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
        print(t, end="\n ")
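
Musl detection works by reading the ``PT_INTERP`` loader path out of the Python binary and then parsing the loader's version banner from stderr. The sketch below feeds ``_parse_musl_version`` a sample banner and lists the resulting tags; calling the underscore-prefixed helper directly is purely for illustration, and the ``packaging._musllinux`` import path is an assumption.

```python
# A sketch only: assumes the vendored module is importable as
# "packaging._musllinux". _parse_musl_version is a private helper; it is
# called here only to show the banner format it expects.
from packaging._musllinux import _parse_musl_version, platform_tags

BANNER = """\
musl libc (x86_64)
Version 1.2.4
Dynamic Program Loader
"""

print(_parse_musl_version(BANNER))  # _MuslVersion(major=1, minor=2)

# On a musl-based distro (e.g. Alpine) this prints musllinux_1_2_x86_64,
# musllinux_1_1_x86_64, ...; on glibc systems it prints nothing at all.
for tag in platform_tags(["x86_64"]):
    print(tag)
```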
359
my-app/node_modules/node-gyp/gyp/pylib/packaging/_parser.py
generated
vendored
Executable file
@ -0,0 +1,359 @@
"""Handwritten parser of dependency specifiers.
|
||||
|
||||
The docstring for each __parse_* function contains ENBF-inspired grammar representing
|
||||
the implementation.
|
||||
"""
|
||||
|
||||
import ast
|
||||
from typing import Any, List, NamedTuple, Optional, Tuple, Union
|
||||
|
||||
from ._tokenizer import DEFAULT_RULES, Tokenizer
|
||||
|
||||
|
||||
class Node:
|
||||
def __init__(self, value: str) -> None:
|
||||
self.value = value
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.value
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self.__class__.__name__}('{self}')>"
|
||||
|
||||
def serialize(self) -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class Variable(Node):
|
||||
def serialize(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
class Value(Node):
|
||||
def serialize(self) -> str:
|
||||
return f'"{self}"'
|
||||
|
||||
|
||||
class Op(Node):
|
||||
def serialize(self) -> str:
|
||||
return str(self)
|
||||
|
||||
|
||||
MarkerVar = Union[Variable, Value]
|
||||
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
|
||||
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
|
||||
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
|
||||
# mypy does not support recursive type definition
|
||||
# https://github.com/python/mypy/issues/731
|
||||
MarkerAtom = Any
|
||||
MarkerList = List[Any]
|
||||
|
||||
|
||||
class ParsedRequirement(NamedTuple):
|
||||
name: str
|
||||
url: str
|
||||
extras: List[str]
|
||||
specifier: str
|
||||
marker: Optional[MarkerList]
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------------------
|
||||
# Recursive descent parser for dependency specifier
|
||||
# --------------------------------------------------------------------------------------
|
||||
def parse_requirement(source: str) -> ParsedRequirement:
|
||||
return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
|
||||
|
||||
|
||||
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
|
||||
"""
|
||||
requirement = WS? IDENTIFIER WS? extras WS? requirement_details
|
||||
"""
|
||||
tokenizer.consume("WS")
|
||||
|
||||
name_token = tokenizer.expect(
|
||||
"IDENTIFIER", expected="package name at the start of dependency specifier"
|
||||
)
|
||||
name = name_token.text
|
||||
tokenizer.consume("WS")
|
||||
|
||||
extras = _parse_extras(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
url, specifier, marker = _parse_requirement_details(tokenizer)
|
||||
tokenizer.expect("END", expected="end of dependency specifier")
|
||||
|
||||
return ParsedRequirement(name, url, extras, specifier, marker)
|
||||
|
||||
|
||||
def _parse_requirement_details(
|
||||
tokenizer: Tokenizer,
|
||||
) -> Tuple[str, str, Optional[MarkerList]]:
|
||||
"""
|
||||
requirement_details = AT URL (WS requirement_marker?)?
|
||||
| specifier WS? (requirement_marker)?
|
||||
"""
|
||||
|
||||
specifier = ""
|
||||
url = ""
|
||||
marker = None
|
||||
|
||||
if tokenizer.check("AT"):
|
||||
tokenizer.read()
|
||||
tokenizer.consume("WS")
|
||||
|
||||
url_start = tokenizer.position
|
||||
url = tokenizer.expect("URL", expected="URL after @").text
|
||||
if tokenizer.check("END", peek=True):
|
||||
return (url, specifier, marker)
|
||||
|
||||
tokenizer.expect("WS", expected="whitespace after URL")
|
||||
|
||||
# The input might end after whitespace.
|
||||
if tokenizer.check("END", peek=True):
|
||||
return (url, specifier, marker)
|
||||
|
||||
marker = _parse_requirement_marker(
|
||||
tokenizer, span_start=url_start, after="URL and whitespace"
|
||||
)
|
||||
else:
|
||||
specifier_start = tokenizer.position
|
||||
specifier = _parse_specifier(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
if tokenizer.check("END", peek=True):
|
||||
return (url, specifier, marker)
|
||||
|
||||
marker = _parse_requirement_marker(
|
||||
tokenizer,
|
||||
span_start=specifier_start,
|
||||
after=(
|
||||
"version specifier"
|
||||
if specifier
|
||||
else "name and no valid version specifier"
|
||||
),
|
||||
)
|
||||
|
||||
return (url, specifier, marker)
|
||||
|
||||
|
||||
def _parse_requirement_marker(
|
||||
tokenizer: Tokenizer, *, span_start: int, after: str
|
||||
) -> MarkerList:
|
||||
"""
|
||||
requirement_marker = SEMICOLON marker WS?
|
||||
"""
|
||||
|
||||
if not tokenizer.check("SEMICOLON"):
|
||||
tokenizer.raise_syntax_error(
|
||||
f"Expected end or semicolon (after {after})",
|
||||
span_start=span_start,
|
||||
)
|
||||
tokenizer.read()
|
||||
|
||||
marker = _parse_marker(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return marker
|
||||
|
||||
|
||||
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
|
||||
"""
|
||||
extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
|
||||
"""
|
||||
if not tokenizer.check("LEFT_BRACKET", peek=True):
|
||||
return []
|
||||
|
||||
with tokenizer.enclosing_tokens(
|
||||
"LEFT_BRACKET",
|
||||
"RIGHT_BRACKET",
|
||||
around="extras",
|
||||
):
|
||||
tokenizer.consume("WS")
|
||||
extras = _parse_extras_list(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return extras
|
||||
|
||||
|
||||
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
|
||||
"""
|
||||
extras_list = identifier (wsp* ',' wsp* identifier)*
|
||||
"""
|
||||
extras: List[str] = []
|
||||
|
||||
if not tokenizer.check("IDENTIFIER"):
|
||||
return extras
|
||||
|
||||
extras.append(tokenizer.read().text)
|
||||
|
||||
while True:
|
||||
tokenizer.consume("WS")
|
||||
if tokenizer.check("IDENTIFIER", peek=True):
|
||||
tokenizer.raise_syntax_error("Expected comma between extra names")
|
||||
elif not tokenizer.check("COMMA"):
|
||||
break
|
||||
|
||||
tokenizer.read()
|
||||
tokenizer.consume("WS")
|
||||
|
||||
extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
|
||||
extras.append(extra_token.text)
|
||||
|
||||
return extras
|
||||
|
||||
|
||||
def _parse_specifier(tokenizer: Tokenizer) -> str:
|
||||
"""
|
||||
specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
|
||||
| WS? version_many WS?
|
||||
"""
|
||||
with tokenizer.enclosing_tokens(
|
||||
"LEFT_PARENTHESIS",
|
||||
"RIGHT_PARENTHESIS",
|
||||
around="version specifier",
|
||||
):
|
||||
tokenizer.consume("WS")
|
||||
parsed_specifiers = _parse_version_many(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return parsed_specifiers
|
||||
|
||||
|
||||
def _parse_version_many(tokenizer: Tokenizer) -> str:
|
||||
"""
|
||||
version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
|
||||
"""
|
||||
parsed_specifiers = ""
|
||||
while tokenizer.check("SPECIFIER"):
|
||||
span_start = tokenizer.position
|
||||
parsed_specifiers += tokenizer.read().text
|
||||
if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
|
||||
tokenizer.raise_syntax_error(
|
||||
".* suffix can only be used with `==` or `!=` operators",
|
||||
span_start=span_start,
|
||||
span_end=tokenizer.position + 1,
|
||||
)
|
||||
if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
|
||||
tokenizer.raise_syntax_error(
|
||||
"Local version label can only be used with `==` or `!=` operators",
|
||||
span_start=span_start,
|
||||
span_end=tokenizer.position,
|
||||
)
|
||||
tokenizer.consume("WS")
|
||||
if not tokenizer.check("COMMA"):
|
||||
break
|
||||
parsed_specifiers += tokenizer.read().text
|
||||
tokenizer.consume("WS")
|
||||
|
||||
return parsed_specifiers
|
||||
|
||||
|
||||
# --------------------------------------------------------------------------------------
|
||||
# Recursive descent parser for marker expression
|
||||
# --------------------------------------------------------------------------------------
|
||||
def parse_marker(source: str) -> MarkerList:
|
||||
return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
|
||||
|
||||
|
||||
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
|
||||
retval = _parse_marker(tokenizer)
|
||||
tokenizer.expect("END", expected="end of marker expression")
|
||||
return retval
|
||||
|
||||
|
||||
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
|
||||
"""
|
||||
marker = marker_atom (BOOLOP marker_atom)+
|
||||
"""
|
||||
expression = [_parse_marker_atom(tokenizer)]
|
||||
while tokenizer.check("BOOLOP"):
|
||||
token = tokenizer.read()
|
||||
expr_right = _parse_marker_atom(tokenizer)
|
||||
expression.extend((token.text, expr_right))
|
||||
return expression
|
||||
|
||||
|
||||
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
|
||||
"""
|
||||
marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
|
||||
| WS? marker_item WS?
|
||||
"""
|
||||
|
||||
tokenizer.consume("WS")
|
||||
if tokenizer.check("LEFT_PARENTHESIS", peek=True):
|
||||
with tokenizer.enclosing_tokens(
|
||||
"LEFT_PARENTHESIS",
|
||||
"RIGHT_PARENTHESIS",
|
||||
around="marker expression",
|
||||
):
|
||||
tokenizer.consume("WS")
|
||||
marker: MarkerAtom = _parse_marker(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
else:
|
||||
marker = _parse_marker_item(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
return marker
|
||||
|
||||
|
||||
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
|
||||
"""
|
||||
marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
|
||||
"""
|
||||
tokenizer.consume("WS")
|
||||
marker_var_left = _parse_marker_var(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
marker_op = _parse_marker_op(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
marker_var_right = _parse_marker_var(tokenizer)
|
||||
tokenizer.consume("WS")
|
||||
return (marker_var_left, marker_op, marker_var_right)
|
||||
|
||||
|
||||
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
|
||||
"""
|
||||
marker_var = VARIABLE | QUOTED_STRING
|
||||
"""
|
||||
if tokenizer.check("VARIABLE"):
|
||||
return process_env_var(tokenizer.read().text.replace(".", "_"))
|
||||
elif tokenizer.check("QUOTED_STRING"):
|
||||
return process_python_str(tokenizer.read().text)
|
||||
else:
|
||||
tokenizer.raise_syntax_error(
|
||||
message="Expected a marker variable or quoted string"
|
||||
)
|
||||
|
||||
|
||||
def process_env_var(env_var: str) -> Variable:
|
||||
if (
|
||||
env_var == "platform_python_implementation"
|
||||
or env_var == "python_implementation"
|
||||
):
|
||||
return Variable("platform_python_implementation")
|
||||
else:
|
||||
return Variable(env_var)
|
||||
|
||||
|
||||
def process_python_str(python_str: str) -> Value:
|
||||
value = ast.literal_eval(python_str)
|
||||
return Value(str(value))
|
||||
|
||||
|
||||
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
|
||||
"""
|
||||
marker_op = IN | NOT IN | OP
|
||||
"""
|
||||
if tokenizer.check("IN"):
|
||||
tokenizer.read()
|
||||
return Op("in")
|
||||
elif tokenizer.check("NOT"):
|
||||
tokenizer.read()
|
||||
tokenizer.expect("WS", expected="whitespace after 'not'")
|
||||
tokenizer.expect("IN", expected="'in' after 'not'")
|
||||
return Op("not in")
|
||||
elif tokenizer.check("OP"):
|
||||
return Op(tokenizer.read().text)
|
||||
else:
|
||||
return tokenizer.raise_syntax_error(
|
||||
"Expected marker operator, one of "
|
||||
"<=, <, !=, ==, >=, >, ~=, ===, in, not in"
|
||||
)
|
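
The recursive-descent parser above turns a PEP 508 dependency specifier into a ``ParsedRequirement`` named tuple of name, url, extras, specifier string, and raw marker list. A sketch, assuming the module is importable as ``packaging._parser``:

```python
# A sketch only: assumes the vendored module is importable as
# "packaging._parser". parse_requirement returns a ParsedRequirement named
# tuple; the marker field is the raw parsed list, not a Marker object.
from packaging._parser import parse_requirement

req = parse_requirement(
    'requests[security,socks]>=2.8.1,!=2.8.2; python_version > "3.7"'
)

print(req.name)        # requests
print(req.extras)      # ['security', 'socks']
print(req.specifier)   # >=2.8.1,!=2.8.2
print(repr(req.url))   # '' (no "name @ url" form was used here)
print(req.marker)      # [(<Variable('python_version')>, <Op('>')>, <Value('3.7')>)]
```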
61
my-app/node_modules/node-gyp/gyp/pylib/packaging/_structures.py
generated
vendored
Executable file
@ -0,0 +1,61 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.


class InfinityType:
    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


Infinity = InfinityType()


class NegativeInfinityType:
    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


NegativeInfinity = NegativeInfinityType()
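
These two sentinels compare above and below every other value (and negate into each other), which is how packaging pads missing segments in version sort keys, for example so that a release without a pre-release marker sorts after its pre-releases. A small sketch, assuming the module is importable as ``packaging._structures``:

```python
# A sketch only: assumes the vendored module is importable as
# "packaging._structures". The sentinels compare above/below anything,
# which lets tuple-based sort keys pad absent segments uniformly.
from packaging._structures import Infinity, NegativeInfinity

print(Infinity > 10**9)               # True: sorts after everything
print(NegativeInfinity < -(10**9))    # True: sorts before everything
print(-Infinity is NegativeInfinity)  # True: negation returns the other sentinel

# Hypothetical sort keys for "1.0a1" vs "1.0": the missing pre-release
# segment of "1.0" is padded with Infinity so the final release sorts last.
keys = [(1, 0, ("a", 1)), (1, 0, Infinity)]
print(sorted(keys))  # [(1, 0, ('a', 1)), (1, 0, Infinity)]
```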
192
my-app/node_modules/node-gyp/gyp/pylib/packaging/_tokenizer.py
generated
vendored
Executable file
@ -0,0 +1,192 @@
import contextlib
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
|
||||
|
||||
from .specifiers import Specifier
|
||||
|
||||
|
||||
@dataclass
|
||||
class Token:
|
||||
name: str
|
||||
text: str
|
||||
position: int
|
||||
|
||||
|
||||
class ParserSyntaxError(Exception):
|
||||
"""The provided source text could not be parsed correctly."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message: str,
|
||||
*,
|
||||
source: str,
|
||||
span: Tuple[int, int],
|
||||
) -> None:
|
||||
self.span = span
|
||||
self.message = message
|
||||
self.source = source
|
||||
|
||||
super().__init__()
|
||||
|
||||
def __str__(self) -> str:
|
||||
marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
|
||||
return "\n ".join([self.message, self.source, marker])
|
||||
|
||||
|
||||
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
|
||||
"LEFT_PARENTHESIS": r"\(",
|
||||
"RIGHT_PARENTHESIS": r"\)",
|
||||
"LEFT_BRACKET": r"\[",
|
||||
"RIGHT_BRACKET": r"\]",
|
||||
"SEMICOLON": r";",
|
||||
"COMMA": r",",
|
||||
"QUOTED_STRING": re.compile(
|
||||
r"""
|
||||
(
|
||||
('[^']*')
|
||||
|
|
||||
("[^"]*")
|
||||
)
|
||||
""",
|
||||
re.VERBOSE,
|
||||
),
|
||||
"OP": r"(===|==|~=|!=|<=|>=|<|>)",
|
||||
"BOOLOP": r"\b(or|and)\b",
|
||||
"IN": r"\bin\b",
|
||||
"NOT": r"\bnot\b",
|
||||
"VARIABLE": re.compile(
|
||||
r"""
|
||||
\b(
|
||||
python_version
|
||||
|python_full_version
|
||||
|os[._]name
|
||||
|sys[._]platform
|
||||
|platform_(release|system)
|
||||
|platform[._](version|machine|python_implementation)
|
||||
|python_implementation
|
||||
|implementation_(name|version)
|
||||
|extra
|
||||
)\b
|
||||
""",
|
||||
re.VERBOSE,
|
||||
),
|
||||
"SPECIFIER": re.compile(
|
||||
Specifier._operator_regex_str + Specifier._version_regex_str,
|
||||
re.VERBOSE | re.IGNORECASE,
|
||||
),
|
||||
"AT": r"\@",
|
||||
"URL": r"[^ \t]+",
|
||||
"IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
|
||||
"VERSION_PREFIX_TRAIL": r"\.\*",
|
||||
"VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
|
||||
"WS": r"[ \t]+",
|
||||
"END": r"$",
|
||||
}
|
||||
|
||||
|
||||
class Tokenizer:
|
||||
"""Context-sensitive token parsing.
|
||||
|
||||
Provides methods to examine the input stream to check whether the next token
|
||||
matches.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
source: str,
|
||||
*,
|
||||
rules: "Dict[str, Union[str, re.Pattern[str]]]",
|
||||
) -> None:
|
||||
self.source = source
|
||||
self.rules: Dict[str, re.Pattern[str]] = {
|
||||
name: re.compile(pattern) for name, pattern in rules.items()
|
||||
}
|
||||
self.next_token: Optional[Token] = None
|
||||
self.position = 0
|
||||
|
||||
def consume(self, name: str) -> None:
|
||||
"""Move beyond provided token name, if at current position."""
|
||||
if self.check(name):
|
||||
self.read()
|
||||
|
||||
def check(self, name: str, *, peek: bool = False) -> bool:
|
||||
"""Check whether the next token has the provided name.
|
||||
|
||||
By default, if the check succeeds, the token *must* be read before
|
||||
another check. If `peek` is set to `True`, the token is not loaded and
|
||||
would need to be checked again.
|
||||
"""
|
||||
assert (
|
||||
self.next_token is None
|
||||
), f"Cannot check for {name!r}, already have {self.next_token!r}"
|
||||
assert name in self.rules, f"Unknown token name: {name!r}"
|
||||
|
||||
expression = self.rules[name]
|
||||
|
||||
match = expression.match(self.source, self.position)
|
||||
if match is None:
|
||||
return False
|
||||
if not peek:
|
||||
self.next_token = Token(name, match[0], self.position)
|
||||
return True
|
||||
|
||||
def expect(self, name: str, *, expected: str) -> Token:
|
||||
"""Expect a certain token name next, failing with a syntax error otherwise.
|
||||
|
||||
The token is *not* read.
|
||||
"""
|
||||
if not self.check(name):
|
||||
raise self.raise_syntax_error(f"Expected {expected}")
|
||||
return self.read()
|
||||
|
||||
def read(self) -> Token:
|
||||
"""Consume the next token and return it."""
|
||||
token = self.next_token
|
||||
assert token is not None
|
||||
|
||||
self.position += len(token.text)
|
||||
self.next_token = None
|
||||
|
||||
return token
|
||||
|
||||
def raise_syntax_error(
|
||||
self,
|
||||
message: str,
|
||||
*,
|
||||
span_start: Optional[int] = None,
|
||||
span_end: Optional[int] = None,
|
||||
) -> NoReturn:
|
||||
"""Raise ParserSyntaxError at the given position."""
|
||||
span = (
|
||||
self.position if span_start is None else span_start,
|
||||
self.position if span_end is None else span_end,
|
||||
)
|
||||
raise ParserSyntaxError(
|
||||
message,
|
||||
source=self.source,
|
||||
span=span,
|
||||
)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def enclosing_tokens(
|
||||
self, open_token: str, close_token: str, *, around: str
|
||||
) -> Iterator[None]:
|
||||
if self.check(open_token):
|
||||
open_position = self.position
|
||||
self.read()
|
||||
else:
|
||||
open_position = None
|
||||
|
||||
yield
|
||||
|
||||
if open_position is None:
|
||||
return
|
||||
|
||||
if not self.check(close_token):
|
||||
self.raise_syntax_error(
|
||||
f"Expected matching {close_token} for {open_token}, after {around}",
|
||||
span_start=open_position,
|
||||
)
|
||||
|
||||
self.read()
|
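
The tokenizer is driven rule by rule: ``check`` tests one named rule at the current position, ``read`` consumes the token that ``check`` just matched, and ``expect`` raises ``ParserSyntaxError`` with a caret marker when the rule does not match. A rough sketch, assuming the module is importable as ``packaging._tokenizer``; the values shown in the comments are illustrative.

```python
# A sketch only: assumes the vendored module is importable as
# "packaging._tokenizer" and that its sibling "specifiers" module is
# available (DEFAULT_RULES builds the SPECIFIER rule from it).
from packaging._tokenizer import DEFAULT_RULES, Tokenizer

t = Tokenizer('requests>=2.8.1 ; os_name == "posix"', rules=DEFAULT_RULES)

assert t.check("IDENTIFIER")
print(t.read())       # Token(name='IDENTIFIER', text='requests', position=0)

assert t.check("SPECIFIER")
print(t.read().text)  # >=2.8.1

t.consume("WS")       # skip optional whitespace before the marker separator
assert t.check("SEMICOLON")
t.read()
t.consume("WS")

# expect() fails loudly with a pointer into the source if the rule mismatches.
print(t.expect("VARIABLE", expected="marker variable").text)  # os_name
```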
252
my-app/node_modules/node-gyp/gyp/pylib/packaging/markers.py
generated
vendored
Executable file
@ -0,0 +1,252 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
import operator
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
|
||||
from ._parser import (
|
||||
MarkerAtom,
|
||||
MarkerList,
|
||||
Op,
|
||||
Value,
|
||||
Variable,
|
||||
parse_marker as _parse_marker,
|
||||
)
|
||||
from ._tokenizer import ParserSyntaxError
|
||||
from .specifiers import InvalidSpecifier, Specifier
|
||||
from .utils import canonicalize_name
|
||||
|
||||
__all__ = [
|
||||
"InvalidMarker",
|
||||
"UndefinedComparison",
|
||||
"UndefinedEnvironmentName",
|
||||
"Marker",
|
||||
"default_environment",
|
||||
]
|
||||
|
||||
Operator = Callable[[str, str], bool]
|
||||
|
||||
|
||||
class InvalidMarker(ValueError):
|
||||
"""
|
||||
An invalid marker was found, users should refer to PEP 508.
|
||||
"""
|
||||
|
||||
|
||||
class UndefinedComparison(ValueError):
|
||||
"""
|
||||
An invalid operation was attempted on a value that doesn't support it.
|
||||
"""
|
||||
|
||||
|
||||
class UndefinedEnvironmentName(ValueError):
|
||||
"""
|
||||
A name was attempted to be used that does not exist inside of the
|
||||
environment.
|
||||
"""
|
||||
|
||||
|
||||
def _normalize_extra_values(results: Any) -> Any:
|
||||
"""
|
||||
Normalize extra values.
|
||||
"""
|
||||
if isinstance(results[0], tuple):
|
||||
lhs, op, rhs = results[0]
|
||||
if isinstance(lhs, Variable) and lhs.value == "extra":
|
||||
normalized_extra = canonicalize_name(rhs.value)
|
||||
rhs = Value(normalized_extra)
|
||||
elif isinstance(rhs, Variable) and rhs.value == "extra":
|
||||
normalized_extra = canonicalize_name(lhs.value)
|
||||
lhs = Value(normalized_extra)
|
||||
results[0] = lhs, op, rhs
|
||||
return results
|
||||
|
||||
|
||||
def _format_marker(
|
||||
marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
|
||||
) -> str:
|
||||
|
||||
assert isinstance(marker, (list, tuple, str))
|
||||
|
||||
# Sometimes we have a structure like [[...]] which is a single item list
|
||||
# where the single item is itself it's own list. In that case we want skip
|
||||
# the rest of this function so that we don't get extraneous () on the
|
||||
# outside.
|
||||
if (
|
||||
isinstance(marker, list)
|
||||
and len(marker) == 1
|
||||
and isinstance(marker[0], (list, tuple))
|
||||
):
|
||||
return _format_marker(marker[0])
|
||||
|
||||
if isinstance(marker, list):
|
||||
inner = (_format_marker(m, first=False) for m in marker)
|
||||
if first:
|
||||
return " ".join(inner)
|
||||
else:
|
||||
return "(" + " ".join(inner) + ")"
|
||||
elif isinstance(marker, tuple):
|
||||
return " ".join([m.serialize() for m in marker])
|
||||
else:
|
||||
return marker
|
||||
|
||||
|
||||
_operators: Dict[str, Operator] = {
|
||||
"in": lambda lhs, rhs: lhs in rhs,
|
||||
"not in": lambda lhs, rhs: lhs not in rhs,
|
||||
"<": operator.lt,
|
||||
"<=": operator.le,
|
||||
"==": operator.eq,
|
||||
"!=": operator.ne,
|
||||
">=": operator.ge,
|
||||
">": operator.gt,
|
||||
}
|
||||
|
||||
|
||||
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
|
||||
try:
|
||||
spec = Specifier("".join([op.serialize(), rhs]))
|
||||
except InvalidSpecifier:
|
||||
pass
|
||||
else:
|
||||
return spec.contains(lhs, prereleases=True)
|
||||
|
||||
oper: Optional[Operator] = _operators.get(op.serialize())
|
||||
if oper is None:
|
||||
raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
|
||||
|
||||
return oper(lhs, rhs)
|
||||
|
||||
|
||||
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
|
||||
# PEP 685 – Comparison of extra names for optional distribution dependencies
|
||||
# https://peps.python.org/pep-0685/
|
||||
# > When comparing extra names, tools MUST normalize the names being
|
||||
# > compared using the semantics outlined in PEP 503 for names
|
||||
if key == "extra":
|
||||
return tuple(canonicalize_name(v) for v in values)
|
||||
|
||||
# other environment markers don't have such standards
|
||||
return values
|
||||
|
||||
|
||||
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
|
||||
groups: List[List[bool]] = [[]]
|
||||
|
||||
for marker in markers:
|
||||
assert isinstance(marker, (list, tuple, str))
|
||||
|
||||
if isinstance(marker, list):
|
||||
groups[-1].append(_evaluate_markers(marker, environment))
|
||||
elif isinstance(marker, tuple):
|
||||
lhs, op, rhs = marker
|
||||
|
||||
if isinstance(lhs, Variable):
|
||||
environment_key = lhs.value
|
||||
lhs_value = environment[environment_key]
|
||||
rhs_value = rhs.value
|
||||
else:
|
||||
lhs_value = lhs.value
|
||||
environment_key = rhs.value
|
||||
rhs_value = environment[environment_key]
|
||||
|
||||
lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
|
||||
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
|
||||
else:
|
||||
assert marker in ["and", "or"]
|
||||
if marker == "or":
|
||||
groups.append([])
|
||||
|
||||
return any(all(item) for item in groups)
|
||||
|
||||
|
||||
def format_full_version(info: "sys._version_info") -> str:
|
||||
version = "{0.major}.{0.minor}.{0.micro}".format(info)
|
||||
kind = info.releaselevel
|
||||
if kind != "final":
|
||||
version += kind[0] + str(info.serial)
|
||||
return version
|
||||
|
||||
|
||||
def default_environment() -> Dict[str, str]:
|
||||
iver = format_full_version(sys.implementation.version)
|
||||
implementation_name = sys.implementation.name
|
||||
return {
|
||||
"implementation_name": implementation_name,
|
||||
"implementation_version": iver,
|
||||
"os_name": os.name,
|
||||
"platform_machine": platform.machine(),
|
||||
"platform_release": platform.release(),
|
||||
"platform_system": platform.system(),
|
||||
"platform_version": platform.version(),
|
||||
"python_full_version": platform.python_version(),
|
||||
"platform_python_implementation": platform.python_implementation(),
|
||||
"python_version": ".".join(platform.python_version_tuple()[:2]),
|
||||
"sys_platform": sys.platform,
|
||||
}
|
||||
|
||||
|
||||
class Marker:
|
||||
def __init__(self, marker: str) -> None:
|
||||
# Note: We create a Marker object without calling this constructor in
|
||||
# packaging.requirements.Requirement. If any additional logic is
|
||||
# added here, make sure to mirror/adapt Requirement.
|
||||
try:
|
||||
self._markers = _normalize_extra_values(_parse_marker(marker))
|
||||
# The attribute `_markers` can be described in terms of a recursive type:
|
||||
# MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
|
||||
#
|
||||
# For example, the following expression:
|
||||
# python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
|
||||
#
|
||||
# is parsed into:
|
||||
# [
|
||||
# (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
|
||||
# 'and',
|
||||
# [
|
||||
# (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
|
||||
# 'or',
|
||||
# (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
|
||||
# ]
|
||||
# ]
|
||||
except ParserSyntaxError as e:
|
||||
raise InvalidMarker(str(e)) from e
|
||||
|
||||
def __str__(self) -> str:
|
||||
return _format_marker(self._markers)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<Marker('{self}')>"
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash((self.__class__.__name__, str(self)))
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
if not isinstance(other, Marker):
|
||||
return NotImplemented
|
||||
|
||||
return str(self) == str(other)
|
||||
|
||||
def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
|
||||
"""Evaluate a marker.
|
||||
|
||||
Return the boolean from evaluating the given marker against the
|
||||
environment. environment is an optional argument to override all or
|
||||
part of the determined environment.
|
||||
|
||||
The environment is determined from the current Python process.
|
||||
"""
|
||||
current_environment = default_environment()
|
||||
current_environment["extra"] = ""
|
||||
if environment is not None:
|
||||
current_environment.update(environment)
|
||||
# The API used to allow setting extra to None. We need to handle this
|
||||
# case for backwards compatibility.
|
||||
if current_environment["extra"] is None:
|
||||
current_environment["extra"] = ""
|
||||
|
||||
return _evaluate_markers(self._markers, current_environment)
|
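
``Marker`` wraps the parsed marker list and evaluates it against either the running interpreter's environment or an explicit override dict. A sketch using the public ``packaging.markers`` API (the vendored copy ships under ``gyp/pylib/packaging``, so the import path there differs):

```python
# A sketch only: assumes the packaging package is importable as "packaging".
# Marker.evaluate() probes the running interpreter by default; passing a dict
# overrides individual environment keys, including "extra".
from packaging.markers import Marker

m = Marker('python_version >= "3.8" and os_name == "posix"')

print(str(m))        # python_version >= "3.8" and os_name == "posix"
print(m.evaluate())  # True/False for the interpreter running this sketch

# Override parts of the environment instead of probing the current process.
print(m.evaluate({"python_version": "3.7", "os_name": "posix"}))  # False

# "extra" markers are normalized per PEP 685 before comparison.
extra_marker = Marker('extra == "tests"')
print(extra_marker.evaluate({"extra": "tests"}))  # True
```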
825
my-app/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
generated
vendored
Executable file
@ -0,0 +1,825 @@
import email.feedparser
|
||||
import email.header
|
||||
import email.message
|
||||
import email.parser
|
||||
import email.policy
|
||||
import sys
|
||||
import typing
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
Generic,
|
||||
List,
|
||||
Optional,
|
||||
Tuple,
|
||||
Type,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from . import requirements, specifiers, utils, version as version_module
|
||||
|
||||
T = typing.TypeVar("T")
|
||||
if sys.version_info[:2] >= (3, 8): # pragma: no cover
|
||||
from typing import Literal, TypedDict
|
||||
else: # pragma: no cover
|
||||
if typing.TYPE_CHECKING:
|
||||
from typing_extensions import Literal, TypedDict
|
||||
else:
|
||||
try:
|
||||
from typing_extensions import Literal, TypedDict
|
||||
except ImportError:
|
||||
|
||||
class Literal:
|
||||
def __init_subclass__(*_args, **_kwargs):
|
||||
pass
|
||||
|
||||
class TypedDict:
|
||||
def __init_subclass__(*_args, **_kwargs):
|
||||
pass
|
||||
|
||||
|
||||
try:
|
||||
ExceptionGroup
|
||||
except NameError: # pragma: no cover
|
||||
|
||||
class ExceptionGroup(Exception): # noqa: N818
|
||||
"""A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.
|
||||
|
||||
If :external:exc:`ExceptionGroup` is already defined by Python itself,
|
||||
that version is used instead.
|
||||
"""
|
||||
|
||||
message: str
|
||||
exceptions: List[Exception]
|
||||
|
||||
def __init__(self, message: str, exceptions: List[Exception]) -> None:
|
||||
self.message = message
|
||||
self.exceptions = exceptions
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
|
||||
|
||||
else: # pragma: no cover
|
||||
ExceptionGroup = ExceptionGroup
|
||||
|
||||
|
||||
class InvalidMetadata(ValueError):
|
||||
"""A metadata field contains invalid data."""
|
||||
|
||||
field: str
|
||||
"""The name of the field that contains invalid data."""
|
||||
|
||||
def __init__(self, field: str, message: str) -> None:
|
||||
self.field = field
|
||||
super().__init__(message)
|
||||
|
||||
|
||||
# The RawMetadata class attempts to make as few assumptions about the underlying
|
||||
# serialization formats as possible. The idea is that as long as a serialization
|
||||
# formats offer some very basic primitives in *some* way then we can support
|
||||
# serializing to and from that format.
|
||||
class RawMetadata(TypedDict, total=False):
|
||||
"""A dictionary of raw core metadata.
|
||||
|
||||
Each field in core metadata maps to a key of this dictionary (when data is
|
||||
provided). The key is lower-case and underscores are used instead of dashes
|
||||
compared to the equivalent core metadata field. Any core metadata field that
|
||||
can be specified multiple times or can hold multiple values in a single
|
||||
field have a key with a plural name. See :class:`Metadata` whose attributes
|
||||
match the keys of this dictionary.
|
||||
|
||||
Core metadata fields that can be specified multiple times are stored as a
|
||||
list or dict depending on which is appropriate for the field. Any fields
|
||||
which hold multiple values in a single field are stored as a list.
|
||||
|
||||
"""
|
||||
|
||||
# Metadata 1.0 - PEP 241
|
||||
metadata_version: str
|
||||
name: str
|
||||
version: str
|
||||
platforms: List[str]
|
||||
summary: str
|
||||
description: str
|
||||
keywords: List[str]
|
||||
home_page: str
|
||||
author: str
|
||||
author_email: str
|
||||
license: str
|
||||
|
||||
# Metadata 1.1 - PEP 314
|
||||
supported_platforms: List[str]
|
||||
download_url: str
|
||||
classifiers: List[str]
|
||||
requires: List[str]
|
||||
provides: List[str]
|
||||
obsoletes: List[str]
|
||||
|
||||
# Metadata 1.2 - PEP 345
|
||||
maintainer: str
|
||||
maintainer_email: str
|
||||
requires_dist: List[str]
|
||||
provides_dist: List[str]
|
||||
obsoletes_dist: List[str]
|
||||
requires_python: str
|
||||
requires_external: List[str]
|
||||
project_urls: Dict[str, str]
|
||||
|
||||
# Metadata 2.0
|
||||
# PEP 426 attempted to completely revamp the metadata format
|
||||
# but got stuck without ever being able to build consensus on
|
||||
# it and ultimately ended up withdrawn.
|
||||
#
|
||||
# However, a number of tools had started emitting METADATA with
|
||||
# `2.0` Metadata-Version, so for historical reasons, this version
|
||||
# was skipped.
|
||||
|
||||
# Metadata 2.1 - PEP 566
|
||||
description_content_type: str
|
||||
provides_extra: List[str]
|
||||
|
||||
# Metadata 2.2 - PEP 643
|
||||
dynamic: List[str]
|
||||
|
||||
# Metadata 2.3 - PEP 685
|
||||
# No new fields were added in PEP 685, just some edge case were
|
||||
# tightened up to provide better interoptability.
|
||||
|
||||
|
||||
_STRING_FIELDS = {
|
||||
"author",
|
||||
"author_email",
|
||||
"description",
|
||||
"description_content_type",
|
||||
"download_url",
|
||||
"home_page",
|
||||
"license",
|
||||
"maintainer",
|
||||
"maintainer_email",
|
||||
"metadata_version",
|
||||
"name",
|
||||
"requires_python",
|
||||
"summary",
|
||||
"version",
|
||||
}
|
||||
|
||||
_LIST_FIELDS = {
|
||||
"classifiers",
|
||||
"dynamic",
|
||||
"obsoletes",
|
||||
"obsoletes_dist",
|
||||
"platforms",
|
||||
"provides",
|
||||
"provides_dist",
|
||||
"provides_extra",
|
||||
"requires",
|
||||
"requires_dist",
|
||||
"requires_external",
|
||||
"supported_platforms",
|
||||
}
|
||||
|
||||
_DICT_FIELDS = {
|
||||
"project_urls",
|
||||
}
|
||||
|
||||
|
||||
def _parse_keywords(data: str) -> List[str]:
|
||||
"""Split a string of comma-separate keyboards into a list of keywords."""
|
||||
return [k.strip() for k in data.split(",")]
|
||||
|
||||
|
||||
def _parse_project_urls(data: List[str]) -> Dict[str, str]:
|
||||
"""Parse a list of label/URL string pairings separated by a comma."""
|
||||
urls = {}
|
||||
for pair in data:
|
||||
# Our logic is slightly tricky here as we want to try and do
|
||||
# *something* reasonable with malformed data.
|
||||
#
|
||||
# The main thing that we have to worry about, is data that does
|
||||
# not have a ',' at all to split the label from the Value. There
|
||||
# isn't a singular right answer here, and we will fail validation
|
||||
# later on (if the caller is validating) so it doesn't *really*
|
||||
# matter, but since the missing value has to be an empty str
|
||||
# and our return value is dict[str, str], if we let the key
|
||||
# be the missing value, then they'd have multiple '' values that
|
||||
# overwrite each other in a accumulating dict.
|
||||
#
|
||||
# The other potentional issue is that it's possible to have the
|
||||
# same label multiple times in the metadata, with no solid "right"
|
||||
# answer with what to do in that case. As such, we'll do the only
|
||||
# thing we can, which is treat the field as unparseable and add it
|
||||
# to our list of unparsed fields.
|
||||
parts = [p.strip() for p in pair.split(",", 1)]
|
||||
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
|
||||
|
||||
# TODO: The spec doesn't say anything about if the keys should be
|
||||
# considered case sensitive or not... logically they should
|
||||
# be case-preserving and case-insensitive, but doing that
|
||||
# would open up more cases where we might have duplicate
|
||||
# entries.
|
||||
label, url = parts
|
||||
if label in urls:
|
||||
# The label already exists in our set of urls, so this field
|
||||
# is unparseable, and we can just add the whole thing to our
|
||||
# unparseable data and stop processing it.
|
||||
raise KeyError("duplicate labels in project urls")
|
||||
urls[label] = url
|
||||
|
||||
return urls
|
||||
|
||||
|
||||
def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
|
||||
"""Get the body of the message."""
|
||||
# If our source is a str, then our caller has managed encodings for us,
|
||||
# and we don't need to deal with it.
|
||||
if isinstance(source, str):
|
||||
payload: str = msg.get_payload()
|
||||
return payload
|
||||
# If our source is a bytes, then we're managing the encoding and we need
|
||||
# to deal with it.
|
||||
else:
|
||||
bpayload: bytes = msg.get_payload(decode=True)
|
||||
try:
|
||||
return bpayload.decode("utf8", "strict")
|
||||
except UnicodeDecodeError:
|
||||
raise ValueError("payload in an invalid encoding")
|
||||
|
||||
|
||||
# The various parse_FORMAT functions here are intended to be as lenient as
|
||||
# possible in their parsing, while still returning a correctly typed
|
||||
# RawMetadata.
|
||||
#
|
||||
# To aid in this, we also generally want to do as little touching of the
|
||||
# data as possible, except where there are possibly some historic holdovers
|
||||
# that make valid data awkward to work with.
|
||||
#
|
||||
# While this is a lower level, intermediate format than our ``Metadata``
|
||||
# class, some light touch ups can make a massive difference in usability.
|
||||
|
||||
# Map METADATA fields to RawMetadata.
|
||||
_EMAIL_TO_RAW_MAPPING = {
|
||||
"author": "author",
|
||||
"author-email": "author_email",
|
||||
"classifier": "classifiers",
|
||||
"description": "description",
|
||||
"description-content-type": "description_content_type",
|
||||
"download-url": "download_url",
|
||||
"dynamic": "dynamic",
|
||||
"home-page": "home_page",
|
||||
"keywords": "keywords",
|
||||
"license": "license",
|
||||
"maintainer": "maintainer",
|
||||
"maintainer-email": "maintainer_email",
|
||||
"metadata-version": "metadata_version",
|
||||
"name": "name",
|
||||
"obsoletes": "obsoletes",
|
||||
"obsoletes-dist": "obsoletes_dist",
|
||||
"platform": "platforms",
|
||||
"project-url": "project_urls",
|
||||
"provides": "provides",
|
||||
"provides-dist": "provides_dist",
|
||||
"provides-extra": "provides_extra",
|
||||
"requires": "requires",
|
||||
"requires-dist": "requires_dist",
|
||||
"requires-external": "requires_external",
|
||||
"requires-python": "requires_python",
|
||||
"summary": "summary",
|
||||
"supported-platform": "supported_platforms",
|
||||
"version": "version",
|
||||
}
|
||||
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
|
||||
|
||||
|
||||
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
|
||||
"""Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).
|
||||
|
||||
This function returns a two-item tuple of dicts. The first dict is of
|
||||
recognized fields from the core metadata specification. Fields that can be
|
||||
parsed and translated into Python's built-in types are converted
|
||||
appropriately. All other fields are left as-is. Fields that are allowed to
|
||||
appear multiple times are stored as lists.
|
||||
|
||||
The second dict contains all other fields from the metadata. This includes
|
||||
any unrecognized fields. It also includes any fields which are expected to
|
||||
be parsed into a built-in type but were not formatted appropriately. Finally,
|
||||
any fields that are expected to appear only once but are repeated are
|
||||
included in this dict.
|
||||
|
||||
"""
|
||||
raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
|
||||
unparsed: Dict[str, List[str]] = {}
|
||||
|
||||
if isinstance(data, str):
|
||||
parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
|
||||
else:
|
||||
parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)
|
||||
|
||||
# We have to wrap parsed.keys() in a set, because in the case of multiple
|
||||
# values for a key (a list), the key will appear multiple times in the
|
||||
# list of keys, but we're avoiding that by using get_all().
|
||||
for name in frozenset(parsed.keys()):
|
||||
# Header names in RFC are case insensitive, so we'll normalize to all
|
||||
# lower case to make comparisons easier.
|
||||
name = name.lower()
|
||||
|
||||
# We use get_all() here, even for fields that aren't multiple use,
|
||||
# because otherwise someone could have e.g. two Name fields, and we
|
||||
# would just silently ignore it rather than doing something about it.
|
||||
headers = parsed.get_all(name) or []
|
||||
|
||||
# The way the email module works when parsing bytes is that it
|
||||
# unconditionally decodes the bytes as ascii using the surrogateescape
|
||||
        # handler. When you pull that data back out (such as with get_all()),
|
||||
# it looks to see if the str has any surrogate escapes, and if it does
|
||||
# it wraps it in a Header object instead of returning the string.
|
||||
#
|
||||
# As such, we'll look for those Header objects, and fix up the encoding.
|
||||
value = []
|
||||
# Flag if we have run into any issues processing the headers, thus
|
||||
# signalling that the data belongs in 'unparsed'.
|
||||
valid_encoding = True
|
||||
for h in headers:
|
||||
# It's unclear if this can return more types than just a Header or
|
||||
# a str, so we'll just assert here to make sure.
|
||||
assert isinstance(h, (email.header.Header, str))
|
||||
|
||||
# If it's a header object, we need to do our little dance to get
|
||||
# the real data out of it. In cases where there is invalid data
|
||||
# we're going to end up with mojibake, but there's no obvious, good
|
||||
# way around that without reimplementing parts of the Header object
|
||||
# ourselves.
|
||||
#
|
||||
            # That should be fine since, if mojibake happens, this key is
|
||||
# going into the unparsed dict anyways.
|
||||
if isinstance(h, email.header.Header):
|
||||
                # The Header object stores its data as chunks, and each chunk
|
||||
# can be independently encoded, so we'll need to check each
|
||||
# of them.
|
||||
chunks: List[Tuple[bytes, Optional[str]]] = []
|
||||
for bin, encoding in email.header.decode_header(h):
|
||||
try:
|
||||
bin.decode("utf8", "strict")
|
||||
except UnicodeDecodeError:
|
||||
# Enable mojibake.
|
||||
encoding = "latin1"
|
||||
valid_encoding = False
|
||||
else:
|
||||
encoding = "utf8"
|
||||
chunks.append((bin, encoding))
|
||||
|
||||
# Turn our chunks back into a Header object, then let that
|
||||
# Header object do the right thing to turn them into a
|
||||
# string for us.
|
||||
value.append(str(email.header.make_header(chunks)))
|
||||
# This is already a string, so just add it.
|
||||
else:
|
||||
value.append(h)
|
||||
|
||||
# We've processed all of our values to get them into a list of str,
|
||||
# but we may have mojibake data, in which case this is an unparsed
|
||||
# field.
|
||||
if not valid_encoding:
|
||||
unparsed[name] = value
|
||||
continue
|
||||
|
||||
raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
|
||||
if raw_name is None:
|
||||
# This is a bit of a weird situation, we've encountered a key that
|
||||
# we don't know what it means, so we don't know whether it's meant
|
||||
# to be a list or not.
|
||||
#
|
||||
# Since we can't really tell one way or another, we'll just leave it
|
||||
# as a list, even though it may be a single item list, because that's
|
||||
# what makes the most sense for email headers.
|
||||
unparsed[name] = value
|
||||
continue
|
||||
|
||||
# If this is one of our string fields, then we'll check to see if our
|
||||
# value is a list of a single item. If it is then we'll assume that
|
||||
# it was emitted as a single string, and unwrap the str from inside
|
||||
# the list.
|
||||
#
|
||||
# If it's any other kind of data, then we haven't the faintest clue
|
||||
# what we should parse it as, and we have to just add it to our list
|
||||
# of unparsed stuff.
|
||||
if raw_name in _STRING_FIELDS and len(value) == 1:
|
||||
raw[raw_name] = value[0]
|
||||
# If this is one of our list of string fields, then we can just assign
|
||||
# the value, since email *only* has strings, and our get_all() call
|
||||
# above ensures that this is a list.
|
||||
elif raw_name in _LIST_FIELDS:
|
||||
raw[raw_name] = value
|
||||
# Special Case: Keywords
|
||||
# The keywords field is implemented in the metadata spec as a str,
|
||||
# but it conceptually is a list of strings, and is serialized using
|
||||
# ", ".join(keywords), so we'll do some light data massaging to turn
|
||||
# this into what it logically is.
|
||||
elif raw_name == "keywords" and len(value) == 1:
|
||||
raw[raw_name] = _parse_keywords(value[0])
|
||||
# Special Case: Project-URL
|
||||
# The project urls is implemented in the metadata spec as a list of
|
||||
# specially-formatted strings that represent a key and a value, which
|
||||
# is fundamentally a mapping, however the email format doesn't support
|
||||
# mappings in a sane way, so it was crammed into a list of strings
|
||||
# instead.
|
||||
#
|
||||
# We will do a little light data massaging to turn this into a map as
|
||||
# it logically should be.
|
||||
elif raw_name == "project_urls":
|
||||
try:
|
||||
raw[raw_name] = _parse_project_urls(value)
|
||||
except KeyError:
|
||||
unparsed[name] = value
|
||||
# Nothing that we've done has managed to parse this, so it'll just
|
||||
# throw it in our unparseable data and move on.
|
||||
else:
|
||||
unparsed[name] = value
|
||||
|
||||
# We need to support getting the Description from the message payload in
|
||||
    # addition to getting it from the headers. This does mean, though, there
|
||||
# is the possibility of it being set both ways, in which case we put both
|
||||
# in 'unparsed' since we don't know which is right.
|
||||
try:
|
||||
payload = _get_payload(parsed, data)
|
||||
except ValueError:
|
||||
unparsed.setdefault("description", []).append(
|
||||
parsed.get_payload(decode=isinstance(data, bytes))
|
||||
)
|
||||
else:
|
||||
if payload:
|
||||
# Check to see if we've already got a description, if so then both
|
||||
# it, and this body move to unparseable.
|
||||
if "description" in raw:
|
||||
description_header = cast(str, raw.pop("description"))
|
||||
unparsed.setdefault("description", []).extend(
|
||||
[description_header, payload]
|
||||
)
|
||||
elif "description" in unparsed:
|
||||
unparsed["description"].append(payload)
|
||||
else:
|
||||
raw["description"] = payload
|
||||
|
||||
    # We need to cast our `raw` to RawMetadata because a TypedDict only supports
    # literal key names while we compute ours dynamically; however, the way this
    # function is implemented guarantees that `raw` only ends up with valid key
    # names.
|
||||
return cast(RawMetadata, raw), unparsed
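

# Illustrative sketch (not part of the vendored packaging module): roughly how
# parse_email() splits recognized core-metadata fields from unrecognized or
# malformed ones. The sample text and the expected values in the comments are
# assumptions for illustration, not authoritative output.
def _example_parse_email() -> None:
    sample = (
        "Metadata-Version: 2.1\n"
        "Name: example-project\n"
        "Version: 1.0.0\n"
        "Keywords: one, two\n"
        "Unknown-Field: whatever\n"
        "\n"
        "A longer description lives in the message body.\n"
    )
    raw, unparsed = parse_email(sample)
    # Expected (roughly): raw["name"] == "example-project",
    # raw["keywords"] == ["one", "two"], raw["description"] is the body text,
    # and unparsed == {"unknown-field": ["whatever"]}.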
|
||||
|
||||
|
||||
_NOT_FOUND = object()
|
||||
|
||||
|
||||
# Keep the two values in sync.
|
||||
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
|
||||
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
|
||||
|
||||
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
|
||||
|
||||
|
||||
class _Validator(Generic[T]):
|
||||
"""Validate a metadata field.
|
||||
|
||||
All _process_*() methods correspond to a core metadata field. The method is
|
||||
called with the field's raw value. If the raw value is valid it is returned
|
||||
in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
|
||||
If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
|
||||
as appropriate).
|
||||
"""
|
||||
|
||||
name: str
|
||||
raw_name: str
|
||||
added: _MetadataVersion
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
added: _MetadataVersion = "1.0",
|
||||
) -> None:
|
||||
self.added = added
|
||||
|
||||
def __set_name__(self, _owner: "Metadata", name: str) -> None:
|
||||
self.name = name
|
||||
self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
|
||||
|
||||
def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
|
||||
# With Python 3.8, the caching can be replaced with functools.cached_property().
|
||||
# No need to check the cache as attribute lookup will resolve into the
|
||||
# instance's __dict__ before __get__ is called.
|
||||
cache = instance.__dict__
|
||||
value = instance._raw.get(self.name)
|
||||
|
||||
# To make the _process_* methods easier, we'll check if the value is None
|
||||
# and if this field is NOT a required attribute, and if both of those
|
||||
        # things are true, we'll skip the converter. This will mean that the
|
||||
# converters never have to deal with the None union.
|
||||
if self.name in _REQUIRED_ATTRS or value is not None:
|
||||
try:
|
||||
converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
value = converter(value)
|
||||
|
||||
cache[self.name] = value
|
||||
try:
|
||||
del instance._raw[self.name] # type: ignore[misc]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
return cast(T, value)
|
||||
|
||||
def _invalid_metadata(
|
||||
self, msg: str, cause: Optional[Exception] = None
|
||||
) -> InvalidMetadata:
|
||||
exc = InvalidMetadata(
|
||||
self.raw_name, msg.format_map({"field": repr(self.raw_name)})
|
||||
)
|
||||
exc.__cause__ = cause
|
||||
return exc
|
||||
|
||||
def _process_metadata_version(self, value: str) -> _MetadataVersion:
|
||||
# Implicitly makes Metadata-Version required.
|
||||
if value not in _VALID_METADATA_VERSIONS:
|
||||
raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
|
||||
return cast(_MetadataVersion, value)
|
||||
|
||||
def _process_name(self, value: str) -> str:
|
||||
if not value:
|
||||
raise self._invalid_metadata("{field} is a required field")
|
||||
# Validate the name as a side-effect.
|
||||
try:
|
||||
utils.canonicalize_name(value, validate=True)
|
||||
except utils.InvalidName as exc:
|
||||
raise self._invalid_metadata(
|
||||
f"{value!r} is invalid for {{field}}", cause=exc
|
||||
)
|
||||
else:
|
||||
return value
|
||||
|
||||
def _process_version(self, value: str) -> version_module.Version:
|
||||
if not value:
|
||||
raise self._invalid_metadata("{field} is a required field")
|
||||
try:
|
||||
return version_module.parse(value)
|
||||
except version_module.InvalidVersion as exc:
|
||||
raise self._invalid_metadata(
|
||||
f"{value!r} is invalid for {{field}}", cause=exc
|
||||
)
|
||||
|
||||
def _process_summary(self, value: str) -> str:
|
||||
"""Check the field contains no newlines."""
|
||||
if "\n" in value:
|
||||
raise self._invalid_metadata("{field} must be a single line")
|
||||
return value
|
||||
|
||||
def _process_description_content_type(self, value: str) -> str:
|
||||
content_types = {"text/plain", "text/x-rst", "text/markdown"}
|
||||
message = email.message.EmailMessage()
|
||||
message["content-type"] = value
|
||||
|
||||
content_type, parameters = (
|
||||
# Defaults to `text/plain` if parsing failed.
|
||||
message.get_content_type().lower(),
|
||||
message["content-type"].params,
|
||||
)
|
||||
# Check if content-type is valid or defaulted to `text/plain` and thus was
|
||||
# not parseable.
|
||||
if content_type not in content_types or content_type not in value.lower():
|
||||
raise self._invalid_metadata(
|
||||
f"{{field}} must be one of {list(content_types)}, not {value!r}"
|
||||
)
|
||||
|
||||
charset = parameters.get("charset", "UTF-8")
|
||||
if charset != "UTF-8":
|
||||
raise self._invalid_metadata(
|
||||
f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
|
||||
)
|
||||
|
||||
markdown_variants = {"GFM", "CommonMark"}
|
||||
variant = parameters.get("variant", "GFM") # Use an acceptable default.
|
||||
if content_type == "text/markdown" and variant not in markdown_variants:
|
||||
raise self._invalid_metadata(
|
||||
f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
|
||||
f"not {variant!r}",
|
||||
)
|
||||
return value
|
||||
|
||||
def _process_dynamic(self, value: List[str]) -> List[str]:
|
||||
for dynamic_field in map(str.lower, value):
|
||||
if dynamic_field in {"name", "version", "metadata-version"}:
|
||||
raise self._invalid_metadata(
|
||||
f"{value!r} is not allowed as a dynamic field"
|
||||
)
|
||||
elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
|
||||
raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
|
||||
return list(map(str.lower, value))
|
||||
|
||||
def _process_provides_extra(
|
||||
self,
|
||||
value: List[str],
|
||||
) -> List[utils.NormalizedName]:
|
||||
normalized_names = []
|
||||
try:
|
||||
for name in value:
|
||||
normalized_names.append(utils.canonicalize_name(name, validate=True))
|
||||
except utils.InvalidName as exc:
|
||||
raise self._invalid_metadata(
|
||||
f"{name!r} is invalid for {{field}}", cause=exc
|
||||
)
|
||||
else:
|
||||
return normalized_names
|
||||
|
||||
def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
|
||||
try:
|
||||
return specifiers.SpecifierSet(value)
|
||||
except specifiers.InvalidSpecifier as exc:
|
||||
raise self._invalid_metadata(
|
||||
f"{value!r} is invalid for {{field}}", cause=exc
|
||||
)
|
||||
|
||||
def _process_requires_dist(
|
||||
self,
|
||||
value: List[str],
|
||||
) -> List[requirements.Requirement]:
|
||||
reqs = []
|
||||
try:
|
||||
for req in value:
|
||||
reqs.append(requirements.Requirement(req))
|
||||
except requirements.InvalidRequirement as exc:
|
||||
raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
|
||||
else:
|
||||
return reqs
|
||||
|
||||
|
||||
class Metadata:
|
||||
"""Representation of distribution metadata.
|
||||
|
||||
Compared to :class:`RawMetadata`, this class provides objects representing
|
||||
metadata fields instead of only using built-in types. Any invalid metadata
|
||||
will cause :exc:`InvalidMetadata` to be raised (with a
|
||||
:py:attr:`~BaseException.__cause__` attribute as appropriate).
|
||||
"""
|
||||
|
||||
_raw: RawMetadata
|
||||
|
||||
@classmethod
|
||||
def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
|
||||
"""Create an instance from :class:`RawMetadata`.
|
||||
|
||||
If *validate* is true, all metadata will be validated. All exceptions
|
||||
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
||||
"""
|
||||
ins = cls()
|
||||
ins._raw = data.copy() # Mutations occur due to caching enriched values.
|
||||
|
||||
if validate:
|
||||
exceptions: List[Exception] = []
|
||||
try:
|
||||
metadata_version = ins.metadata_version
|
||||
metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
|
||||
except InvalidMetadata as metadata_version_exc:
|
||||
exceptions.append(metadata_version_exc)
|
||||
metadata_version = None
|
||||
|
||||
# Make sure to check for the fields that are present, the required
|
||||
# fields (so their absence can be reported).
|
||||
fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
|
||||
# Remove fields that have already been checked.
|
||||
fields_to_check -= {"metadata_version"}
|
||||
|
||||
for key in fields_to_check:
|
||||
try:
|
||||
if metadata_version:
|
||||
# Can't use getattr() as that triggers descriptor protocol which
|
||||
# will fail due to no value for the instance argument.
|
||||
try:
|
||||
field_metadata_version = cls.__dict__[key].added
|
||||
except KeyError:
|
||||
exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
|
||||
exceptions.append(exc)
|
||||
continue
|
||||
field_age = _VALID_METADATA_VERSIONS.index(
|
||||
field_metadata_version
|
||||
)
|
||||
if field_age > metadata_age:
|
||||
field = _RAW_TO_EMAIL_MAPPING[key]
|
||||
exc = InvalidMetadata(
|
||||
field,
|
||||
"{field} introduced in metadata version "
|
||||
"{field_metadata_version}, not {metadata_version}",
|
||||
)
|
||||
exceptions.append(exc)
|
||||
continue
|
||||
getattr(ins, key)
|
||||
except InvalidMetadata as exc:
|
||||
exceptions.append(exc)
|
||||
|
||||
if exceptions:
|
||||
raise ExceptionGroup("invalid metadata", exceptions)
|
||||
|
||||
return ins
|
||||
|
||||
@classmethod
|
||||
def from_email(
|
||||
cls, data: Union[bytes, str], *, validate: bool = True
|
||||
) -> "Metadata":
|
||||
"""Parse metadata from email headers.
|
||||
|
||||
If *validate* is true, the metadata will be validated. All exceptions
|
||||
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
||||
"""
|
||||
raw, unparsed = parse_email(data)
|
||||
|
||||
if validate:
|
||||
            exceptions: List[Exception] = []
|
||||
for unparsed_key in unparsed:
|
||||
if unparsed_key in _EMAIL_TO_RAW_MAPPING:
|
||||
message = f"{unparsed_key!r} has invalid data"
|
||||
else:
|
||||
message = f"unrecognized field: {unparsed_key!r}"
|
||||
exceptions.append(InvalidMetadata(unparsed_key, message))
|
||||
|
||||
if exceptions:
|
||||
raise ExceptionGroup("unparsed", exceptions)
|
||||
|
||||
try:
|
||||
return cls.from_raw(raw, validate=validate)
|
||||
except ExceptionGroup as exc_group:
|
||||
raise ExceptionGroup(
|
||||
"invalid or unparsed metadata", exc_group.exceptions
|
||||
) from None
|
||||
|
||||
metadata_version: _Validator[_MetadataVersion] = _Validator()
|
||||
""":external:ref:`core-metadata-metadata-version`
|
||||
(required; validated to be a valid metadata version)"""
|
||||
name: _Validator[str] = _Validator()
|
||||
""":external:ref:`core-metadata-name`
|
||||
(required; validated using :func:`~packaging.utils.canonicalize_name` and its
|
||||
*validate* parameter)"""
|
||||
version: _Validator[version_module.Version] = _Validator()
|
||||
""":external:ref:`core-metadata-version` (required)"""
|
||||
dynamic: _Validator[Optional[List[str]]] = _Validator(
|
||||
added="2.2",
|
||||
)
|
||||
""":external:ref:`core-metadata-dynamic`
|
||||
(validated against core metadata field names and lowercased)"""
|
||||
platforms: _Validator[Optional[List[str]]] = _Validator()
|
||||
""":external:ref:`core-metadata-platform`"""
|
||||
supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
""":external:ref:`core-metadata-supported-platform`"""
|
||||
summary: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-summary` (validated to contain no newlines)"""
|
||||
description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body
|
||||
""":external:ref:`core-metadata-description`"""
|
||||
description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
|
||||
""":external:ref:`core-metadata-description-content-type` (validated)"""
|
||||
keywords: _Validator[Optional[List[str]]] = _Validator()
|
||||
""":external:ref:`core-metadata-keywords`"""
|
||||
home_page: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-home-page`"""
|
||||
download_url: _Validator[Optional[str]] = _Validator(added="1.1")
|
||||
""":external:ref:`core-metadata-download-url`"""
|
||||
author: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-author`"""
|
||||
author_email: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-author-email`"""
|
||||
maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-maintainer`"""
|
||||
maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-maintainer-email`"""
|
||||
license: _Validator[Optional[str]] = _Validator()
|
||||
""":external:ref:`core-metadata-license`"""
|
||||
classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
""":external:ref:`core-metadata-classifier`"""
|
||||
requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
|
||||
added="1.2"
|
||||
)
|
||||
""":external:ref:`core-metadata-requires-dist`"""
|
||||
requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
|
||||
added="1.2"
|
||||
)
|
||||
""":external:ref:`core-metadata-requires-python`"""
|
||||
# Because `Requires-External` allows for non-PEP 440 version specifiers, we
|
||||
# don't do any processing on the values.
|
||||
requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-requires-external`"""
|
||||
project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-project-url`"""
|
||||
# PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
|
||||
# regardless of metadata version.
|
||||
provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
|
||||
added="2.1",
|
||||
)
|
||||
""":external:ref:`core-metadata-provides-extra`"""
|
||||
provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-provides-dist`"""
|
||||
obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
||||
""":external:ref:`core-metadata-obsoletes-dist`"""
|
||||
requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
"""``Requires`` (deprecated)"""
|
||||
provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
"""``Provides`` (deprecated)"""
|
||||
obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
||||
"""``Obsoletes`` (deprecated)"""
0
my-app/node_modules/node-gyp/gyp/pylib/packaging/py.typed
generated
vendored
Executable file
90
my-app/node_modules/node-gyp/gyp/pylib/packaging/requirements.py
generated
vendored
Executable file
@ -0,0 +1,90 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from typing import Any, Iterator, Optional, Set
|
||||
|
||||
from ._parser import parse_requirement as _parse_requirement
|
||||
from ._tokenizer import ParserSyntaxError
|
||||
from .markers import Marker, _normalize_extra_values
|
||||
from .specifiers import SpecifierSet
|
||||
from .utils import canonicalize_name
|
||||
|
||||
|
||||
class InvalidRequirement(ValueError):
|
||||
"""
|
||||
An invalid requirement was found, users should refer to PEP 508.
|
||||
"""
|
||||
|
||||
|
||||
class Requirement:
|
||||
"""Parse a requirement.
|
||||
|
||||
Parse a given requirement string into its parts, such as name, specifier,
|
||||
URL, and extras. Raises InvalidRequirement on a badly-formed requirement
|
||||
string.
|
||||
"""
|
||||
|
||||
# TODO: Can we test whether something is contained within a requirement?
|
||||
# If so how do we do that? Do we need to test against the _name_ of
|
||||
# the thing as well as the version? What about the markers?
|
||||
# TODO: Can we normalize the name and extra name?
|
||||
|
||||
def __init__(self, requirement_string: str) -> None:
|
||||
try:
|
||||
parsed = _parse_requirement(requirement_string)
|
||||
except ParserSyntaxError as e:
|
||||
raise InvalidRequirement(str(e)) from e
|
||||
|
||||
self.name: str = parsed.name
|
||||
self.url: Optional[str] = parsed.url or None
|
||||
self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
|
||||
self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
|
||||
self.marker: Optional[Marker] = None
|
||||
if parsed.marker is not None:
|
||||
self.marker = Marker.__new__(Marker)
|
||||
self.marker._markers = _normalize_extra_values(parsed.marker)
|
||||
|
||||
def _iter_parts(self, name: str) -> Iterator[str]:
|
||||
yield name
|
||||
|
||||
if self.extras:
|
||||
formatted_extras = ",".join(sorted(self.extras))
|
||||
yield f"[{formatted_extras}]"
|
||||
|
||||
if self.specifier:
|
||||
yield str(self.specifier)
|
||||
|
||||
if self.url:
|
||||
yield f"@ {self.url}"
|
||||
if self.marker:
|
||||
yield " "
|
||||
|
||||
if self.marker:
|
||||
yield f"; {self.marker}"
|
||||
|
||||
def __str__(self) -> str:
|
||||
return "".join(self._iter_parts(self.name))
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<Requirement('{self}')>"
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(
|
||||
(
|
||||
self.__class__.__name__,
|
||||
*self._iter_parts(canonicalize_name(self.name)),
|
||||
)
|
||||
)
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
if not isinstance(other, Requirement):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
canonicalize_name(self.name) == canonicalize_name(other.name)
|
||||
and self.extras == other.extras
|
||||
and self.specifier == other.specifier
|
||||
and self.url == other.url
|
||||
and self.marker == other.marker
|
||||
)
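

# Illustrative sketch (not part of the vendored packaging module): what
# Requirement exposes for a typical PEP 508 string. The requirement below and
# the expected values are assumptions for illustration.
def _example_requirement() -> None:
    req = Requirement('example-pkg[extra1]>=1.0,<2.0; python_version >= "3.8"')
    # Expected (roughly): req.name == "example-pkg", req.extras == {"extra1"},
    # req.specifier covers ">=1.0,<2.0", and req.marker holds the parsed marker.
    # str(req) round-trips to an equivalent, normalized requirement string.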
1030
my-app/node_modules/node-gyp/gyp/pylib/packaging/specifiers.py
generated
vendored
Executable file
File diff suppressed because it is too large
553
my-app/node_modules/node-gyp/gyp/pylib/packaging/tags.py
generated
vendored
Executable file
@ -0,0 +1,553 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
import logging
|
||||
import platform
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
import sysconfig
|
||||
from importlib.machinery import EXTENSION_SUFFIXES
|
||||
from typing import (
|
||||
Dict,
|
||||
FrozenSet,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from . import _manylinux, _musllinux
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
PythonVersion = Sequence[int]
|
||||
MacVersion = Tuple[int, int]
|
||||
|
||||
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
|
||||
"python": "py", # Generic.
|
||||
"cpython": "cp",
|
||||
"pypy": "pp",
|
||||
"ironpython": "ip",
|
||||
"jython": "jy",
|
||||
}
|
||||
|
||||
|
||||
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
|
||||
|
||||
|
||||
class Tag:
|
||||
"""
|
||||
A representation of the tag triple for a wheel.
|
||||
|
||||
Instances are considered immutable and thus are hashable. Equality checking
|
||||
is also supported.
|
||||
"""
|
||||
|
||||
__slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
|
||||
|
||||
def __init__(self, interpreter: str, abi: str, platform: str) -> None:
|
||||
self._interpreter = interpreter.lower()
|
||||
self._abi = abi.lower()
|
||||
self._platform = platform.lower()
|
||||
# The __hash__ of every single element in a Set[Tag] will be evaluated each time
|
||||
        # that a set calls its `.isdisjoint()` method, which may be called hundreds of
|
||||
# times when scanning a page of links for packages with tags matching that
|
||||
# Set[Tag]. Pre-computing the value here produces significant speedups for
|
||||
# downstream consumers.
|
||||
self._hash = hash((self._interpreter, self._abi, self._platform))
|
||||
|
||||
@property
|
||||
def interpreter(self) -> str:
|
||||
return self._interpreter
|
||||
|
||||
@property
|
||||
def abi(self) -> str:
|
||||
return self._abi
|
||||
|
||||
@property
|
||||
def platform(self) -> str:
|
||||
return self._platform
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, Tag):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
(self._hash == other._hash) # Short-circuit ASAP for perf reasons.
|
||||
and (self._platform == other._platform)
|
||||
and (self._abi == other._abi)
|
||||
and (self._interpreter == other._interpreter)
|
||||
)
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return self._hash
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{self._interpreter}-{self._abi}-{self._platform}"
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<{self} @ {id(self)}>"
|
||||
|
||||
|
||||
def parse_tag(tag: str) -> FrozenSet[Tag]:
|
||||
"""
|
||||
Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
|
||||
|
||||
Returning a set is required due to the possibility that the tag is a
|
||||
compressed tag set.
|
||||
"""
|
||||
tags = set()
|
||||
interpreters, abis, platforms = tag.split("-")
|
||||
for interpreter in interpreters.split("."):
|
||||
for abi in abis.split("."):
|
||||
for platform_ in platforms.split("."):
|
||||
tags.add(Tag(interpreter, abi, platform_))
|
||||
return frozenset(tags)
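

# Illustrative sketch (not part of the vendored packaging module): a
# "compressed tag set" expands to the cross product of its dot-separated
# components. The tag string below is a hypothetical example.
def _example_parse_tag() -> None:
    tags = parse_tag("cp38.cp39-abi3-manylinux1_x86_64.manylinux2014_x86_64")
    # 2 interpreters x 1 ABI x 2 platforms == 4 Tag instances.
    assert len(tags) == 4
    assert Tag("cp39", "abi3", "manylinux2014_x86_64") in tags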
|
||||
|
||||
|
||||
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
|
||||
value: Union[int, str, None] = sysconfig.get_config_var(name)
|
||||
if value is None and warn:
|
||||
logger.debug(
|
||||
"Config variable '%s' is unset, Python ABI tag may be incorrect", name
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
def _normalize_string(string: str) -> str:
|
||||
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
|
||||
|
||||
|
||||
def _abi3_applies(python_version: PythonVersion) -> bool:
|
||||
"""
|
||||
Determine if the Python version supports abi3.
|
||||
|
||||
PEP 384 was first implemented in Python 3.2.
|
||||
"""
|
||||
return len(python_version) > 1 and tuple(python_version) >= (3, 2)
|
||||
|
||||
|
||||
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
|
||||
py_version = tuple(py_version) # To allow for version comparison.
|
||||
abis = []
|
||||
version = _version_nodot(py_version[:2])
|
||||
debug = pymalloc = ucs4 = ""
|
||||
with_debug = _get_config_var("Py_DEBUG", warn)
|
||||
has_refcount = hasattr(sys, "gettotalrefcount")
|
||||
# Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
|
||||
# extension modules is the best option.
|
||||
# https://github.com/pypa/pip/issues/3383#issuecomment-173267692
|
||||
has_ext = "_d.pyd" in EXTENSION_SUFFIXES
|
||||
if with_debug or (with_debug is None and (has_refcount or has_ext)):
|
||||
debug = "d"
|
||||
if py_version < (3, 8):
|
||||
with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
|
||||
if with_pymalloc or with_pymalloc is None:
|
||||
pymalloc = "m"
|
||||
if py_version < (3, 3):
|
||||
unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
|
||||
if unicode_size == 4 or (
|
||||
unicode_size is None and sys.maxunicode == 0x10FFFF
|
||||
):
|
||||
ucs4 = "u"
|
||||
elif debug:
|
||||
# Debug builds can also load "normal" extension modules.
|
||||
# We can also assume no UCS-4 or pymalloc requirement.
|
||||
abis.append(f"cp{version}")
|
||||
abis.insert(
|
||||
0,
|
||||
"cp{version}{debug}{pymalloc}{ucs4}".format(
|
||||
version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
|
||||
),
|
||||
)
|
||||
return abis
|
||||
|
||||
|
||||
def cpython_tags(
|
||||
python_version: Optional[PythonVersion] = None,
|
||||
abis: Optional[Iterable[str]] = None,
|
||||
platforms: Optional[Iterable[str]] = None,
|
||||
*,
|
||||
warn: bool = False,
|
||||
) -> Iterator[Tag]:
|
||||
"""
|
||||
Yields the tags for a CPython interpreter.
|
||||
|
||||
The tags consist of:
|
||||
- cp<python_version>-<abi>-<platform>
|
||||
- cp<python_version>-abi3-<platform>
|
||||
- cp<python_version>-none-<platform>
|
||||
- cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.
|
||||
|
||||
If python_version only specifies a major version then user-provided ABIs and
|
||||
    the 'none' ABI tag will be used.
|
||||
|
||||
If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
|
||||
their normal position and not at the beginning.
|
||||
"""
|
||||
if not python_version:
|
||||
python_version = sys.version_info[:2]
|
||||
|
||||
interpreter = f"cp{_version_nodot(python_version[:2])}"
|
||||
|
||||
if abis is None:
|
||||
if len(python_version) > 1:
|
||||
abis = _cpython_abis(python_version, warn)
|
||||
else:
|
||||
abis = []
|
||||
abis = list(abis)
|
||||
# 'abi3' and 'none' are explicitly handled later.
|
||||
for explicit_abi in ("abi3", "none"):
|
||||
try:
|
||||
abis.remove(explicit_abi)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
platforms = list(platforms or platform_tags())
|
||||
for abi in abis:
|
||||
for platform_ in platforms:
|
||||
yield Tag(interpreter, abi, platform_)
|
||||
if _abi3_applies(python_version):
|
||||
yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
|
||||
yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)
|
||||
|
||||
if _abi3_applies(python_version):
|
||||
for minor_version in range(python_version[1] - 1, 1, -1):
|
||||
for platform_ in platforms:
|
||||
interpreter = "cp{version}".format(
|
||||
version=_version_nodot((python_version[0], minor_version))
|
||||
)
|
||||
yield Tag(interpreter, "abi3", platform_)
|
||||
|
||||
|
||||
def _generic_abi() -> List[str]:
|
||||
"""
|
||||
Return the ABI tag based on EXT_SUFFIX.
|
||||
"""
|
||||
# The following are examples of `EXT_SUFFIX`.
|
||||
# We want to keep the parts which are related to the ABI and remove the
|
||||
# parts which are related to the platform:
|
||||
# - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310
|
||||
# - mac: '.cpython-310-darwin.so' => cp310
|
||||
# - win: '.cp310-win_amd64.pyd' => cp310
|
||||
# - win: '.pyd' => cp37 (uses _cpython_abis())
|
||||
# - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
|
||||
# - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
|
||||
# => graalpy_38_native
|
||||
|
||||
ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
|
||||
if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
|
||||
raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
|
||||
parts = ext_suffix.split(".")
|
||||
if len(parts) < 3:
|
||||
        # CPython 3.7 and earlier use ".pyd" on Windows.
|
||||
return _cpython_abis(sys.version_info[:2])
|
||||
soabi = parts[1]
|
||||
if soabi.startswith("cpython"):
|
||||
# non-windows
|
||||
abi = "cp" + soabi.split("-")[1]
|
||||
elif soabi.startswith("cp"):
|
||||
# windows
|
||||
abi = soabi.split("-")[0]
|
||||
elif soabi.startswith("pypy"):
|
||||
abi = "-".join(soabi.split("-")[:2])
|
||||
elif soabi.startswith("graalpy"):
|
||||
abi = "-".join(soabi.split("-")[:3])
|
||||
elif soabi:
|
||||
# pyston, ironpython, others?
|
||||
abi = soabi
|
||||
else:
|
||||
return []
|
||||
return [_normalize_string(abi)]
|
||||
|
||||
|
||||
def generic_tags(
|
||||
interpreter: Optional[str] = None,
|
||||
abis: Optional[Iterable[str]] = None,
|
||||
platforms: Optional[Iterable[str]] = None,
|
||||
*,
|
||||
warn: bool = False,
|
||||
) -> Iterator[Tag]:
|
||||
"""
|
||||
Yields the tags for a generic interpreter.
|
||||
|
||||
The tags consist of:
|
||||
- <interpreter>-<abi>-<platform>
|
||||
|
||||
The "none" ABI will be added if it was not explicitly provided.
|
||||
"""
|
||||
if not interpreter:
|
||||
interp_name = interpreter_name()
|
||||
interp_version = interpreter_version(warn=warn)
|
||||
interpreter = "".join([interp_name, interp_version])
|
||||
if abis is None:
|
||||
abis = _generic_abi()
|
||||
else:
|
||||
abis = list(abis)
|
||||
platforms = list(platforms or platform_tags())
|
||||
if "none" not in abis:
|
||||
abis.append("none")
|
||||
for abi in abis:
|
||||
for platform_ in platforms:
|
||||
yield Tag(interpreter, abi, platform_)
|
||||
|
||||
|
||||
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
|
||||
"""
|
||||
Yields Python versions in descending order.
|
||||
|
||||
After the latest version, the major-only version will be yielded, and then
|
||||
all previous versions of that major version.
|
||||
"""
|
||||
if len(py_version) > 1:
|
||||
yield f"py{_version_nodot(py_version[:2])}"
|
||||
yield f"py{py_version[0]}"
|
||||
if len(py_version) > 1:
|
||||
for minor in range(py_version[1] - 1, -1, -1):
|
||||
yield f"py{_version_nodot((py_version[0], minor))}"
|
||||
|
||||
|
||||
def compatible_tags(
|
||||
python_version: Optional[PythonVersion] = None,
|
||||
interpreter: Optional[str] = None,
|
||||
platforms: Optional[Iterable[str]] = None,
|
||||
) -> Iterator[Tag]:
|
||||
"""
|
||||
Yields the sequence of tags that are compatible with a specific version of Python.
|
||||
|
||||
The tags consist of:
|
||||
- py*-none-<platform>
|
||||
- <interpreter>-none-any # ... if `interpreter` is provided.
|
||||
- py*-none-any
|
||||
"""
|
||||
if not python_version:
|
||||
python_version = sys.version_info[:2]
|
||||
platforms = list(platforms or platform_tags())
|
||||
for version in _py_interpreter_range(python_version):
|
||||
for platform_ in platforms:
|
||||
yield Tag(version, "none", platform_)
|
||||
if interpreter:
|
||||
yield Tag(interpreter, "none", "any")
|
||||
for version in _py_interpreter_range(python_version):
|
||||
yield Tag(version, "none", "any")
|
||||
|
||||
|
||||
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
|
||||
if not is_32bit:
|
||||
return arch
|
||||
|
||||
if arch.startswith("ppc"):
|
||||
return "ppc"
|
||||
|
||||
return "i386"
|
||||
|
||||
|
||||
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
|
||||
formats = [cpu_arch]
|
||||
if cpu_arch == "x86_64":
|
||||
if version < (10, 4):
|
||||
return []
|
||||
formats.extend(["intel", "fat64", "fat32"])
|
||||
|
||||
elif cpu_arch == "i386":
|
||||
if version < (10, 4):
|
||||
return []
|
||||
formats.extend(["intel", "fat32", "fat"])
|
||||
|
||||
elif cpu_arch == "ppc64":
|
||||
# TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
|
||||
if version > (10, 5) or version < (10, 4):
|
||||
return []
|
||||
formats.append("fat64")
|
||||
|
||||
elif cpu_arch == "ppc":
|
||||
if version > (10, 6):
|
||||
return []
|
||||
formats.extend(["fat32", "fat"])
|
||||
|
||||
if cpu_arch in {"arm64", "x86_64"}:
|
||||
formats.append("universal2")
|
||||
|
||||
if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
|
||||
formats.append("universal")
|
||||
|
||||
return formats
|
||||
|
||||
|
||||
def mac_platforms(
|
||||
version: Optional[MacVersion] = None, arch: Optional[str] = None
|
||||
) -> Iterator[str]:
|
||||
"""
|
||||
Yields the platform tags for a macOS system.
|
||||
|
||||
The `version` parameter is a two-item tuple specifying the macOS version to
|
||||
generate platform tags for. The `arch` parameter is the CPU architecture to
|
||||
generate platform tags for. Both parameters default to the appropriate value
|
||||
for the current system.
|
||||
"""
|
||||
version_str, _, cpu_arch = platform.mac_ver()
|
||||
if version is None:
|
||||
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
|
||||
if version == (10, 16):
|
||||
# When built against an older macOS SDK, Python will report macOS 10.16
|
||||
# instead of the real version.
|
||||
version_str = subprocess.run(
|
||||
[
|
||||
sys.executable,
|
||||
"-sS",
|
||||
"-c",
|
||||
"import platform; print(platform.mac_ver()[0])",
|
||||
],
|
||||
check=True,
|
||||
env={"SYSTEM_VERSION_COMPAT": "0"},
|
||||
stdout=subprocess.PIPE,
|
||||
text=True,
|
||||
).stdout
|
||||
version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
|
||||
else:
|
||||
version = version
|
||||
if arch is None:
|
||||
arch = _mac_arch(cpu_arch)
|
||||
else:
|
||||
arch = arch
|
||||
|
||||
if (10, 0) <= version and version < (11, 0):
|
||||
# Prior to Mac OS 11, each yearly release of Mac OS bumped the
|
||||
# "minor" version number. The major version was always 10.
|
||||
for minor_version in range(version[1], -1, -1):
|
||||
compat_version = 10, minor_version
|
||||
binary_formats = _mac_binary_formats(compat_version, arch)
|
||||
for binary_format in binary_formats:
|
||||
yield "macosx_{major}_{minor}_{binary_format}".format(
|
||||
major=10, minor=minor_version, binary_format=binary_format
|
||||
)
|
||||
|
||||
if version >= (11, 0):
|
||||
# Starting with Mac OS 11, each yearly release bumps the major version
|
||||
# number. The minor versions are now the midyear updates.
|
||||
for major_version in range(version[0], 10, -1):
|
||||
compat_version = major_version, 0
|
||||
binary_formats = _mac_binary_formats(compat_version, arch)
|
||||
for binary_format in binary_formats:
|
||||
yield "macosx_{major}_{minor}_{binary_format}".format(
|
||||
major=major_version, minor=0, binary_format=binary_format
|
||||
)
|
||||
|
||||
if version >= (11, 0):
|
||||
# Mac OS 11 on x86_64 is compatible with binaries from previous releases.
|
||||
# Arm64 support was introduced in 11.0, so no Arm binaries from previous
|
||||
# releases exist.
|
||||
#
|
||||
# However, the "universal2" binary format can have a
|
||||
# macOS version earlier than 11.0 when the x86_64 part of the binary supports
|
||||
# that version of macOS.
|
||||
if arch == "x86_64":
|
||||
for minor_version in range(16, 3, -1):
|
||||
compat_version = 10, minor_version
|
||||
binary_formats = _mac_binary_formats(compat_version, arch)
|
||||
for binary_format in binary_formats:
|
||||
yield "macosx_{major}_{minor}_{binary_format}".format(
|
||||
major=compat_version[0],
|
||||
minor=compat_version[1],
|
||||
binary_format=binary_format,
|
||||
)
|
||||
else:
|
||||
for minor_version in range(16, 3, -1):
|
||||
compat_version = 10, minor_version
|
||||
binary_format = "universal2"
|
||||
yield "macosx_{major}_{minor}_{binary_format}".format(
|
||||
major=compat_version[0],
|
||||
minor=compat_version[1],
|
||||
binary_format=binary_format,
|
||||
)
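

# Illustrative sketch (not part of the vendored packaging module): for macOS 12
# on arm64 the generator should walk major versions back to 11.0 and then fall
# back to universal2 tags for the 10.x series. The expected output listed in
# the comments is an assumption, not a guarantee.
def _example_mac_platforms() -> None:
    tags = list(mac_platforms(version=(12, 0), arch="arm64"))
    # Expected (roughly): "macosx_12_0_arm64", "macosx_12_0_universal2",
    # "macosx_11_0_arm64", "macosx_11_0_universal2", then
    # "macosx_10_16_universal2" ... down to "macosx_10_4_universal2".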
|
||||
|
||||
|
||||
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
|
||||
linux = _normalize_string(sysconfig.get_platform())
|
||||
if not linux.startswith("linux_"):
|
||||
# we should never be here, just yield the sysconfig one and return
|
||||
yield linux
|
||||
return
|
||||
if is_32bit:
|
||||
if linux == "linux_x86_64":
|
||||
linux = "linux_i686"
|
||||
elif linux == "linux_aarch64":
|
||||
linux = "linux_armv8l"
|
||||
_, arch = linux.split("_", 1)
|
||||
archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
|
||||
yield from _manylinux.platform_tags(archs)
|
||||
yield from _musllinux.platform_tags(archs)
|
||||
for arch in archs:
|
||||
yield f"linux_{arch}"
|
||||
|
||||
|
||||
def _generic_platforms() -> Iterator[str]:
|
||||
yield _normalize_string(sysconfig.get_platform())
|
||||
|
||||
|
||||
def platform_tags() -> Iterator[str]:
|
||||
"""
|
||||
Provides the platform tags for this installation.
|
||||
"""
|
||||
if platform.system() == "Darwin":
|
||||
return mac_platforms()
|
||||
elif platform.system() == "Linux":
|
||||
return _linux_platforms()
|
||||
else:
|
||||
return _generic_platforms()
|
||||
|
||||
|
||||
def interpreter_name() -> str:
|
||||
"""
|
||||
Returns the name of the running interpreter.
|
||||
|
||||
Some implementations have a reserved, two-letter abbreviation which will
|
||||
be returned when appropriate.
|
||||
"""
|
||||
name = sys.implementation.name
|
||||
return INTERPRETER_SHORT_NAMES.get(name) or name
|
||||
|
||||
|
||||
def interpreter_version(*, warn: bool = False) -> str:
|
||||
"""
|
||||
Returns the version of the running interpreter.
|
||||
"""
|
||||
version = _get_config_var("py_version_nodot", warn=warn)
|
||||
if version:
|
||||
version = str(version)
|
||||
else:
|
||||
version = _version_nodot(sys.version_info[:2])
|
||||
return version
|
||||
|
||||
|
||||
def _version_nodot(version: PythonVersion) -> str:
|
||||
return "".join(map(str, version))
|
||||
|
||||
|
||||
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
|
||||
"""
|
||||
Returns the sequence of tag triples for the running interpreter.
|
||||
|
||||
The order of the sequence corresponds to priority order for the
|
||||
interpreter, from most to least important.
|
||||
"""
|
||||
|
||||
interp_name = interpreter_name()
|
||||
if interp_name == "cp":
|
||||
yield from cpython_tags(warn=warn)
|
||||
else:
|
||||
yield from generic_tags()
|
||||
|
||||
if interp_name == "pp":
|
||||
interp = "pp3"
|
||||
elif interp_name == "cp":
|
||||
interp = "cp" + interpreter_version(warn=warn)
|
||||
else:
|
||||
interp = None
|
||||
yield from compatible_tags(interpreter=interp)
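

# Illustrative sketch (not part of the vendored packaging module): sys_tags()
# yields tags in priority order for the running interpreter. The concrete tags
# named in the comments are assumptions about a typical CPython install.
def _example_sys_tags() -> None:
    ordered = list(sys_tags(warn=False))
    # ordered[0] is the most specific tag, ordered[-1] the most generic one.
    # On CPython 3.11 / x86_64 Linux this typically ranges from something like
    # "cp311-cp311-manylinux_2_17_x86_64" down to "py30-none-any".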
172
my-app/node_modules/node-gyp/gyp/pylib/packaging/utils.py
generated
vendored
Executable file
@ -0,0 +1,172 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
import re
|
||||
from typing import FrozenSet, NewType, Tuple, Union, cast
|
||||
|
||||
from .tags import Tag, parse_tag
|
||||
from .version import InvalidVersion, Version
|
||||
|
||||
BuildTag = Union[Tuple[()], Tuple[int, str]]
|
||||
NormalizedName = NewType("NormalizedName", str)
|
||||
|
||||
|
||||
class InvalidName(ValueError):
|
||||
"""
|
||||
An invalid distribution name; users should refer to the packaging user guide.
|
||||
"""
|
||||
|
||||
|
||||
class InvalidWheelFilename(ValueError):
|
||||
"""
|
||||
An invalid wheel filename was found, users should refer to PEP 427.
|
||||
"""
|
||||
|
||||
|
||||
class InvalidSdistFilename(ValueError):
|
||||
"""
|
||||
An invalid sdist filename was found, users should refer to the packaging user guide.
|
||||
"""
|
||||
|
||||
|
||||
# Core metadata spec for `Name`
|
||||
_validate_regex = re.compile(
|
||||
r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
|
||||
)
|
||||
_canonicalize_regex = re.compile(r"[-_.]+")
|
||||
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
|
||||
# PEP 427: The build number must start with a digit.
|
||||
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
||||
|
||||
|
||||
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
|
||||
if validate and not _validate_regex.match(name):
|
||||
raise InvalidName(f"name is invalid: {name!r}")
|
||||
# This is taken from PEP 503.
|
||||
value = _canonicalize_regex.sub("-", name).lower()
|
||||
return cast(NormalizedName, value)
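

# Illustrative sketch (not part of the vendored packaging module): PEP 503
# normalization collapses runs of '-', '_' and '.' into a single '-' and
# lowercases the result. The sample name is hypothetical.
def _example_canonicalize_name() -> None:
    assert canonicalize_name("Foo.Bar__baz") == "foo-bar-baz"
    # With validate=True, a name that fails the `Name` regex raises InvalidName.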
|
||||
|
||||
|
||||
def is_normalized_name(name: str) -> bool:
|
||||
return _normalized_regex.match(name) is not None
|
||||
|
||||
|
||||
def canonicalize_version(
|
||||
version: Union[Version, str], *, strip_trailing_zero: bool = True
|
||||
) -> str:
|
||||
"""
|
||||
This is very similar to Version.__str__, but has one subtle difference
|
||||
with the way it handles the release segment.
|
||||
"""
|
||||
if isinstance(version, str):
|
||||
try:
|
||||
parsed = Version(version)
|
||||
except InvalidVersion:
|
||||
# Legacy versions cannot be normalized
|
||||
return version
|
||||
else:
|
||||
parsed = version
|
||||
|
||||
parts = []
|
||||
|
||||
# Epoch
|
||||
if parsed.epoch != 0:
|
||||
parts.append(f"{parsed.epoch}!")
|
||||
|
||||
# Release segment
|
||||
release_segment = ".".join(str(x) for x in parsed.release)
|
||||
if strip_trailing_zero:
|
||||
# NB: This strips trailing '.0's to normalize
|
||||
release_segment = re.sub(r"(\.0)+$", "", release_segment)
|
||||
parts.append(release_segment)
|
||||
|
||||
# Pre-release
|
||||
if parsed.pre is not None:
|
||||
parts.append("".join(str(x) for x in parsed.pre))
|
||||
|
||||
# Post-release
|
||||
if parsed.post is not None:
|
||||
parts.append(f".post{parsed.post}")
|
||||
|
||||
# Development release
|
||||
if parsed.dev is not None:
|
||||
parts.append(f".dev{parsed.dev}")
|
||||
|
||||
# Local version segment
|
||||
if parsed.local is not None:
|
||||
parts.append(f"+{parsed.local}")
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
|
||||
def parse_wheel_filename(
|
||||
filename: str,
|
||||
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
|
||||
if not filename.endswith(".whl"):
|
||||
raise InvalidWheelFilename(
|
||||
f"Invalid wheel filename (extension must be '.whl'): {filename}"
|
||||
)
|
||||
|
||||
filename = filename[:-4]
|
||||
dashes = filename.count("-")
|
||||
if dashes not in (4, 5):
|
||||
raise InvalidWheelFilename(
|
||||
f"Invalid wheel filename (wrong number of parts): {filename}"
|
||||
)
|
||||
|
||||
parts = filename.split("-", dashes - 2)
|
||||
name_part = parts[0]
|
||||
# See PEP 427 for the rules on escaping the project name.
|
||||
if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
|
||||
raise InvalidWheelFilename(f"Invalid project name: {filename}")
|
||||
name = canonicalize_name(name_part)
|
||||
|
||||
try:
|
||||
version = Version(parts[1])
|
||||
except InvalidVersion as e:
|
||||
raise InvalidWheelFilename(
|
||||
f"Invalid wheel filename (invalid version): {filename}"
|
||||
) from e
|
||||
|
||||
if dashes == 5:
|
||||
build_part = parts[2]
|
||||
build_match = _build_tag_regex.match(build_part)
|
||||
if build_match is None:
|
||||
raise InvalidWheelFilename(
|
||||
f"Invalid build number: {build_part} in '{filename}'"
|
||||
)
|
||||
build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
|
||||
else:
|
||||
build = ()
|
||||
tags = parse_tag(parts[-1])
|
||||
return (name, version, build, tags)
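

# Illustrative sketch (not part of the vendored packaging module): how a wheel
# filename decomposes into name, version, build tag and tag set. The filename
# and expected values are assumptions for illustration.
def _example_parse_wheel_filename() -> None:
    name, version, build, tags = parse_wheel_filename(
        "example_pkg-1.0.0-1-cp39-abi3-manylinux1_x86_64.whl"
    )
    # Expected (roughly): name == "example-pkg", str(version) == "1.0.0",
    # build == (1, ""), and tags contains Tag("cp39", "abi3", "manylinux1_x86_64").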
|
||||
|
||||
|
||||
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
|
||||
if filename.endswith(".tar.gz"):
|
||||
file_stem = filename[: -len(".tar.gz")]
|
||||
elif filename.endswith(".zip"):
|
||||
file_stem = filename[: -len(".zip")]
|
||||
else:
|
||||
raise InvalidSdistFilename(
|
||||
f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
|
||||
f" {filename}"
|
||||
)
|
||||
|
||||
# We are requiring a PEP 440 version, which cannot contain dashes,
|
||||
# so we split on the last dash.
|
||||
name_part, sep, version_part = file_stem.rpartition("-")
|
||||
if not sep:
|
||||
raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")
|
||||
|
||||
name = canonicalize_name(name_part)
|
||||
|
||||
try:
|
||||
version = Version(version_part)
|
||||
except InvalidVersion as e:
|
||||
raise InvalidSdistFilename(
|
||||
f"Invalid sdist filename (invalid version): {filename}"
|
||||
) from e
|
||||
|
||||
return (name, version)
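

# Illustrative sketch (not part of the vendored packaging module): sdist stems
# split on the last dash because PEP 440 versions cannot contain one. The
# filename and expected values are assumptions for illustration.
def _example_parse_sdist_filename() -> None:
    name, version = parse_sdist_filename("example-pkg-1.0.0.tar.gz")
    # Expected (roughly): name == "example-pkg" and str(version) == "1.0.0".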
563
my-app/node_modules/node-gyp/gyp/pylib/packaging/version.py
generated
vendored
Executable file
@ -0,0 +1,563 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
"""
|
||||
.. testsetup::
|
||||
|
||||
from packaging.version import parse, Version
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import re
|
||||
from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
|
||||
|
||||
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
||||
|
||||
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
|
||||
|
||||
LocalType = Tuple[Union[int, str], ...]
|
||||
|
||||
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
|
||||
CmpLocalType = Union[
|
||||
NegativeInfinityType,
|
||||
Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
|
||||
]
|
||||
CmpKey = Tuple[
|
||||
int,
|
||||
Tuple[int, ...],
|
||||
CmpPrePostDevType,
|
||||
CmpPrePostDevType,
|
||||
CmpPrePostDevType,
|
||||
CmpLocalType,
|
||||
]
|
||||
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
|
||||
|
||||
|
||||
class _Version(NamedTuple):
|
||||
epoch: int
|
||||
release: Tuple[int, ...]
|
||||
dev: Optional[Tuple[str, int]]
|
||||
pre: Optional[Tuple[str, int]]
|
||||
post: Optional[Tuple[str, int]]
|
||||
local: Optional[LocalType]
|
||||
|
||||
|
||||
def parse(version: str) -> "Version":
|
||||
"""Parse the given version string.
|
||||
|
||||
>>> parse('1.0.dev1')
|
||||
<Version('1.0.dev1')>
|
||||
|
||||
:param version: The version string to parse.
|
||||
:raises InvalidVersion: When the version string is not a valid version.
|
||||
"""
|
||||
return Version(version)
|
||||
|
||||
|
||||
class InvalidVersion(ValueError):
|
||||
"""Raised when a version string is not a valid version.
|
||||
|
||||
>>> Version("invalid")
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
packaging.version.InvalidVersion: Invalid version: 'invalid'
|
||||
"""
|
||||
|
||||
|
||||
class _BaseVersion:
|
||||
_key: Tuple[Any, ...]
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(self._key)
|
||||
|
||||
# Please keep the duplicated `isinstance` check
|
||||
# in the six comparisons hereunder
|
||||
# unless you find a way to avoid adding overhead function calls.
|
||||
def __lt__(self, other: "_BaseVersion") -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key < other._key
|
||||
|
||||
def __le__(self, other: "_BaseVersion") -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return self._key <= other._key
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
        return self._key == other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""


class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    _key: CmpKey

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be rounded-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        return self._version.epoch

    @property
    def release(self) -> Tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        return self._version.release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        return self._version.pre

    @property
    def post(self) -> Optional[int]:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1.2.3+abc.dev1").public
        '1.2.3'
        """
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3+abc.dev1").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0


def _parse_letter_version(
    letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
) -> Optional[Tuple[str, int]]:

    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)

    return None


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )
    return None


def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[LocalType],
) -> CmpKey:

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: CmpPrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: CmpPrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: CmpPrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
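The `Version` class and helpers above are the vendored copy of the `packaging` project's PEP 440 implementation (kept under `pylib/packaging`, per the note in the pyproject.toml below). A minimal sketch of how parsing and ordering behave, assuming the standalone `packaging` distribution from PyPI (which this vendored module mirrors) is installed:
```
# Minimal sketch, not part of the vendored file.
# Assumes "pip install packaging"; the vendored copy mirrors this API.
from packaging.version import InvalidVersion, Version

v = Version("1!2.0.0rc1.post3.dev4+abc.5")
print(v.epoch)         # 1
print(v.release)       # (2, 0, 0)
print(v.pre)           # ('rc', 1)
print(v.post)          # 3
print(v.dev)           # 4
print(v.local)         # 'abc.5'
print(v.public)        # '1!2.0.0rc1.post3.dev4'
print(v.base_version)  # '1!2.0.0'

# _cmpkey ordering: dev-only release < pre-release < final < post-release
assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0") < Version("1.0.post1")

try:
    Version("not a version")
except InvalidVersion as exc:
    print(exc)  # Invalid version: 'not a version'
```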
119
my-app/node_modules/node-gyp/gyp/pyproject.toml
generated
vendored
Executable file
@ -0,0 +1,119 @@
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "gyp-next"
version = "0.16.1"
authors = [
  { name="Node.js contributors", email="ryzokuken@disroot.org" },
]
description = "A fork of the GYP build system for use in the Node.js projects"
readme = "README.md"
license = { file="LICENSE" }
requires-python = ">=3.8"
# The Python module "packaging" is vendored in the "pylib/packaging" directory to support Python >= 3.12.
# dependencies = ["packaging>=23.1"]  # Uncomment this line if the vendored version is removed.
classifiers = [
  "Development Status :: 3 - Alpha",
  "Environment :: Console",
  "Intended Audience :: Developers",
  "License :: OSI Approved :: BSD License",
  "Natural Language :: English",
  "Programming Language :: Python",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: 3.8",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
]

[project.optional-dependencies]
dev = ["flake8", "ruff", "pytest"]

[project.scripts]
gyp = "gyp:script_main"

[project.urls]
"Homepage" = "https://github.com/nodejs/gyp-next"

[tool.ruff]
select = [
  "C4",   # flake8-comprehensions
  "C90",  # McCabe cyclomatic complexity
  "DTZ",  # flake8-datetimez
  "E",    # pycodestyle
  "F",    # Pyflakes
  "G",    # flake8-logging-format
  "ICN",  # flake8-import-conventions
  "INT",  # flake8-gettext
  "PL",   # Pylint
  "PYI",  # flake8-pyi
  "RSE",  # flake8-raise
  "RUF",  # Ruff-specific rules
  "T10",  # flake8-debugger
  "TCH",  # flake8-type-checking
  "TID",  # flake8-tidy-imports
  "UP",   # pyupgrade
  "W",    # pycodestyle
  "YTT",  # flake8-2020
  # "A",    # flake8-builtins
  # "ANN",  # flake8-annotations
  # "ARG",  # flake8-unused-arguments
  # "B",    # flake8-bugbear
  # "BLE",  # flake8-blind-except
  # "COM",  # flake8-commas
  # "D",    # pydocstyle
  # "DJ",   # flake8-django
  # "EM",   # flake8-errmsg
  # "ERA",  # eradicate
  # "EXE",  # flake8-executable
  # "FBT",  # flake8-boolean-trap
  # "I",    # isort
  # "INP",  # flake8-no-pep420
  # "ISC",  # flake8-implicit-str-concat
  # "N",    # pep8-naming
  # "NPY",  # NumPy-specific rules
  # "PD",   # pandas-vet
  # "PGH",  # pygrep-hooks
  # "PIE",  # flake8-pie
  # "PT",   # flake8-pytest-style
  # "PTH",  # flake8-use-pathlib
  # "Q",    # flake8-quotes
  # "RET",  # flake8-return
  # "S",    # flake8-bandit
  # "SIM",  # flake8-simplify
  # "SLF",  # flake8-self
  # "T20",  # flake8-print
  # "TRY",  # tryceratops
]
ignore = [
  "E721",
  "PLC1901",
  "PLR0402",
  "PLR1714",
  "PLR2004",
  "PLR5501",
  "PLW0603",
  "PLW2901",
  "PYI024",
  "RUF005",
  "RUF012",
  "UP031",
]
extend-exclude = ["pylib/packaging"]
line-length = 88
target-version = "py37"

[tool.ruff.mccabe]
max-complexity = 101

[tool.ruff.pylint]
max-args = 11
max-branches = 108
max-returns = 10
max-statements = 286

[tool.setuptools]
package-dir = {"" = "pylib"}
packages = ["gyp", "gyp.generator"]
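The `[project.scripts]` entry above is what exposes the `gyp` console command, and `[tool.setuptools]` maps the package root to `pylib/`. A small sketch for inspecting this metadata, assuming Python 3.11+ (so the standard-library `tomllib` is available) and that the file is read from the gyp directory:
```
# Minimal sketch, assuming Python 3.11+ (tomllib) and that the
# pyproject.toml shown above is in the current working directory.
import tomllib

with open("pyproject.toml", "rb") as f:
    meta = tomllib.load(f)

print(meta["project"]["name"], meta["project"]["version"])  # gyp-next 0.16.1
print(meta["project"]["requires-python"])                   # >=3.8
print(meta["project"]["scripts"]["gyp"])                    # gyp:script_main
print(meta["tool"]["setuptools"]["package-dir"])            # {'': 'pylib'}
```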
261
my-app/node_modules/node-gyp/gyp/test_gyp.py
generated
vendored
Executable file
@ -0,0 +1,261 @@
#!/usr/bin/env python3
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""gyptest.py -- test runner for GYP tests."""


import argparse
import os
import platform
import subprocess
import sys
import time


def is_test_name(f):
    return f.startswith("gyptest") and f.endswith(".py")


def find_all_gyptest_files(directory):
    result = []
    for root, dirs, files in os.walk(directory):
        result.extend([os.path.join(root, f) for f in files if is_test_name(f)])
    result.sort()
    return result


def main(argv=None):
    if argv is None:
        argv = sys.argv

    parser = argparse.ArgumentParser()
    parser.add_argument("-a", "--all", action="store_true", help="run all tests")
    parser.add_argument("-C", "--chdir", action="store", help="change to directory")
    parser.add_argument(
        "-f",
        "--format",
        action="store",
        default="",
        help="run tests with the specified formats",
    )
    parser.add_argument(
        "-G",
        "--gyp_option",
        action="append",
        default=[],
        help="Add -G options to the gyp command line",
    )
    parser.add_argument(
        "-l", "--list", action="store_true", help="list available tests and exit"
    )
    parser.add_argument(
        "-n",
        "--no-exec",
        action="store_true",
        help="no execute, just print the command line",
    )
    parser.add_argument(
        "--path", action="append", default=[], help="additional $PATH directory"
    )
    parser.add_argument(
        "-q",
        "--quiet",
        action="store_true",
        help="quiet, don't print anything unless there are failures",
    )
    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="print configuration info and test results.",
    )
    parser.add_argument("tests", nargs="*")
    args = parser.parse_args(argv[1:])

    if args.chdir:
        os.chdir(args.chdir)

    if args.path:
        extra_path = [os.path.abspath(p) for p in args.path]
        extra_path = os.pathsep.join(extra_path)
        os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"]

    if not args.tests:
        if not args.all:
            sys.stderr.write("Specify -a to get all tests.\n")
            return 1
        args.tests = ["test"]

    tests = []
    for arg in args.tests:
        if os.path.isdir(arg):
            tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
        else:
            if not is_test_name(os.path.basename(arg)):
                print(arg, "is not a valid gyp test name.", file=sys.stderr)
                sys.exit(1)
            tests.append(arg)

    if args.list:
        for test in tests:
            print(test)
        sys.exit(0)

    os.environ["PYTHONPATH"] = os.path.abspath("test/lib")

    if args.verbose:
        print_configuration_info()

    if args.gyp_option and not args.quiet:
        print("Extra Gyp options: %s\n" % args.gyp_option)

    if args.format:
        format_list = args.format.split(",")
    else:
        format_list = {
            "aix5": ["make"],
            "os400": ["make"],
            "freebsd7": ["make"],
            "freebsd8": ["make"],
            "openbsd5": ["make"],
            "cygwin": ["msvs"],
            "win32": ["msvs", "ninja"],
            "linux": ["make", "ninja"],
            "linux2": ["make", "ninja"],
            "linux3": ["make", "ninja"],
            # TODO: Re-enable xcode-ninja.
            # https://bugs.chromium.org/p/gyp/issues/detail?id=530
            # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
            "darwin": ["make", "ninja", "xcode"],
        }[sys.platform]

    gyp_options = []
    for option in args.gyp_option:
        gyp_options += ["-G", option]

    runner = Runner(format_list, tests, gyp_options, args.verbose)
    runner.run()

    if not args.quiet:
        runner.print_results()

    return 1 if runner.failures else 0


def print_configuration_info():
    print("Test configuration:")
    if sys.platform == "darwin":
        sys.path.append(os.path.abspath("test/lib"))
        import TestMac

        print(f"  Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
        print(f"  Xcode {TestMac.Xcode.Version()}")
    elif sys.platform == "win32":
        sys.path.append(os.path.abspath("pylib"))
        import gyp.MSVSVersion

        print("  Win %s %s\n" % platform.win32_ver()[0:2])
        print("  MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
    elif sys.platform in ("linux", "linux2"):
        print("  Linux %s" % " ".join(platform.linux_distribution()))
    print(f"  Python {platform.python_version()}")
    print(f"  PYTHONPATH={os.environ['PYTHONPATH']}")
    print()


class Runner:
    def __init__(self, formats, tests, gyp_options, verbose):
        self.formats = formats
        self.tests = tests
        self.verbose = verbose
        self.gyp_options = gyp_options
        self.failures = []
        self.num_tests = len(formats) * len(tests)
        num_digits = len(str(self.num_tests))
        self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits)
        self.isatty = sys.stdout.isatty() and not self.verbose
        self.env = os.environ.copy()
        self.hpos = 0

    def run(self):
        run_start = time.time()

        i = 1
        for fmt in self.formats:
            for test in self.tests:
                self.run_test(test, fmt, i)
                i += 1

        if self.isatty:
            self.erase_current_line()

        self.took = time.time() - run_start

    def run_test(self, test, fmt, i):
        if self.isatty:
            self.erase_current_line()

        msg = self.fmt_str % (i, self.num_tests, fmt, test)
        self.print_(msg)

        start = time.time()
        cmd = [sys.executable, test] + self.gyp_options
        self.env["TESTGYP_FORMAT"] = fmt
        proc = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env
        )
        proc.wait()
        took = time.time() - start

        stdout = proc.stdout.read().decode("utf8")
        if proc.returncode == 2:
            res = "skipped"
        elif proc.returncode:
            res = "failed"
            self.failures.append(f"({test}) {fmt}")
        else:
            res = "passed"
        res_msg = f" {res} {took:.3f}s"
        self.print_(res_msg)

        if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")):
            print()
            print("\n".join(f"    {line}" for line in stdout.splitlines()))
        elif not self.isatty:
            print()

    def print_(self, msg):
        print(msg, end="")
        index = msg.rfind("\n")
        if index == -1:
            self.hpos += len(msg)
        else:
            self.hpos = len(msg) - index
        sys.stdout.flush()

    def erase_current_line(self):
        print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="")
        sys.stdout.flush()
        self.hpos = 0

    def print_results(self):
        num_failures = len(self.failures)
        if num_failures:
            print()
            if num_failures == 1:
                print("Failed the following test:")
            else:
                print("Failed the following %d tests:" % num_failures)
            print("\t" + "\n\t".join(sorted(self.failures)))
            print()
        print(
            "Ran %d tests in %.3fs, %d failed."
            % (self.num_tests, self.took, num_failures)
        )
        print()


if __name__ == "__main__":
    sys.exit(main())
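The runner above discovers `gyptest-*.py` files and executes each one once per generator format, passing the format to the child process via `TESTGYP_FORMAT`. A short sketch of the discovery helpers, assuming the script is importable as `test_gyp` and that a `test/` directory containing gyptest files exists (both are illustrative assumptions):
```
# Minimal sketch, not part of the vendored runner.
# Assumes test_gyp.py is on sys.path and a test/ directory is present.
import test_gyp

print(test_gyp.is_test_name("gyptest-all.py"))  # True
print(test_gyp.is_test_name("conftest.py"))     # False

for path in test_gyp.find_all_gyptest_files("test"):
    print(path)

# The same discovery drives the CLI, e.g.:  python3 test_gyp.py -a -f make -l
```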