Created starter files for the project.
commit 73f0c0db42
1992 changed files with 769897 additions and 0 deletions
@@ -0,0 +1,37 @@
"""Metadata generation logic for source distributions.
"""

import os

from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._internal.build_env import BuildEnvironment
    from pip._vendor.pep517.wrappers import Pep517HookCaller


def generate_metadata(build_env, backend):
    # type: (BuildEnvironment, Pep517HookCaller) -> str
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(
        kind="modern-metadata", globally_managed=True
    )

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )

    return os.path.join(metadata_dir, distinfo_dir)

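Note (not part of the commit): a minimal sketch of driving the same PEP 517 hook directly through the vendored pep517 wrapper, assuming a hypothetical unpacked sdist and the setuptools backend; pip itself additionally wires in a build environment and spinner runner as shown above.

import tempfile

from pip._vendor.pep517.wrappers import Pep517HookCaller

source_dir = "/path/to/unpacked/sdist"        # hypothetical checkout
backend = Pep517HookCaller(source_dir, "setuptools.build_meta")

metadata_dir = tempfile.mkdtemp(prefix="demo-metadata-")
# The wrapper falls back to building a wheel if the backend does not
# define prepare_metadata_for_build_wheel (see the comment above).
distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
print("metadata directory:", distinfo_dir)
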
@@ -0,0 +1,77 @@
"""Metadata generation logic for legacy source distributions.
"""

import logging
import os

from pip._internal.exceptions import InstallationError
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._internal.build_env import BuildEnvironment

logger = logging.getLogger(__name__)


def _find_egg_info(directory):
    # type: (str) -> str
    """Find an .egg-info subdirectory in `directory`.
    """
    filenames = [
        f for f in os.listdir(directory) if f.endswith(".egg-info")
    ]

    if not filenames:
        raise InstallationError(
            "No .egg-info directory found in {}".format(directory)
        )

    if len(filenames) > 1:
        raise InstallationError(
            "More than one .egg-info directory found in {}".format(
                directory
            )
        )

    return os.path.join(directory, filenames[0])


def generate_metadata(
    build_env,  # type: BuildEnvironment
    setup_py_path,  # type: str
    source_dir,  # type: str
    isolated,  # type: bool
    details,  # type: str
):
    # type: (...) -> str
    """Generate metadata using setup.py-based defacto mechanisms.

    Returns the generated metadata directory.
    """
    logger.debug(
        'Running setup.py (path:%s) egg_info for package %s',
        setup_py_path, details,
    )

    egg_info_dir = TempDirectory(
        kind="pip-egg-info", globally_managed=True
    ).path

    args = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env:
        call_subprocess(
            args,
            cwd=source_dir,
            command_desc='python setup.py egg_info',
        )

    # Return the .egg-info directory.
    return _find_egg_info(egg_info_dir)

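Note (not part of the commit): a self-contained sketch of the _find_egg_info contract, exactly one *.egg-info entry is expected in the scratch directory; the directory and name here are made up.

import os
import tempfile

scratch = tempfile.mkdtemp(prefix="demo-egg-info-")
os.mkdir(os.path.join(scratch, "example.egg-info"))   # hypothetical output

candidates = [f for f in os.listdir(scratch) if f.endswith(".egg-info")]
assert len(candidates) == 1, "zero or several .egg-info dirs would raise"
print(os.path.join(scratch, candidates[0]))
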
@@ -0,0 +1,46 @@
import logging
import os

from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional
    from pip._vendor.pep517.wrappers import Pep517HookCaller

logger = logging.getLogger(__name__)


def build_wheel_pep517(
    name,  # type: str
    backend,  # type: Pep517HookCaller
    metadata_directory,  # type: str
    build_options,  # type: List[str]
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    if build_options:
        # PEP 517 does not support --build-options
        logger.error('Cannot build wheel for %s using PEP 517 when '
                     '--build-option is present', name)
        return None
    try:
        logger.debug('Destination directory: %s', tempd)

        runner = runner_with_spinner_message(
            'Building wheel for {} (PEP 517)'.format(name)
        )
        with backend.subprocess_runner(runner):
            wheel_name = backend.build_wheel(
                tempd,
                metadata_directory=metadata_directory,
            )
    except Exception:
        logger.error('Failed building wheel for %s', name)
        return None
    return os.path.join(tempd, wheel_name)

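Note (not part of the commit): the call shape for build_wheel_pep517 as defined above, reusing the hypothetical `backend` Pep517HookCaller from the metadata sketch earlier; the project name and directories are made up.

import tempfile

wheel_path = build_wheel_pep517(
    name="example",
    backend=backend,                           # a Pep517HookCaller
    metadata_directory="/path/to/example.dist-info",
    build_options=[],                          # must be empty under PEP 517
    tempd=tempfile.mkdtemp(prefix="demo-wheel-"),
)
print(wheel_path or "build failed")
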
@@ -0,0 +1,115 @@
import logging
import os.path

from pip._internal.cli.spinners import open_spinner
from pip._internal.utils.setuptools_build import (
    make_setuptools_bdist_wheel_args,
)
from pip._internal.utils.subprocess import (
    LOG_DIVIDER,
    call_subprocess,
    format_command_args,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Text

logger = logging.getLogger(__name__)


def format_command_result(
    command_args,  # type: List[str]
    command_output,  # type: Text
):
    # type: (...) -> str
    """Format command information for logging."""
    command_desc = format_command_args(command_args)
    text = 'Command arguments: {}\n'.format(command_desc)

    if not command_output:
        text += 'Command output: None'
    elif logger.getEffectiveLevel() > logging.DEBUG:
        text += 'Command output: [use --verbose to show]'
    else:
        if not command_output.endswith('\n'):
            command_output += '\n'
        text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER)

    return text


def get_legacy_build_wheel_path(
    names,  # type: List[str]
    temp_dir,  # type: str
    name,  # type: str
    command_args,  # type: List[str]
    command_output,  # type: Text
):
    # type: (...) -> Optional[str]
    """Return the path to the wheel in the temporary build directory."""
    # Sort for determinism.
    names = sorted(names)
    if not names:
        msg = (
            'Legacy build of wheel for {!r} created no files.\n'
        ).format(name)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
        return None

    if len(names) > 1:
        msg = (
            'Legacy build of wheel for {!r} created more than one file.\n'
            'Filenames (choosing first): {}\n'
        ).format(name, names)
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)

    return os.path.join(temp_dir, names[0])


def build_wheel_legacy(
    name,  # type: str
    setup_py_path,  # type: str
    source_dir,  # type: str
    global_options,  # type: List[str]
    build_options,  # type: List[str]
    tempd,  # type: str
):
    # type: (...) -> Optional[str]
    """Build one unpacked package using the "legacy" build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    wheel_args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    spin_message = 'Building wheel for {} (setup.py)'.format(name)
    with open_spinner(spin_message) as spinner:
        logger.debug('Destination directory: %s', tempd)

        try:
            output = call_subprocess(
                wheel_args,
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            spinner.finish("error")
            logger.error('Failed building wheel for %s', name)
            return None

        names = os.listdir(tempd)
        wheel_path = get_legacy_build_wheel_path(
            names=names,
            temp_dir=tempd,
            name=name,
            command_args=wheel_args,
            command_output=output,
        )
        return wheel_path

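Note (not part of the commit): a standalone sketch of the three-way decision format_command_result makes above; the logger level is set explicitly for the demo and the command text is made up.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

command_output = "running bdist_wheel\n"
text = "Command arguments: python setup.py bdist_wheel\n"
if not command_output:
    text += "Command output: None"
elif logger.getEffectiveLevel() > logging.DEBUG:
    # at INFO and above, the captured output is elided
    text += "Command output: [use --verbose to show]"
else:
    text += "Command output:\n" + command_output
print(text)
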
venv/Lib/site-packages/pip/_internal/operations/check.py (new file, 158 lines)
@@ -0,0 +1,158 @@
"""Validation of dependencies of packages
"""

import logging
from collections import namedtuple

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.distributions import (
    make_distribution_for_install_requirement,
)
from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

logger = logging.getLogger(__name__)

if MYPY_CHECK_RUNNING:
    from pip._internal.req.req_install import InstallRequirement
    from typing import (
        Any, Callable, Dict, Optional, Set, Tuple, List
    )

    # Shorthands
    PackageSet = Dict[str, 'PackageDetails']
    Missing = Tuple[str, Any]
    Conflicting = Tuple[str, str, Any]

    MissingDict = Dict[str, List[Missing]]
    ConflictingDict = Dict[str, List[Conflicting]]
    CheckResult = Tuple[MissingDict, ConflictingDict]
    ConflictDetails = Tuple[PackageSet, CheckResult]

PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])


def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> Tuple[PackageSet, bool]
    """Converts a list of distributions into a PackageSet.
    """
    # Default to using all packages installed on the system
    if kwargs == {}:
        kwargs = {"local_only": False, "skip": ()}

    package_set = {}
    problems = False
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        try:
            package_set[name] = PackageDetails(dist.version, dist.requires())
        except RequirementParseError as e:
            # Don't crash on broken metadata
            logger.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems


def check_package_set(package_set, should_ignore=None):
    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """

    missing = {}
    conflicting = {}

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        if should_ignore and should_ignore(package_name):
            continue

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting


def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> ConflictDetails
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Install packages
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)

    return (
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        )
    )


def _simulate_installation_of(to_install, package_set):
    # type: (List[InstallRequirement], PackageSet) -> Set[str]
    """Computes the version of packages after installing to_install.
    """

    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        abstract_dist = make_distribution_for_install_requirement(inst_req)
        dist = abstract_dist.get_pkg_resources_distribution()

        assert dist is not None
        name = canonicalize_name(dist.key)
        package_set[name] = PackageDetails(dist.version, dist.requires())

        installed.add(name)

    return installed


def _create_whitelist(would_be_installed, package_set):
    # type: (Set[str], PackageSet) -> Set[str]
    packages_affected = set(would_be_installed)

    for package_name in package_set:
        if package_name in packages_affected:
            continue

        for req in package_set[package_name].requires:
            if canonicalize_name(req.name) in packages_affected:
                packages_affected.add(package_name)
                break

    return packages_affected

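Note (not part of the commit): a toy run of check_package_set, building a PackageSet by hand with pkg_resources Requirement objects; the package names and versions are made up, and the import path assumes this pip layout.

from pip._internal.operations.check import PackageDetails, check_package_set
from pip._vendor.pkg_resources import Requirement

pkgs = {
    "alpha": PackageDetails("1.0", [Requirement.parse("beta>=2.0")]),
    "beta": PackageDetails("1.5", []),
}
missing, conflicting = check_package_set(pkgs)
print(missing)      # {} since 'beta' is present, nothing is missing
print(conflicting)  # {'alpha': [('beta', '1.5', <Requirement>)]} as 1.5 < 2.0
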
venv/Lib/site-packages/pip/_internal/operations/freeze.py (new file, 272 lines)
@@ -0,0 +1,272 @@
from __future__ import absolute_import

import collections
import logging
import os

from pip._vendor import six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.direct_url_helpers import (
    direct_url_as_pep440_direct_reference,
    dist_get_direct_url,
)
from pip._internal.utils.misc import (
    dist_is_editable,
    get_installed_distributions,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
    )
    from pip._internal.cache import WheelCache
    from pip._vendor.pkg_resources import (
        Distribution, Requirement
    )

    RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]


logger = logging.getLogger(__name__)


def freeze(
    requirement=None,  # type: Optional[List[str]]
    find_links=None,  # type: Optional[List[str]]
    local_only=False,  # type: bool
    user_only=False,  # type: bool
    paths=None,  # type: Optional[List[str]]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    exclude_editable=False,  # type: bool
    skip=()  # type: Container[str]
):
    # type: (...) -> Iterator[str]
    find_links = find_links or []

    for link in find_links:
        yield '-f {}'.format(link)
    installations = {}  # type: Dict[str, FrozenRequirement]

    for dist in get_installed_distributions(
            local_only=local_only,
            skip=(),
            user_only=user_only,
            paths=paths
    ):
        try:
            req = FrozenRequirement.from_dist(dist)
        except RequirementParseError as exc:
            # We include dist rather than dist.project_name because the
            # dist string includes more information, like the version and
            # location. We also include the exception message to aid
            # troubleshooting.
            logger.warning(
                'Could not generate requirement for distribution %r: %s',
                dist, exc
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()  # type: Set[str]
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            line.startswith((
                                '-r', '--requirement',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url',
                                '--use-feature'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(
                            line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub('', line).strip(),
                                    line_req.name
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            yield str(installations[
                                line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield(
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()


def get_requirement_info(dist):
    # type: (Distribution) -> RequirementInfo
    """
    Compute and return values (req, editable, comments) for use in
    FrozenRequirement.from_dist().
    """
    if not dist_is_editable(dist):
        return (None, False, [])

    location = os.path.normcase(os.path.abspath(dist.location))

    from pip._internal.vcs import vcs, RemoteNotFoundError
    vcs_backend = vcs.get_backend_for_dir(location)

    if vcs_backend is None:
        req = dist.as_requirement()
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r', req,
            location,
        )
        comments = [
            '# Editable install with no version control ({})'.format(req)
        ]
        return (location, True, comments)

    try:
        req = vcs_backend.get_src_requirement(location, dist.project_name)
    except RemoteNotFoundError:
        req = dist.as_requirement()
        comments = [
            '# Editable {} install with no remote ({})'.format(
                type(vcs_backend).__name__, req,
            )
        ]
        return (location, True, comments)

    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            vcs_backend.name,
        )
        return (None, True, [])

    except InstallationError as exc:
        logger.warning(
            "Error when trying to get requirement for VCS system %s, "
            "falling back to uneditable format", exc
        )
    else:
        if req is not None:
            return (req, True, [])

    logger.warning(
        'Could not determine repository location of %s', location
    )
    comments = ['## !! Could not determine repository location']

    return (None, False, comments)


class FrozenRequirement(object):
    def __init__(self, name, req, editable, comments=()):
        # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
        self.name = name
        self.canonical_name = canonicalize_name(name)
        self.req = req
        self.editable = editable
        self.comments = comments

    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> FrozenRequirement
        # TODO `get_requirement_info` is taking care of editable requirements.
        # TODO This should be refactored when we will add detection of
        #      editable that provide .dist-info metadata.
        req, editable, comments = get_requirement_info(dist)
        if req is None and not editable:
            # if PEP 610 metadata is present, attempt to use it
            direct_url = dist_get_direct_url(dist)
            if direct_url:
                req = direct_url_as_pep440_direct_reference(
                    direct_url, dist.project_name
                )
                comments = []
        if req is None:
            # name==version requirement
            req = dist.as_requirement()

        return cls(dist.project_name, req, editable, comments=comments)

    def __str__(self):
        # type: () -> str
        req = self.req
        if self.editable:
            req = '-e {}'.format(req)
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'

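Note (not part of the commit): a sketch of how FrozenRequirement renders, with made-up values; __str__ joins any comments with the requirement line and prefixes '-e ' for editables. The import path assumes this pip layout.

from pip._internal.operations.freeze import FrozenRequirement

fr = FrozenRequirement("example", "example==1.0", editable=False)
print(fr, end="")    # example==1.0

fr_e = FrozenRequirement(
    "example", "git+https://example.org/repo#egg=example", editable=True,
    comments=["# Editable install"],
)
print(fr_e, end="")  # '# Editable install' line, then '-e git+...#egg=example'
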
@@ -0,0 +1,2 @@
"""For modules related to installing packages.
"""

@@ -0,0 +1,52 @@
"""Legacy editable installation process, i.e. `setup.py develop`.
"""
import logging

from pip._internal.utils.logging import indent_log
from pip._internal.utils.setuptools_build import make_setuptools_develop_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Sequence

    from pip._internal.build_env import BuildEnvironment


logger = logging.getLogger(__name__)


def install_editable(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    prefix,  # type: Optional[str]
    home,  # type: Optional[str]
    use_user_site,  # type: bool
    name,  # type: str
    setup_py_path,  # type: str
    isolated,  # type: bool
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
):
    # type: (...) -> None
    """Install a package in editable mode. Most arguments are pass-through
    to setuptools.
    """
    logger.info('Running setup.py develop for %s', name)

    args = make_setuptools_develop_args(
        setup_py_path,
        global_options=global_options,
        install_options=install_options,
        no_user_config=isolated,
        prefix=prefix,
        home=home,
        use_user_site=use_user_site,
    )

    with indent_log():
        with build_env:
            call_subprocess(
                args,
                cwd=unpacked_source_directory,
            )

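Note (not part of the commit): roughly the command the helper above assembles. This is a sketch; pip's real invocation goes through a small '-c' shim that imports setuptools first, and the exact flags come from make_setuptools_develop_args, so treat the argument list here as an approximation. The source path is hypothetical.

import subprocess
import sys

subprocess.check_call(
    [sys.executable, "setup.py", "develop", "--no-deps"],
    cwd="/path/to/unpacked/source",            # hypothetical
)
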
@@ -0,0 +1,130 @@
"""Legacy installation process, i.e. `setup.py install`.
"""

import logging
import os
import sys
from distutils.util import change_root

from pip._internal.exceptions import InstallationError
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import ensure_dir
from pip._internal.utils.setuptools_build import make_setuptools_install_args
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Sequence

    from pip._internal.build_env import BuildEnvironment
    from pip._internal.models.scheme import Scheme


logger = logging.getLogger(__name__)


class LegacyInstallFailure(Exception):
    def __init__(self):
        # type: () -> None
        self.parent = sys.exc_info()


def install(
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    use_user_site,  # type: bool
    pycompile,  # type: bool
    scheme,  # type: Scheme
    setup_py_path,  # type: str
    isolated,  # type: bool
    req_name,  # type: str
    build_env,  # type: BuildEnvironment
    unpacked_source_directory,  # type: str
    req_description,  # type: str
):
    # type: (...) -> bool

    header_dir = scheme.headers

    with TempDirectory(kind="record") as temp_dir:
        try:
            record_filename = os.path.join(temp_dir.path, 'install-record.txt')
            install_args = make_setuptools_install_args(
                setup_py_path,
                global_options=global_options,
                install_options=install_options,
                record_filename=record_filename,
                root=root,
                prefix=prefix,
                header_dir=header_dir,
                home=home,
                use_user_site=use_user_site,
                no_user_config=isolated,
                pycompile=pycompile,
            )

            runner = runner_with_spinner_message(
                "Running setup.py install for {}".format(req_name)
            )
            with indent_log(), build_env:
                runner(
                    cmd=install_args,
                    cwd=unpacked_source_directory,
                )

            if not os.path.exists(record_filename):
                logger.debug('Record file %s not found', record_filename)
                # Signal to the caller that we didn't install the new package
                return False

        except Exception:
            # Signal to the caller that we didn't install the new package
            raise LegacyInstallFailure

        # At this point, we have successfully installed the requirement.

        # We intentionally do not use any encoding to read the file because
        # setuptools writes the file using distutils.file_util.write_file,
        # which does not specify an encoding.
        with open(record_filename) as f:
            record_lines = f.read().splitlines()

    def prepend_root(path):
        # type: (str) -> str
        if root is None or not os.path.isabs(path):
            return path
        else:
            return change_root(root, path)

    for line in record_lines:
        directory = os.path.dirname(line)
        if directory.endswith('.egg-info'):
            egg_info_dir = prepend_root(directory)
            break
    else:
        message = (
            "{} did not indicate that it installed an "
            ".egg-info directory. Only setup.py projects "
            "generating .egg-info directories are supported."
        ).format(req_description)
        raise InstallationError(message)

    new_lines = []
    for line in record_lines:
        filename = line.strip()
        if os.path.isdir(filename):
            filename += os.path.sep
        new_lines.append(
            os.path.relpath(prepend_root(filename), egg_info_dir)
        )
    new_lines.sort()
    ensure_dir(egg_info_dir)
    inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
    with open(inst_files_path, 'w') as f:
        f.write('\n'.join(new_lines) + '\n')

    return True

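Note (not part of the commit): a sketch of the prepend_root logic above, using distutils.util.change_root directly; absolute paths from the install record are re-rooted under a --root destination. The paths are made up.

from distutils.util import change_root

root = "/tmp/stage"                                    # hypothetical --root
path = "/usr/lib/python3/dist-packages/example.py"     # absolute record entry
print(change_root(root, path))
# /tmp/stage/usr/lib/python3/dist-packages/example.py  (on POSIX)
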
venv/Lib/site-packages/pip/_internal/operations/install/wheel.py (new file, 861 lines)
@ -0,0 +1,861 @@
|
|||
"""Support for installing and building the "wheel" binary package format.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import collections
|
||||
import compileall
|
||||
import contextlib
|
||||
import csv
|
||||
import importlib
|
||||
import logging
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import warnings
|
||||
from base64 import urlsafe_b64encode
|
||||
from itertools import chain, starmap
|
||||
from zipfile import ZipFile
|
||||
|
||||
from pip._vendor import pkg_resources
|
||||
from pip._vendor.distlib.scripts import ScriptMaker
|
||||
from pip._vendor.distlib.util import get_export_entry
|
||||
from pip._vendor.six import (
|
||||
PY2,
|
||||
ensure_str,
|
||||
ensure_text,
|
||||
itervalues,
|
||||
reraise,
|
||||
text_type,
|
||||
)
|
||||
from pip._vendor.six.moves import filterfalse, map
|
||||
|
||||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.locations import get_major_minor_version
|
||||
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
|
||||
from pip._internal.models.scheme import SCHEME_KEYS
|
||||
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
|
||||
from pip._internal.utils.misc import (
|
||||
captured_stdout,
|
||||
ensure_dir,
|
||||
hash_file,
|
||||
partition,
|
||||
)
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
from pip._internal.utils.unpacking import (
|
||||
current_umask,
|
||||
is_within_directory,
|
||||
set_extracted_file_to_default_mode_plus_executable,
|
||||
zip_item_is_executable,
|
||||
)
|
||||
from pip._internal.utils.wheel import (
|
||||
parse_wheel,
|
||||
pkg_resources_distribution_for_wheel,
|
||||
)
|
||||
|
||||
# Use the custom cast function at runtime to make cast work,
|
||||
# and import typing.cast when performing pre-commit and type
|
||||
# checks
|
||||
if not MYPY_CHECK_RUNNING:
|
||||
from pip._internal.utils.typing import cast
|
||||
else:
|
||||
from email.message import Message
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
IO,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
NewType,
|
||||
Optional,
|
||||
Protocol,
|
||||
Sequence,
|
||||
Set,
|
||||
Tuple,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from zipfile import ZipInfo
|
||||
|
||||
from pip._vendor.pkg_resources import Distribution
|
||||
|
||||
from pip._internal.models.scheme import Scheme
|
||||
from pip._internal.utils.filesystem import NamedTemporaryFileResult
|
||||
|
||||
RecordPath = NewType('RecordPath', text_type)
|
||||
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
|
||||
|
||||
class File(Protocol):
|
||||
src_record_path = None # type: RecordPath
|
||||
dest_path = None # type: text_type
|
||||
changed = None # type: bool
|
||||
|
||||
def save(self):
|
||||
# type: () -> None
|
||||
pass
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def rehash(path, blocksize=1 << 20):
|
||||
# type: (text_type, int) -> Tuple[str, str]
|
||||
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
|
||||
h, length = hash_file(path, blocksize)
|
||||
digest = 'sha256=' + urlsafe_b64encode(
|
||||
h.digest()
|
||||
).decode('latin1').rstrip('=')
|
||||
# unicode/str python2 issues
|
||||
return (digest, str(length)) # type: ignore
|
||||
|
||||
|
||||
def csv_io_kwargs(mode):
|
||||
# type: (str) -> Dict[str, Any]
|
||||
"""Return keyword arguments to properly open a CSV file
|
||||
in the given mode.
|
||||
"""
|
||||
if PY2:
|
||||
return {'mode': '{}b'.format(mode)}
|
||||
else:
|
||||
return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
|
||||
|
||||
|
||||
def fix_script(path):
|
||||
# type: (text_type) -> bool
|
||||
"""Replace #!python with #!/path/to/python
|
||||
Return True if file was changed.
|
||||
"""
|
||||
# XXX RECORD hashes will need to be updated
|
||||
assert os.path.isfile(path)
|
||||
|
||||
with open(path, 'rb') as script:
|
||||
firstline = script.readline()
|
||||
if not firstline.startswith(b'#!python'):
|
||||
return False
|
||||
exename = sys.executable.encode(sys.getfilesystemencoding())
|
||||
firstline = b'#!' + exename + os.linesep.encode("ascii")
|
||||
rest = script.read()
|
||||
with open(path, 'wb') as script:
|
||||
script.write(firstline)
|
||||
script.write(rest)
|
||||
return True
|
||||
|
||||
|
||||
def wheel_root_is_purelib(metadata):
|
||||
# type: (Message) -> bool
|
||||
return metadata.get("Root-Is-Purelib", "").lower() == "true"
|
||||
|
||||
|
||||
def get_entrypoints(distribution):
|
||||
# type: (Distribution) -> Tuple[Dict[str, str], Dict[str, str]]
|
||||
# get the entry points and then the script names
|
||||
try:
|
||||
console = distribution.get_entry_map('console_scripts')
|
||||
gui = distribution.get_entry_map('gui_scripts')
|
||||
except KeyError:
|
||||
# Our dict-based Distribution raises KeyError if entry_points.txt
|
||||
# doesn't exist.
|
||||
return {}, {}
|
||||
|
||||
def _split_ep(s):
|
||||
# type: (pkg_resources.EntryPoint) -> Tuple[str, str]
|
||||
"""get the string representation of EntryPoint,
|
||||
remove space and split on '='
|
||||
"""
|
||||
split_parts = str(s).replace(" ", "").split("=")
|
||||
return split_parts[0], split_parts[1]
|
||||
|
||||
# convert the EntryPoint objects into strings with module:function
|
||||
console = dict(_split_ep(v) for v in console.values())
|
||||
gui = dict(_split_ep(v) for v in gui.values())
|
||||
return console, gui
|
||||
|
||||
|
||||
def message_about_scripts_not_on_PATH(scripts):
|
||||
# type: (Sequence[str]) -> Optional[str]
|
||||
"""Determine if any scripts are not on PATH and format a warning.
|
||||
Returns a warning message if one or more scripts are not on PATH,
|
||||
otherwise None.
|
||||
"""
|
||||
if not scripts:
|
||||
return None
|
||||
|
||||
# Group scripts by the path they were installed in
|
||||
grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]]
|
||||
for destfile in scripts:
|
||||
parent_dir = os.path.dirname(destfile)
|
||||
script_name = os.path.basename(destfile)
|
||||
grouped_by_dir[parent_dir].add(script_name)
|
||||
|
||||
# We don't want to warn for directories that are on PATH.
|
||||
not_warn_dirs = [
|
||||
os.path.normcase(i).rstrip(os.sep) for i in
|
||||
os.environ.get("PATH", "").split(os.pathsep)
|
||||
]
|
||||
# If an executable sits with sys.executable, we don't warn for it.
|
||||
# This covers the case of venv invocations without activating the venv.
|
||||
not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
|
||||
warn_for = {
|
||||
parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
|
||||
if os.path.normcase(parent_dir) not in not_warn_dirs
|
||||
} # type: Dict[str, Set[str]]
|
||||
if not warn_for:
|
||||
return None
|
||||
|
||||
# Format a message
|
||||
msg_lines = []
|
||||
for parent_dir, dir_scripts in warn_for.items():
|
||||
sorted_scripts = sorted(dir_scripts) # type: List[str]
|
||||
if len(sorted_scripts) == 1:
|
||||
start_text = "script {} is".format(sorted_scripts[0])
|
||||
else:
|
||||
start_text = "scripts {} are".format(
|
||||
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
|
||||
)
|
||||
|
||||
msg_lines.append(
|
||||
"The {} installed in '{}' which is not on PATH."
|
||||
.format(start_text, parent_dir)
|
||||
)
|
||||
|
||||
last_line_fmt = (
|
||||
"Consider adding {} to PATH or, if you prefer "
|
||||
"to suppress this warning, use --no-warn-script-location."
|
||||
)
|
||||
if len(msg_lines) == 1:
|
||||
msg_lines.append(last_line_fmt.format("this directory"))
|
||||
else:
|
||||
msg_lines.append(last_line_fmt.format("these directories"))
|
||||
|
||||
# Add a note if any directory starts with ~
|
||||
warn_for_tilde = any(
|
||||
i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
|
||||
)
|
||||
if warn_for_tilde:
|
||||
tilde_warning_msg = (
|
||||
"NOTE: The current PATH contains path(s) starting with `~`, "
|
||||
"which may not be expanded by all applications."
|
||||
)
|
||||
msg_lines.append(tilde_warning_msg)
|
||||
|
||||
# Returns the formatted multiline message
|
||||
return "\n".join(msg_lines)
|
||||
|
||||
|
||||
def _normalized_outrows(outrows):
|
||||
# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
|
||||
"""Normalize the given rows of a RECORD file.
|
||||
|
||||
Items in each row are converted into str. Rows are then sorted to make
|
||||
the value more predictable for tests.
|
||||
|
||||
Each row is a 3-tuple (path, hash, size) and corresponds to a record of
|
||||
a RECORD file (see PEP 376 and PEP 427 for details). For the rows
|
||||
passed to this function, the size can be an integer as an int or string,
|
||||
or the empty string.
|
||||
"""
|
||||
# Normally, there should only be one row per path, in which case the
|
||||
# second and third elements don't come into play when sorting.
|
||||
# However, in cases in the wild where a path might happen to occur twice,
|
||||
# we don't want the sort operation to trigger an error (but still want
|
||||
# determinism). Since the third element can be an int or string, we
|
||||
# coerce each element to a string to avoid a TypeError in this case.
|
||||
# For additional background, see--
|
||||
# https://github.com/pypa/pip/issues/5868
|
||||
return sorted(
|
||||
(ensure_str(record_path, encoding='utf-8'), hash_, str(size))
|
||||
for record_path, hash_, size in outrows
|
||||
)
|
||||
|
||||
|
||||
def _record_to_fs_path(record_path):
|
||||
# type: (RecordPath) -> text_type
|
||||
return record_path
|
||||
|
||||
|
||||
def _fs_to_record_path(path, relative_to=None):
|
||||
# type: (text_type, Optional[text_type]) -> RecordPath
|
||||
if relative_to is not None:
|
||||
# On Windows, do not handle relative paths if they belong to different
|
||||
# logical disks
|
||||
if os.path.splitdrive(path)[0].lower() == \
|
||||
os.path.splitdrive(relative_to)[0].lower():
|
||||
path = os.path.relpath(path, relative_to)
|
||||
path = path.replace(os.path.sep, '/')
|
||||
return cast('RecordPath', path)
|
||||
|
||||
|
||||
def _parse_record_path(record_column):
|
||||
# type: (str) -> RecordPath
|
||||
p = ensure_text(record_column, encoding='utf-8')
|
||||
return cast('RecordPath', p)
|
||||
|
||||
|
||||
def get_csv_rows_for_installed(
|
||||
old_csv_rows, # type: List[List[str]]
|
||||
installed, # type: Dict[RecordPath, RecordPath]
|
||||
changed, # type: Set[RecordPath]
|
||||
generated, # type: List[str]
|
||||
lib_dir, # type: str
|
||||
):
|
||||
# type: (...) -> List[InstalledCSVRow]
|
||||
"""
|
||||
:param installed: A map from archive RECORD path to installation RECORD
|
||||
path.
|
||||
"""
|
||||
installed_rows = [] # type: List[InstalledCSVRow]
|
||||
for row in old_csv_rows:
|
||||
if len(row) > 3:
|
||||
logger.warning('RECORD line has more than three elements: %s', row)
|
||||
old_record_path = _parse_record_path(row[0])
|
||||
new_record_path = installed.pop(old_record_path, old_record_path)
|
||||
if new_record_path in changed:
|
||||
digest, length = rehash(_record_to_fs_path(new_record_path))
|
||||
else:
|
||||
digest = row[1] if len(row) > 1 else ''
|
||||
length = row[2] if len(row) > 2 else ''
|
||||
installed_rows.append((new_record_path, digest, length))
|
||||
for f in generated:
|
||||
path = _fs_to_record_path(f, lib_dir)
|
||||
digest, length = rehash(f)
|
||||
installed_rows.append((path, digest, length))
|
||||
for installed_record_path in itervalues(installed):
|
||||
installed_rows.append((installed_record_path, '', ''))
|
||||
return installed_rows
|
||||
|
||||
|
||||
def get_console_script_specs(console):
|
||||
# type: (Dict[str, str]) -> List[str]
|
||||
"""
|
||||
Given the mapping from entrypoint name to callable, return the relevant
|
||||
console script specs.
|
||||
"""
|
||||
# Don't mutate caller's version
|
||||
console = console.copy()
|
||||
|
||||
scripts_to_generate = []
|
||||
|
||||
# Special case pip and setuptools to generate versioned wrappers
|
||||
#
|
||||
# The issue is that some projects (specifically, pip and setuptools) use
|
||||
# code in setup.py to create "versioned" entry points - pip2.7 on Python
|
||||
# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
|
||||
# the wheel metadata at build time, and so if the wheel is installed with
|
||||
# a *different* version of Python the entry points will be wrong. The
|
||||
# correct fix for this is to enhance the metadata to be able to describe
|
||||
# such versioned entry points, but that won't happen till Metadata 2.0 is
|
||||
# available.
|
||||
# In the meantime, projects using versioned entry points will either have
|
||||
# incorrect versioned entry points, or they will not be able to distribute
|
||||
# "universal" wheels (i.e., they will need a wheel per Python version).
|
||||
#
|
||||
# Because setuptools and pip are bundled with _ensurepip and virtualenv,
|
||||
# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
|
||||
# override the versioned entry points in the wheel and generate the
|
||||
# correct ones. This code is purely a short-term measure until Metadata 2.0
|
||||
# is available.
|
||||
#
|
||||
# To add the level of hack in this section of code, in order to support
|
||||
# ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment
|
||||
# variable which will control which version scripts get installed.
|
||||
#
|
||||
# ENSUREPIP_OPTIONS=altinstall
|
||||
# - Only pipX.Y and easy_install-X.Y will be generated and installed
|
||||
# ENSUREPIP_OPTIONS=install
|
||||
# - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
|
||||
# that this option is technically if ENSUREPIP_OPTIONS is set and is
|
||||
# not altinstall
|
||||
# DEFAULT
|
||||
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
|
||||
# and easy_install-X.Y.
|
||||
pip_script = console.pop('pip', None)
|
||||
if pip_script:
|
||||
if "ENSUREPIP_OPTIONS" not in os.environ:
|
||||
scripts_to_generate.append('pip = ' + pip_script)
|
||||
|
||||
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
|
||||
scripts_to_generate.append(
|
||||
'pip{} = {}'.format(sys.version_info[0], pip_script)
|
||||
)
|
||||
|
||||
scripts_to_generate.append(
|
||||
'pip{} = {}'.format(get_major_minor_version(), pip_script)
|
||||
)
|
||||
# Delete any other versioned pip entry points
|
||||
pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
|
||||
for k in pip_ep:
|
||||
del console[k]
|
||||
easy_install_script = console.pop('easy_install', None)
|
||||
if easy_install_script:
|
||||
if "ENSUREPIP_OPTIONS" not in os.environ:
|
||||
scripts_to_generate.append(
|
||||
'easy_install = ' + easy_install_script
|
||||
)
|
||||
|
||||
scripts_to_generate.append(
|
||||
'easy_install-{} = {}'.format(
|
||||
get_major_minor_version(), easy_install_script
|
||||
)
|
||||
)
|
||||
# Delete any other versioned easy_install entry points
|
||||
easy_install_ep = [
|
||||
k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
|
||||
]
|
||||
for k in easy_install_ep:
|
||||
del console[k]
|
||||
|
||||
# Generate the console entry points specified in the wheel
|
||||
scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))
|
||||
|
||||
return scripts_to_generate
|
||||
|
||||
|
||||
class ZipBackedFile(object):
|
||||
def __init__(self, src_record_path, dest_path, zip_file):
|
||||
# type: (RecordPath, text_type, ZipFile) -> None
|
||||
self.src_record_path = src_record_path
|
||||
self.dest_path = dest_path
|
||||
self._zip_file = zip_file
|
||||
self.changed = False
|
||||
|
||||
def _getinfo(self):
|
||||
# type: () -> ZipInfo
|
||||
if not PY2:
|
||||
return self._zip_file.getinfo(self.src_record_path)
|
||||
# Python 2 does not expose a way to detect a ZIP's encoding, but the
|
||||
# wheel specification (PEP 427) explicitly mandates that paths should
|
||||
# use UTF-8, so we assume it is true.
|
||||
return self._zip_file.getinfo(self.src_record_path.encode("utf-8"))
|
||||
|
||||
def save(self):
|
||||
# type: () -> None
|
||||
# directory creation is lazy and after file filtering
|
||||
# to ensure we don't install empty dirs; empty dirs can't be
|
||||
# uninstalled.
|
||||
parent_dir = os.path.dirname(self.dest_path)
|
||||
ensure_dir(parent_dir)
|
||||
|
||||
# When we open the output file below, any existing file is truncated
|
||||
# before we start writing the new contents. This is fine in most
|
||||
# cases, but can cause a segfault if pip has loaded a shared
|
||||
# object (e.g. from pyopenssl through its vendored urllib3)
|
||||
# Since the shared object is mmap'd an attempt to call a
|
||||
# symbol in it will then cause a segfault. Unlinking the file
|
||||
# allows writing of new contents while allowing the process to
|
||||
# continue to use the old copy.
|
||||
if os.path.exists(self.dest_path):
|
||||
os.unlink(self.dest_path)
|
||||
|
||||
zipinfo = self._getinfo()
|
||||
|
||||
with self._zip_file.open(zipinfo) as f:
|
||||
with open(self.dest_path, "wb") as dest:
|
||||
shutil.copyfileobj(f, dest)
|
||||
|
||||
if zip_item_is_executable(zipinfo):
|
||||
set_extracted_file_to_default_mode_plus_executable(self.dest_path)
|
||||
|
||||
|
||||
class ScriptFile(object):
|
||||
def __init__(self, file):
|
||||
# type: (File) -> None
|
||||
self._file = file
|
||||
self.src_record_path = self._file.src_record_path
|
||||
self.dest_path = self._file.dest_path
|
||||
self.changed = False
|
||||
|
||||
def save(self):
|
||||
# type: () -> None
|
||||
self._file.save()
|
||||
self.changed = fix_script(self.dest_path)
|
||||
|
||||
|
||||
class MissingCallableSuffix(InstallationError):
|
||||
def __init__(self, entry_point):
|
||||
# type: (str) -> None
|
||||
super(MissingCallableSuffix, self).__init__(
|
||||
"Invalid script entry point: {} - A callable "
|
||||
"suffix is required. Cf https://packaging.python.org/"
|
||||
"specifications/entry-points/#use-for-scripts for more "
|
||||
"information.".format(entry_point)
|
||||
)
|
||||
|
||||
|
||||
def _raise_for_invalid_entrypoint(specification):
|
||||
# type: (str) -> None
|
||||
entry = get_export_entry(specification)
|
||||
if entry is not None and entry.suffix is None:
|
||||
raise MissingCallableSuffix(str(entry))
|
||||
|
||||
|
||||
class PipScriptMaker(ScriptMaker):
|
||||
def make(self, specification, options=None):
|
||||
# type: (str, Dict[str, Any]) -> List[str]
|
||||
_raise_for_invalid_entrypoint(specification)
|
||||
return super(PipScriptMaker, self).make(specification, options)
|
||||
|
||||
|
||||
def _install_wheel(
|
||||
name, # type: str
|
||||
wheel_zip, # type: ZipFile
|
||||
wheel_path, # type: str
|
||||
scheme, # type: Scheme
|
||||
pycompile=True, # type: bool
|
||||
warn_script_location=True, # type: bool
|
||||
direct_url=None, # type: Optional[DirectUrl]
|
||||
requested=False, # type: bool
|
||||
):
|
||||
# type: (...) -> None
|
||||
"""Install a wheel.
|
||||
|
||||
:param name: Name of the project to install
|
||||
:param wheel_zip: open ZipFile for wheel being installed
|
||||
:param scheme: Distutils scheme dictating the install directories
|
||||
:param req_description: String used in place of the requirement, for
|
||||
logging
|
||||
:param pycompile: Whether to byte-compile installed Python files
|
||||
:param warn_script_location: Whether to check that scripts are installed
|
||||
into a directory on PATH
|
||||
:raises UnsupportedWheel:
|
||||
* when the directory holds an unpacked wheel with incompatible
|
||||
Wheel-Version
|
||||
* when the .dist-info dir does not match the wheel
|
||||
"""
|
||||
info_dir, metadata = parse_wheel(wheel_zip, name)
|
||||
|
||||
if wheel_root_is_purelib(metadata):
|
||||
lib_dir = scheme.purelib
|
||||
else:
|
||||
lib_dir = scheme.platlib
|
||||
|
||||
# Record details of the files moved
|
||||
# installed = files copied from the wheel to the destination
|
||||
# changed = files changed while installing (scripts #! line typically)
|
||||
# generated = files newly generated during the install (script wrappers)
|
||||
installed = {} # type: Dict[RecordPath, RecordPath]
|
||||
changed = set() # type: Set[RecordPath]
|
||||
generated = [] # type: List[str]
|
||||
|
||||
def record_installed(srcfile, destfile, modified=False):
|
||||
# type: (RecordPath, text_type, bool) -> None
|
||||
"""Map archive RECORD paths to installation RECORD paths."""
|
||||
newpath = _fs_to_record_path(destfile, lib_dir)
|
||||
installed[srcfile] = newpath
|
||||
if modified:
|
||||
changed.add(_fs_to_record_path(destfile))
|
||||
|
||||
def all_paths():
|
||||
# type: () -> Iterable[RecordPath]
|
||||
names = wheel_zip.namelist()
|
||||
# If a flag is set, names may be unicode in Python 2. We convert to
|
||||
# text explicitly so these are valid for lookup in RECORD.
|
||||
decoded_names = map(ensure_text, names)
|
||||
for name in decoded_names:
|
||||
yield cast("RecordPath", name)
|
||||
|
||||
def is_dir_path(path):
|
||||
# type: (RecordPath) -> bool
|
||||
return path.endswith("/")
|
||||
|
||||
def assert_no_path_traversal(dest_dir_path, target_path):
|
||||
# type: (text_type, text_type) -> None
|
||||
if not is_within_directory(dest_dir_path, target_path):
|
||||
message = (
|
||||
"The wheel {!r} has a file {!r} trying to install"
|
||||
" outside the target directory {!r}"
|
||||
)
|
||||
raise InstallationError(
|
||||
message.format(wheel_path, target_path, dest_dir_path)
|
||||
)
|
||||
|
||||
def root_scheme_file_maker(zip_file, dest):
|
||||
# type: (ZipFile, text_type) -> Callable[[RecordPath], File]
|
||||
def make_root_scheme_file(record_path):
|
||||
# type: (RecordPath) -> File
|
||||
normed_path = os.path.normpath(record_path)
|
||||
dest_path = os.path.join(dest, normed_path)
|
||||
assert_no_path_traversal(dest, dest_path)
|
||||
return ZipBackedFile(record_path, dest_path, zip_file)
|
||||
|
||||
return make_root_scheme_file
|
||||
|
||||
def data_scheme_file_maker(zip_file, scheme):
|
||||
# type: (ZipFile, Scheme) -> Callable[[RecordPath], File]
|
||||
scheme_paths = {}
|
||||
for key in SCHEME_KEYS:
|
||||
encoded_key = ensure_text(key)
|
||||
scheme_paths[encoded_key] = ensure_text(
|
||||
getattr(scheme, key), encoding=sys.getfilesystemencoding()
|
||||
)
|
||||
|
||||
def make_data_scheme_file(record_path):
|
||||
# type: (RecordPath) -> File
|
||||
normed_path = os.path.normpath(record_path)
|
||||
try:
|
||||
_, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
|
||||
except ValueError:
|
||||
message = (
|
||||
"Unexpected file in {}: {!r}. .data directory contents"
|
||||
" should be named like: '<scheme key>/<path>'."
|
||||
).format(wheel_path, record_path)
|
||||
raise InstallationError(message)
|
||||
|
||||
try:
|
||||
scheme_path = scheme_paths[scheme_key]
|
||||
except KeyError:
|
||||
valid_scheme_keys = ", ".join(sorted(scheme_paths))
|
||||
message = (
|
||||
"Unknown scheme key used in {}: {} (for file {!r}). .data"
|
||||
" directory contents should be in subdirectories named"
|
||||
" with a valid scheme key ({})"
|
||||
).format(
|
||||
wheel_path, scheme_key, record_path, valid_scheme_keys
|
||||
)
|
||||
raise InstallationError(message)
|
||||
|
||||
dest_path = os.path.join(scheme_path, dest_subpath)
|
||||
assert_no_path_traversal(scheme_path, dest_path)
|
||||
return ZipBackedFile(record_path, dest_path, zip_file)
|
||||
|
||||
return make_data_scheme_file
|
||||
|
||||
def is_data_scheme_path(path):
|
||||
# type: (RecordPath) -> bool
|
||||
return path.split("/", 1)[0].endswith(".data")
|
||||
|
||||
paths = all_paths()
|
||||
file_paths = filterfalse(is_dir_path, paths)
|
||||
root_scheme_paths, data_scheme_paths = partition(
|
||||
is_data_scheme_path, file_paths
|
||||
)
|
||||
|
||||
make_root_scheme_file = root_scheme_file_maker(
|
||||
wheel_zip,
|
||||
ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
|
||||
)
|
||||
files = map(make_root_scheme_file, root_scheme_paths)
|
||||
|
||||
def is_script_scheme_path(path):
|
||||
# type: (RecordPath) -> bool
|
||||
parts = path.split("/", 2)
|
||||
return (
|
||||
len(parts) > 2 and
|
||||
parts[0].endswith(".data") and
|
||||
parts[1] == "scripts"
|
||||
)
|
||||
|
||||
other_scheme_paths, script_scheme_paths = partition(
|
||||
is_script_scheme_path, data_scheme_paths
|
||||
)
|
||||
|
||||
make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
|
||||
other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
|
||||
files = chain(files, other_scheme_files)
|
||||
|
||||
# Get the defined entry points
|
||||
distribution = pkg_resources_distribution_for_wheel(
|
||||
wheel_zip, name, wheel_path
|
||||
)
|
||||
console, gui = get_entrypoints(distribution)
|
||||
|
||||
def is_entrypoint_wrapper(file):
|
||||
# type: (File) -> bool
|
||||
# EP, EP.exe and EP-script.py are scripts generated for
|
||||
# entry point EP by setuptools
|
||||
path = file.dest_path
|
||||
name = os.path.basename(path)
|
||||
if name.lower().endswith('.exe'):
|
||||
matchname = name[:-4]
|
||||
elif name.lower().endswith('-script.py'):
|
||||
matchname = name[:-10]
|
||||
elif name.lower().endswith(".pya"):
|
||||
matchname = name[:-4]
|
||||
else:
|
||||
matchname = name
|
||||
# Ignore setuptools-generated scripts
|
||||
return (matchname in console or matchname in gui)
|
||||
    script_scheme_files = map(make_data_scheme_file, script_scheme_paths)
    script_scheme_files = filterfalse(
        is_entrypoint_wrapper, script_scheme_files
    )
    script_scheme_files = map(ScriptFile, script_scheme_files)
    files = chain(files, script_scheme_files)

    for file in files:
        file.save()
        record_installed(file.src_record_path, file.dest_path, file.changed)

    def pyc_source_file_paths():
        # type: () -> Iterator[text_type]
        # We de-duplicate installation paths, since there can be overlap (e.g.
        # file in .data maps to same location as file in wheel root).
        # Sorting installation paths makes it easier to reproduce and debug
        # issues related to permissions on existing files.
        for installed_path in sorted(set(installed.values())):
            full_installed_path = os.path.join(lib_dir, installed_path)
            if not os.path.isfile(full_installed_path):
                continue
            if not full_installed_path.endswith('.py'):
                continue
            yield full_installed_path

    def pyc_output_path(path):
        # type: (text_type) -> text_type
        """Return the path the pyc file would have been written to.
        """
        if PY2:
            if sys.flags.optimize:
                return path + 'o'
            else:
                return path + 'c'
        else:
            return importlib.util.cache_from_source(path)
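
    # For example, on CPython 3.8 pyc_output_path('pkg/mod.py') is
    # 'pkg/__pycache__/mod.cpython-38.pyc' (via cache_from_source), while
    # on Python 2 it is simply 'pkg/mod.pyc' ('pkg/mod.pyo' under -O).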

    # Compile all of the pyc files for the installed files
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                for path in pyc_source_file_paths():
                    # Python 2's `compileall.compile_file` requires a str in
                    # error cases, so we must convert to the native type.
                    path_arg = ensure_str(
                        path, encoding=sys.getfilesystemencoding()
                    )
                    success = compileall.compile_file(
                        path_arg, force=True, quiet=True
                    )
                    if success:
                        pyc_path = pyc_output_path(path)
                        assert os.path.exists(pyc_path)
                        pyc_record_path = cast(
                            "RecordPath", pyc_path.replace(os.path.sep, "/")
                        )
                        record_installed(pyc_record_path, pyc_path)
        logger.debug(stdout.getvalue())

    maker = PipScriptMaker(None, scheme.scripts)

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {''}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate = get_console_script_specs(console)

    gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))
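
    # Each spec is a distlib-style "name = module:func" line; e.g. a
    # (hypothetical) entry point "foo-cli = foo.cli:main" becomes a
    # foo-cli wrapper script in scheme.scripts.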

    generated_console_scripts = maker.make_multiple(scripts_to_generate)
    generated.extend(generated_console_scripts)

    generated.extend(
        maker.make_multiple(gui_scripts_to_generate, {'gui': True})
    )

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    generated_file_mode = 0o666 & ~current_umask()
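    # e.g. with a typical umask of 0o022 this is 0o644 (rw-r--r--):
    # world-readable, writable only by the owner.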

    @contextlib.contextmanager
    def _generate_file(path, **kwargs):
        # type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
        with adjacent_tmp_file(path, **kwargs) as f:
            yield f
        os.chmod(f.name, generated_file_mode)
        replace(f.name, path)
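
    # Writing to an adjacent temp file and replace()-ing it into place
    # means each generated file is swapped in as a single rename, already
    # carrying generated_file_mode.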

    dest_info_dir = os.path.join(lib_dir, info_dir)

    # Record pip as the installer
    installer_path = os.path.join(dest_info_dir, 'INSTALLER')
    with _generate_file(installer_path) as installer_file:
        installer_file.write(b'pip\n')
    generated.append(installer_path)

    # Record the PEP 610 direct URL reference
    if direct_url is not None:
        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
        with _generate_file(direct_url_path) as direct_url_file:
            direct_url_file.write(direct_url.to_json().encode("utf-8"))
        generated.append(direct_url_path)

    # Record the REQUESTED file
    if requested:
        requested_path = os.path.join(dest_info_dir, 'REQUESTED')
        with open(requested_path, "w"):
            pass
        generated.append(requested_path)

    record_text = distribution.get_metadata('RECORD')
    record_rows = list(csv.reader(record_text.splitlines()))

    rows = get_csv_rows_for_installed(
        record_rows,
        installed=installed,
        changed=changed,
        generated=generated,
        lib_dir=lib_dir)

    # Record details of all files installed
    record_path = os.path.join(dest_info_dir, 'RECORD')

    with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
        # The type mypy infers for record_file is different for Python 3
        # (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
        # cast to typing.IO[str] as a workaround.
        writer = csv.writer(cast('IO[str]', record_file))
        writer.writerows(_normalized_outrows(rows))


@contextlib.contextmanager
def req_error_context(req_description):
    # type: (str) -> Iterator[None]
    try:
        yield
    except InstallationError as e:
        message = "For req: {}. {}".format(req_description, e.args[0])
        reraise(
            InstallationError, InstallationError(message), sys.exc_info()[2]
        )
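
# req_error_context() re-raises InstallationError via reraise(), keeping
# the original traceback while prefixing the message with the requirement
# description, so the failing requirement is visible in the error.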


def install_wheel(
    name,  # type: str
    wheel_path,  # type: str
    scheme,  # type: Scheme
    req_description,  # type: str
    pycompile=True,  # type: bool
    warn_script_location=True,  # type: bool
    direct_url=None,  # type: Optional[DirectUrl]
    requested=False,  # type: bool
):
    # type: (...) -> None
    with ZipFile(wheel_path, allowZip64=True) as z:
        with req_error_context(req_description):
            _install_wheel(
                name=name,
                wheel_zip=z,
                wheel_path=wheel_path,
                scheme=scheme,
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=requested,
            )
562
venv/Lib/site-packages/pip/_internal/operations/prepare.py
Normal file
@ -0,0 +1,562 @@
"""Prepares a distribution for installation
|
||||
"""
|
||||
|
||||
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from pip._vendor.six import PY2
|
||||
|
||||
from pip._internal.distributions import (
|
||||
make_distribution_for_install_requirement,
|
||||
)
|
||||
from pip._internal.distributions.installed import InstalledDistribution
|
||||
from pip._internal.exceptions import (
|
||||
DirectoryUrlHashUnsupported,
|
||||
HashMismatch,
|
||||
HashUnpinned,
|
||||
InstallationError,
|
||||
NetworkConnectionError,
|
||||
PreviousBuildDirError,
|
||||
VcsHashUnsupported,
|
||||
)
|
||||
from pip._internal.utils.filesystem import copy2_fixed
|
||||
from pip._internal.utils.hashes import MissingHashes
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import (
|
||||
display_path,
|
||||
hide_url,
|
||||
path_to_display,
|
||||
rmtree,
|
||||
)
|
||||
from pip._internal.utils.temp_dir import TempDirectory
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
from pip._internal.utils.unpacking import unpack_file
|
||||
from pip._internal.vcs import vcs
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import (
|
||||
Callable, List, Optional, Tuple,
|
||||
)
|
||||
|
||||
from mypy_extensions import TypedDict
|
||||
|
||||
from pip._internal.distributions import AbstractDistribution
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.network.download import Downloader
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.req.req_tracker import RequirementTracker
|
||||
from pip._internal.utils.hashes import Hashes
|
||||
|
||||
if PY2:
|
||||
CopytreeKwargs = TypedDict(
|
||||
'CopytreeKwargs',
|
||||
{
|
||||
'ignore': Callable[[str, List[str]], List[str]],
|
||||
'symlinks': bool,
|
||||
},
|
||||
total=False,
|
||||
)
|
||||
else:
|
||||
CopytreeKwargs = TypedDict(
|
||||
'CopytreeKwargs',
|
||||
{
|
||||
'copy_function': Callable[[str, str], None],
|
||||
'ignore': Callable[[str, List[str]], List[str]],
|
||||
'ignore_dangling_symlinks': bool,
|
||||
'symlinks': bool,
|
||||
},
|
||||
total=False,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_prepared_distribution(
|
||||
req, # type: InstallRequirement
|
||||
req_tracker, # type: RequirementTracker
|
||||
finder, # type: PackageFinder
|
||||
build_isolation # type: bool
|
||||
):
|
||||
# type: (...) -> AbstractDistribution
|
||||
"""Prepare a distribution for installation.
|
||||
"""
|
||||
abstract_dist = make_distribution_for_install_requirement(req)
|
||||
with req_tracker.track(req):
|
||||
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
|
||||
return abstract_dist
|
||||
|
||||
|
||||
def unpack_vcs_link(link, location):
|
||||
# type: (Link, str) -> None
|
||||
vcs_backend = vcs.get_backend_for_scheme(link.scheme)
|
||||
assert vcs_backend is not None
|
||||
vcs_backend.unpack(location, url=hide_url(link.url))
|
||||
|
||||
|
||||
class File(object):
|
||||
def __init__(self, path, content_type):
|
||||
# type: (str, str) -> None
|
||||
self.path = path
|
||||
self.content_type = content_type
|
||||
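
# A File simply pairs an on-disk path with the content type that was
# guessed or reported for it; the helpers below construct these.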


def get_http_url(
    link,  # type: Link
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> File
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # let's download to a tmp dir
        from_path, content_type = _download_http_url(
            link, downloader, temp_dir.path, hashes
        )

    return File(from_path, content_type)


def _copy2_ignoring_special_files(src, dest):
    # type: (str, str) -> None
    """Copying special files is not supported, but as a convenience to users
    we skip errors copying them. This supports tools that may create e.g.
    socket files in the project source directory.
    """
    try:
        copy2_fixed(src, dest)
    except shutil.SpecialFileError as e:
        # SpecialFileError may be raised due to either the source or
        # destination. If the destination was the cause then we would actually
        # care, but since the destination directory is deleted prior to
        # copy we ignore all of them assuming it is caused by the source.
        logger.warning(
            "Ignoring special file error '%s' encountered copying %s to %s.",
            str(e),
            path_to_display(src),
            path_to_display(dest),
        )


def _copy_source_tree(source, target):
    # type: (str, str) -> None
    target_abspath = os.path.abspath(target)
    target_basename = os.path.basename(target_abspath)
    target_dirname = os.path.dirname(target_abspath)

    def ignore(d, names):
        # type: (str, List[str]) -> List[str]
        skipped = []  # type: List[str]
        if d == source:
            # Pulling in those directories can potentially be very slow,
            # exclude the following directories if they appear in the top
            # level dir (and only it).
            # See discussion at https://github.com/pypa/pip/pull/6770
            skipped += ['.tox', '.nox']
        if os.path.abspath(d) == target_dirname:
            # Prevent an infinite recursion if the target is in source.
            # This can happen when TMPDIR is set to ${PWD}/...
            # and we copy PWD to TMPDIR.
            skipped += [target_basename]
        return skipped

    kwargs = dict(ignore=ignore, symlinks=True)  # type: CopytreeKwargs

    if not PY2:
        # Python 2 does not support copy_function, so we only ignore
        # errors on special file copy in Python 3.
        kwargs['copy_function'] = _copy2_ignoring_special_files

    shutil.copytree(source, target, **kwargs)


def get_file_url(
    link,  # type: Link
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> File
    """Get file and optionally check its hash.
    """
    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link.file_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    return File(from_path, content_type)


def unpack_url(
    link,  # type: Link
    location,  # type: str
    downloader,  # type: Downloader
    download_dir=None,  # type: Optional[str]
    hashes=None,  # type: Optional[Hashes]
):
    # type: (...) -> Optional[File]
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # If it's a url to a local directory
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            downloader,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file


def _download_http_url(
    link,  # type: Link
    downloader,  # type: Downloader
    temp_dir,  # type: str
    hashes,  # type: Optional[Hashes]
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using the provided downloader."""
    download = downloader(link)

    file_path = os.path.join(temp_dir, download.filename)
    with open(file_path, 'wb') as content_file:
        for chunk in download.chunks:
            content_file.write(chunk)

    if hashes:
        hashes.check_against_path(file_path)

    return file_path, download.response.headers.get('content-type', '')
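
# The response is streamed chunk by chunk above, so even very large
# archives never need to be held fully in memory before the hash check.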


def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Optional[Hashes]) -> Optional[str]
    """Check download_dir for a previously downloaded file with correct hash.

    If a correct file is found, return its path; otherwise return None.
    """
    download_path = os.path.join(download_dir, link.filename)

    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path


class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(
        self,
        build_dir,  # type: str
        download_dir,  # type: Optional[str]
        src_dir,  # type: str
        wheel_download_dir,  # type: Optional[str]
        build_isolation,  # type: bool
        req_tracker,  # type: RequirementTracker
        downloader,  # type: Downloader
        finder,  # type: PackageFinder
        require_hashes,  # type: bool
        use_user_site,  # type: bool
    ):
        # type: (...) -> None
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker
        self.downloader = downloader
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they
        # are written to the download_dir parameter. Kept separate from
        # download_dir to permit keeping only wheel archives for 'pip wheel'.
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

    @property
    def _download_should_save(self):
        # type: () -> bool
        if not self.download_dir:
            return False

        if os.path.exists(self.download_dir):
            return True

        logger.critical('Could not find download directory')
        raise InstallationError(
            "Could not find or access download directory '{}'"
            .format(self.download_dir))

    def _log_preparing_link(self, req):
        # type: (InstallRequirement) -> None
        """Log how the link is being prepared."""
        if req.link.is_file:
            path = req.link.file_path
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req.req or req)

    def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds):
        # type: (InstallRequirement, Optional[str], bool) -> None
        """Ensure source_dir of a linked InstallRequirement."""
        # Since source_dir is only set for editable requirements.
        if req.link.is_wheel:
            # We don't need to unpack wheels, so no need for a source
            # directory.
            return
        assert req.source_dir is None
        # We always delete unpacked sdists after pip runs.
        req.ensure_has_source_dir(
            self.build_dir,
            autodelete=True,
            parallel_builds=parallel_builds,
        )

        # If a checkout exists, it's unwise to keep going. Version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
            raise PreviousBuildDirError(
                "pip can't proceed with requirements '{}' due to a "
                "pre-existing build directory ({}). This is likely "
                "due to a previous installation that failed. pip is "
                "being responsible and not assuming it can delete this. "
                "Please delete it and try again.".format(req, req.source_dir)
            )

    def _get_linked_req_hashes(self, req):
        # type: (InstallRequirement) -> Hashes
        # By the time this is called, the requirement's link should have
        # been checked so we can tell what kind of requirements req is
        # and raise some more informative errors than otherwise.
        # (For example, we can raise VcsHashUnsupported for a VCS URL
        # rather than HashMissing.)
        if not self.require_hashes:
            return req.hashes(trust_internet=True)

        # We could check these first 2 conditions inside unpack_url
        # and save repetition of conditions, but then we would
        # report less-useful error messages for unhashable
        # requirements, complaining that there's no hash provided.
        if req.link.is_vcs:
            raise VcsHashUnsupported()
        if req.link.is_existing_dir():
            raise DirectoryUrlHashUnsupported()

        # Unpinned packages are asking for trouble when a new version
        # is uploaded. This isn't a security check, but it saves users
        # a surprising hash mismatch in the future.
        # file:/// URLs aren't pinnable, so don't complain about them
        # not being pinned.
        if req.original_link is None and not req.is_pinned:
            raise HashUnpinned()

        # If known-good hashes are missing for this requirement,
        # shim it with a facade object that will provoke hash
        # computation and then raise a HashMissing exception
        # showing the user what the hash should be.
        return req.hashes(trust_internet=False) or MissingHashes()

    def prepare_linked_requirement(self, req, parallel_builds=False):
        # type: (InstallRequirement, bool) -> AbstractDistribution
        """Prepare a requirement to be obtained from req.link."""
        assert req.link
        link = req.link
        self._log_preparing_link(req)
        if link.is_wheel and self.wheel_download_dir:
            # Download wheels to a dedicated dir when doing `pip wheel`.
            download_dir = self.wheel_download_dir
        else:
            download_dir = self.download_dir

        with indent_log():
            self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
            try:
                local_file = unpack_url(
                    link, req.source_dir, self.downloader, download_dir,
                    hashes=self._get_linked_req_hashes(req)
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    'Could not install requirement {} because of HTTP '
                    'error {} for URL {}'.format(req, exc, link)
                )

            # For use in later processing, preserve the file path on the
            # requirement.
            if local_file:
                req.local_file_path = local_file.path

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if download_dir:
                if link.is_existing_dir():
                    logger.info('Link is a directory, ignoring download_dir')
                elif local_file:
                    download_location = os.path.join(
                        download_dir, link.filename
                    )
                    if not os.path.exists(download_location):
                        shutil.copy(local_file.path, download_location)
                        download_path = display_path(download_location)
                        logger.info('Saved %s', download_path)

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(
        self,
        req,  # type: InstallRequirement
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    'The editable requirement {} cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.'.format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(self.use_user_site)

        return abstract_dist

    def prepare_installed_requirement(
        self,
        req,  # type: InstallRequirement
        skip_reason  # type: str
    ):
        # type: (...) -> AbstractDistribution
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get a skip reason even though req.satisfied_by "
            "is set to {}".format(req.satisfied_by)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = InstalledDistribution(req)

        return abstract_dist