Created starter files for the project.
commit 73f0c0db42
1992 changed files with 769897 additions and 0 deletions
venv/Lib/site-packages/pip/_internal/req/__init__.py (new file, 103 lines)
@@ -0,0 +1,103 @@
from __future__ import absolute_import

import collections
import logging

from pip._internal.utils.logging import indent_log
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

from .req_file import parse_requirements
from .req_install import InstallRequirement
from .req_set import RequirementSet

if MYPY_CHECK_RUNNING:
    from typing import Iterator, List, Optional, Sequence, Tuple

__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)


class InstallationResult(object):
    def __init__(self, name):
        # type: (str) -> None
        self.name = name

    def __repr__(self):
        # type: () -> str
        return "InstallationResult(name={!r})".format(self.name)


def _validate_requirements(
    requirements,  # type: List[InstallRequirement]
):
    # type: (...) -> Iterator[Tuple[str, InstallRequirement]]
    for req in requirements:
        assert req.name, "invalid to-be-installed requirement: {}".format(req)
        yield req.name, req


def install_given_reqs(
    requirements,  # type: List[InstallRequirement]
    install_options,  # type: List[str]
    global_options,  # type: Sequence[str]
    root,  # type: Optional[str]
    home,  # type: Optional[str]
    prefix,  # type: Optional[str]
    warn_script_location,  # type: bool
    use_user_site,  # type: bool
    pycompile,  # type: bool
):
    # type: (...) -> List[InstallationResult]
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """
    to_install = collections.OrderedDict(_validate_requirements(requirements))

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join(to_install.keys()),
        )

    installed = []

    with indent_log():
        for req_name, requirement in to_install.items():
            if requirement.should_reinstall:
                logger.info('Attempting uninstall: %s', req_name)
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            else:
                uninstalled_pathset = None

            try:
                requirement.install(
                    install_options,
                    global_options,
                    root=root,
                    home=home,
                    prefix=prefix,
                    warn_script_location=warn_script_location,
                    use_user_site=use_user_site,
                    pycompile=pycompile,
                )
            except Exception:
                # if install did not succeed, rollback previous uninstall
                if uninstalled_pathset and not requirement.install_succeeded:
                    uninstalled_pathset.rollback()
                raise
            else:
                if uninstalled_pathset and requirement.install_succeeded:
                    uninstalled_pathset.commit()

            installed.append(InstallationResult(req_name))

    return installed
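As a rough usage sketch of the module above (illustrative only: `requirements` is a placeholder for a List[InstallRequirement] that pip's resolver and preparer would normally build, and the call performs a real installation):

from pip._internal.req import install_given_reqs

# `requirements` is assumed to be a list of InstallRequirement objects that
# have already been downloaded and unpacked.
results = install_given_reqs(
    requirements,
    install_options=[],
    global_options=[],
    root=None,
    home=None,
    prefix=None,
    warn_script_location=True,
    use_user_site=False,
    pycompile=True,
)
print(results)  # e.g. [InstallationResult(name='requests'), ...]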
Binary files not shown (7 files).
venv/Lib/site-packages/pip/_internal/req/constructors.py (new file, 486 lines)
@@ -0,0 +1,486 @@
"""Backing implementation for InstallRequirement's various constructors
|
||||
|
||||
The idea here is that these formed a major chunk of InstallRequirement's size
|
||||
so, moving them and support code dedicated to them outside of that class
|
||||
helps creates for better understandability for the rest of the code.
|
||||
|
||||
These are meant to be used elsewhere within pip to create instances of
|
||||
InstallRequirement.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
from pip._vendor.packaging.markers import Marker
|
||||
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
||||
from pip._vendor.packaging.specifiers import Specifier
|
||||
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
|
||||
|
||||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.models.index import PyPI, TestPyPI
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.models.wheel import Wheel
|
||||
from pip._internal.pyproject import make_pyproject_path
|
||||
from pip._internal.req.req_install import InstallRequirement
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
|
||||
from pip._internal.utils.misc import is_installable_dir, splitext
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
from pip._internal.utils.urls import path_to_url
|
||||
from pip._internal.vcs import is_url, vcs
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import (
|
||||
Any, Dict, Optional, Set, Tuple, Union,
|
||||
)
|
||||
from pip._internal.req.req_file import ParsedRequirement
|
||||
|
||||
|
||||
__all__ = [
|
||||
"install_req_from_editable", "install_req_from_line",
|
||||
"parse_editable"
|
||||
]
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
operators = Specifier._operators.keys()
|
||||
|
||||
|
||||
def is_archive_file(name):
|
||||
# type: (str) -> bool
|
||||
"""Return True if `name` is a considered as an archive file."""
|
||||
ext = splitext(name)[1].lower()
|
||||
if ext in ARCHIVE_EXTENSIONS:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _strip_extras(path):
|
||||
# type: (str) -> Tuple[str, Optional[str]]
|
||||
m = re.match(r'^(.+)(\[[^\]]+\])$', path)
|
||||
extras = None
|
||||
if m:
|
||||
path_no_extras = m.group(1)
|
||||
extras = m.group(2)
|
||||
else:
|
||||
path_no_extras = path
|
||||
|
||||
return path_no_extras, extras
|
||||
|
||||
|
||||
def convert_extras(extras):
|
||||
# type: (Optional[str]) -> Set[str]
|
||||
if not extras:
|
||||
return set()
|
||||
return Requirement("placeholder" + extras.lower()).extras
|
||||
|
||||
|
||||
def parse_editable(editable_req):
|
||||
# type: (str) -> Tuple[Optional[str], str, Set[str]]
|
||||
"""Parses an editable requirement into:
|
||||
- a requirement name
|
||||
- an URL
|
||||
- extras
|
||||
- editable options
|
||||
Accepted requirements:
|
||||
svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
|
||||
.[some_extra]
|
||||
"""
|
||||
|
||||
url = editable_req
|
||||
|
||||
# If a file path is specified with extras, strip off the extras.
|
||||
url_no_extras, extras = _strip_extras(url)
|
||||
|
||||
if os.path.isdir(url_no_extras):
|
||||
if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
|
||||
msg = (
|
||||
'File "setup.py" not found. Directory cannot be installed '
|
||||
'in editable mode: {}'.format(os.path.abspath(url_no_extras))
|
||||
)
|
||||
pyproject_path = make_pyproject_path(url_no_extras)
|
||||
if os.path.isfile(pyproject_path):
|
||||
msg += (
|
||||
'\n(A "pyproject.toml" file was found, but editable '
|
||||
'mode currently requires a setup.py based build.)'
|
||||
)
|
||||
raise InstallationError(msg)
|
||||
|
||||
# Treating it as code that has already been checked out
|
||||
url_no_extras = path_to_url(url_no_extras)
|
||||
|
||||
if url_no_extras.lower().startswith('file:'):
|
||||
package_name = Link(url_no_extras).egg_fragment
|
||||
if extras:
|
||||
return (
|
||||
package_name,
|
||||
url_no_extras,
|
||||
Requirement("placeholder" + extras.lower()).extras,
|
||||
)
|
||||
else:
|
||||
return package_name, url_no_extras, set()
|
||||
|
||||
for version_control in vcs:
|
||||
if url.lower().startswith('{}:'.format(version_control)):
|
||||
url = '{}+{}'.format(version_control, url)
|
||||
break
|
||||
|
||||
if '+' not in url:
|
||||
raise InstallationError(
|
||||
'{} is not a valid editable requirement. '
|
||||
'It should either be a path to a local project or a VCS URL '
|
||||
'(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
|
||||
)
|
||||
|
||||
vc_type = url.split('+', 1)[0].lower()
|
||||
|
||||
if not vcs.get_backend(vc_type):
|
||||
backends = ", ".join([bends.name + '+URL' for bends in vcs.backends])
|
||||
error_message = "For --editable={}, " \
|
||||
"only {} are currently supported".format(
|
||||
editable_req, backends)
|
||||
raise InstallationError(error_message)
|
||||
|
||||
package_name = Link(url).egg_fragment
|
||||
if not package_name:
|
||||
raise InstallationError(
|
||||
"Could not detect requirement name for '{}', please specify one "
|
||||
"with #egg=your_package_name".format(editable_req)
|
||||
)
|
||||
return package_name, url, set()
|
||||
|
||||
|
||||
def deduce_helpful_msg(req):
|
||||
# type: (str) -> str
|
||||
"""Returns helpful msg in case requirements file does not exist,
|
||||
or cannot be parsed.
|
||||
|
||||
:params req: Requirements file path
|
||||
"""
|
||||
msg = ""
|
||||
if os.path.exists(req):
|
||||
msg = " It does exist."
|
||||
# Try to parse and check if it is a requirements file.
|
||||
try:
|
||||
with open(req, 'r') as fp:
|
||||
# parse first line only
|
||||
next(parse_requirements(fp.read()))
|
||||
msg += (
|
||||
"The argument you provided "
|
||||
"({}) appears to be a"
|
||||
" requirements file. If that is the"
|
||||
" case, use the '-r' flag to install"
|
||||
" the packages specified within it."
|
||||
).format(req)
|
||||
except RequirementParseError:
|
||||
logger.debug(
|
||||
"Cannot parse '%s' as requirements file", req, exc_info=True
|
||||
)
|
||||
else:
|
||||
msg += " File '{}' does not exist.".format(req)
|
||||
return msg
|
||||
|
||||
|
||||
class RequirementParts(object):
|
||||
def __init__(
|
||||
self,
|
||||
requirement, # type: Optional[Requirement]
|
||||
link, # type: Optional[Link]
|
||||
markers, # type: Optional[Marker]
|
||||
extras, # type: Set[str]
|
||||
):
|
||||
self.requirement = requirement
|
||||
self.link = link
|
||||
self.markers = markers
|
||||
self.extras = extras
|
||||
|
||||
|
||||
def parse_req_from_editable(editable_req):
|
||||
# type: (str) -> RequirementParts
|
||||
name, url, extras_override = parse_editable(editable_req)
|
||||
|
||||
if name is not None:
|
||||
try:
|
||||
req = Requirement(name)
|
||||
except InvalidRequirement:
|
||||
raise InstallationError("Invalid requirement: '{}'".format(name))
|
||||
else:
|
||||
req = None
|
||||
|
||||
link = Link(url)
|
||||
|
||||
return RequirementParts(req, link, None, extras_override)
|
||||
|
||||
|
||||
# ---- The actual constructors follow ----
|
||||
|
||||
|
||||
def install_req_from_editable(
|
||||
editable_req, # type: str
|
||||
comes_from=None, # type: Optional[Union[InstallRequirement, str]]
|
||||
use_pep517=None, # type: Optional[bool]
|
||||
isolated=False, # type: bool
|
||||
options=None, # type: Optional[Dict[str, Any]]
|
||||
constraint=False, # type: bool
|
||||
user_supplied=False, # type: bool
|
||||
):
|
||||
# type: (...) -> InstallRequirement
|
||||
|
||||
parts = parse_req_from_editable(editable_req)
|
||||
|
||||
return InstallRequirement(
|
||||
parts.requirement,
|
||||
comes_from=comes_from,
|
||||
user_supplied=user_supplied,
|
||||
editable=True,
|
||||
link=parts.link,
|
||||
constraint=constraint,
|
||||
use_pep517=use_pep517,
|
||||
isolated=isolated,
|
||||
install_options=options.get("install_options", []) if options else [],
|
||||
global_options=options.get("global_options", []) if options else [],
|
||||
hash_options=options.get("hashes", {}) if options else {},
|
||||
extras=parts.extras,
|
||||
)
|
||||
|
||||
|
||||
def _looks_like_path(name):
|
||||
# type: (str) -> bool
|
||||
"""Checks whether the string "looks like" a path on the filesystem.
|
||||
|
||||
This does not check whether the target actually exists, only judge from the
|
||||
appearance.
|
||||
|
||||
Returns true if any of the following conditions is true:
|
||||
* a path separator is found (either os.path.sep or os.path.altsep);
|
||||
* a dot is found (which represents the current directory).
|
||||
"""
|
||||
if os.path.sep in name:
|
||||
return True
|
||||
if os.path.altsep is not None and os.path.altsep in name:
|
||||
return True
|
||||
if name.startswith("."):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _get_url_from_path(path, name):
|
||||
# type: (str, str) -> Optional[str]
|
||||
"""
|
||||
First, it checks whether a provided path is an installable directory
|
||||
(e.g. it has a setup.py). If it is, returns the path.
|
||||
|
||||
If false, check if the path is an archive file (such as a .whl).
|
||||
The function checks if the path is a file. If false, if the path has
|
||||
an @, it will treat it as a PEP 440 URL requirement and return the path.
|
||||
"""
|
||||
if _looks_like_path(name) and os.path.isdir(path):
|
||||
if is_installable_dir(path):
|
||||
return path_to_url(path)
|
||||
raise InstallationError(
|
||||
"Directory {name!r} is not installable. Neither 'setup.py' "
|
||||
"nor 'pyproject.toml' found.".format(**locals())
|
||||
)
|
||||
if not is_archive_file(path):
|
||||
return None
|
||||
if os.path.isfile(path):
|
||||
return path_to_url(path)
|
||||
urlreq_parts = name.split('@', 1)
|
||||
if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
|
||||
# If the path contains '@' and the part before it does not look
|
||||
# like a path, try to treat it as a PEP 440 URL req instead.
|
||||
return None
|
||||
logger.warning(
|
||||
'Requirement %r looks like a filename, but the '
|
||||
'file does not exist',
|
||||
name
|
||||
)
|
||||
return path_to_url(path)
|
||||
|
||||
|
||||
def parse_req_from_line(name, line_source):
|
||||
# type: (str, Optional[str]) -> RequirementParts
|
||||
if is_url(name):
|
||||
marker_sep = '; '
|
||||
else:
|
||||
marker_sep = ';'
|
||||
if marker_sep in name:
|
||||
name, markers_as_string = name.split(marker_sep, 1)
|
||||
markers_as_string = markers_as_string.strip()
|
||||
if not markers_as_string:
|
||||
markers = None
|
||||
else:
|
||||
markers = Marker(markers_as_string)
|
||||
else:
|
||||
markers = None
|
||||
name = name.strip()
|
||||
req_as_string = None
|
||||
path = os.path.normpath(os.path.abspath(name))
|
||||
link = None
|
||||
extras_as_string = None
|
||||
|
||||
if is_url(name):
|
||||
link = Link(name)
|
||||
else:
|
||||
p, extras_as_string = _strip_extras(path)
|
||||
url = _get_url_from_path(p, name)
|
||||
if url is not None:
|
||||
link = Link(url)
|
||||
|
||||
# it's a local file, dir, or url
|
||||
if link:
|
||||
# Handle relative file URLs
|
||||
if link.scheme == 'file' and re.search(r'\.\./', link.url):
|
||||
link = Link(
|
||||
path_to_url(os.path.normpath(os.path.abspath(link.path))))
|
||||
# wheel file
|
||||
if link.is_wheel:
|
||||
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
|
||||
req_as_string = "{wheel.name}=={wheel.version}".format(**locals())
|
||||
else:
|
||||
# set the req to the egg fragment. when it's not there, this
|
||||
# will become an 'unnamed' requirement
|
||||
req_as_string = link.egg_fragment
|
||||
|
||||
# a requirement specifier
|
||||
else:
|
||||
req_as_string = name
|
||||
|
||||
extras = convert_extras(extras_as_string)
|
||||
|
||||
def with_source(text):
|
||||
# type: (str) -> str
|
||||
if not line_source:
|
||||
return text
|
||||
return '{} (from {})'.format(text, line_source)
|
||||
|
||||
if req_as_string is not None:
|
||||
try:
|
||||
req = Requirement(req_as_string)
|
||||
except InvalidRequirement:
|
||||
if os.path.sep in req_as_string:
|
||||
add_msg = "It looks like a path."
|
||||
add_msg += deduce_helpful_msg(req_as_string)
|
||||
elif ('=' in req_as_string and
|
||||
not any(op in req_as_string for op in operators)):
|
||||
add_msg = "= is not a valid operator. Did you mean == ?"
|
||||
else:
|
||||
add_msg = ''
|
||||
msg = with_source(
|
||||
'Invalid requirement: {!r}'.format(req_as_string)
|
||||
)
|
||||
if add_msg:
|
||||
msg += '\nHint: {}'.format(add_msg)
|
||||
raise InstallationError(msg)
|
||||
else:
|
||||
# Deprecate extras after specifiers: "name>=1.0[extras]"
|
||||
# This currently works by accident because _strip_extras() parses
|
||||
# any extras in the end of the string and those are saved in
|
||||
# RequirementParts
|
||||
for spec in req.specifier:
|
||||
spec_str = str(spec)
|
||||
if spec_str.endswith(']'):
|
||||
msg = "Extras after version '{}'.".format(spec_str)
|
||||
replace = "moving the extras before version specifiers"
|
||||
deprecated(msg, replacement=replace, gone_in="21.0")
|
||||
else:
|
||||
req = None
|
||||
|
||||
return RequirementParts(req, link, markers, extras)
|
||||
|
||||
|
||||
def install_req_from_line(
|
||||
name, # type: str
|
||||
comes_from=None, # type: Optional[Union[str, InstallRequirement]]
|
||||
use_pep517=None, # type: Optional[bool]
|
||||
isolated=False, # type: bool
|
||||
options=None, # type: Optional[Dict[str, Any]]
|
||||
constraint=False, # type: bool
|
||||
line_source=None, # type: Optional[str]
|
||||
user_supplied=False, # type: bool
|
||||
):
|
||||
# type: (...) -> InstallRequirement
|
||||
"""Creates an InstallRequirement from a name, which might be a
|
||||
requirement, directory containing 'setup.py', filename, or URL.
|
||||
|
||||
:param line_source: An optional string describing where the line is from,
|
||||
for logging purposes in case of an error.
|
||||
"""
|
||||
parts = parse_req_from_line(name, line_source)
|
||||
|
||||
return InstallRequirement(
|
||||
parts.requirement, comes_from, link=parts.link, markers=parts.markers,
|
||||
use_pep517=use_pep517, isolated=isolated,
|
||||
install_options=options.get("install_options", []) if options else [],
|
||||
global_options=options.get("global_options", []) if options else [],
|
||||
hash_options=options.get("hashes", {}) if options else {},
|
||||
constraint=constraint,
|
||||
extras=parts.extras,
|
||||
user_supplied=user_supplied,
|
||||
)
|
||||
|
||||
|
||||
def install_req_from_req_string(
|
||||
req_string, # type: str
|
||||
comes_from=None, # type: Optional[InstallRequirement]
|
||||
isolated=False, # type: bool
|
||||
use_pep517=None, # type: Optional[bool]
|
||||
user_supplied=False, # type: bool
|
||||
):
|
||||
# type: (...) -> InstallRequirement
|
||||
try:
|
||||
req = Requirement(req_string)
|
||||
except InvalidRequirement:
|
||||
raise InstallationError("Invalid requirement: '{}'".format(req_string))
|
||||
|
||||
domains_not_allowed = [
|
||||
PyPI.file_storage_domain,
|
||||
TestPyPI.file_storage_domain,
|
||||
]
|
||||
if (req.url and comes_from and comes_from.link and
|
||||
comes_from.link.netloc in domains_not_allowed):
|
||||
# Explicitly disallow pypi packages that depend on external urls
|
||||
raise InstallationError(
|
||||
"Packages installed from PyPI cannot depend on packages "
|
||||
"which are not also hosted on PyPI.\n"
|
||||
"{} depends on {} ".format(comes_from.name, req)
|
||||
)
|
||||
|
||||
return InstallRequirement(
|
||||
req,
|
||||
comes_from,
|
||||
isolated=isolated,
|
||||
use_pep517=use_pep517,
|
||||
user_supplied=user_supplied,
|
||||
)
|
||||
|
||||
|
||||
def install_req_from_parsed_requirement(
|
||||
parsed_req, # type: ParsedRequirement
|
||||
isolated=False, # type: bool
|
||||
use_pep517=None, # type: Optional[bool]
|
||||
user_supplied=False, # type: bool
|
||||
):
|
||||
# type: (...) -> InstallRequirement
|
||||
if parsed_req.is_editable:
|
||||
req = install_req_from_editable(
|
||||
parsed_req.requirement,
|
||||
comes_from=parsed_req.comes_from,
|
||||
use_pep517=use_pep517,
|
||||
constraint=parsed_req.constraint,
|
||||
isolated=isolated,
|
||||
user_supplied=user_supplied,
|
||||
)
|
||||
|
||||
else:
|
||||
req = install_req_from_line(
|
||||
parsed_req.requirement,
|
||||
comes_from=parsed_req.comes_from,
|
||||
use_pep517=use_pep517,
|
||||
isolated=isolated,
|
||||
options=parsed_req.options,
|
||||
constraint=parsed_req.constraint,
|
||||
line_source=parsed_req.line_source,
|
||||
user_supplied=user_supplied,
|
||||
)
|
||||
return req
|
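A short sketch of the two main constructors above; the outputs follow directly from the code shown (this assumes the vendored pip package is importable):

from pip._internal.req.constructors import install_req_from_line, parse_editable

# A plain specifier line becomes an InstallRequirement with a parsed name
# and specifier, and no link.
req = install_req_from_line("requests>=2.20", line_source="example")
print(req.name, str(req.specifier))   # requests >=2.20

# parse_editable() reduces an editable spec to (name, url, extras).
print(parse_editable("git+https://github.com/pypa/pip.git#egg=pip"))
# -> ('pip', 'git+https://github.com/pypa/pip.git#egg=pip', set())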
venv/Lib/site-packages/pip/_internal/req/req_file.py (new file, 592 lines)
@@ -0,0 +1,592 @@
"""
|
||||
Requirements file parsing
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import optparse
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
||||
|
||||
from pip._internal.cli import cmdoptions
|
||||
from pip._internal.exceptions import (
|
||||
InstallationError,
|
||||
RequirementsFileParseError,
|
||||
)
|
||||
from pip._internal.models.search_scope import SearchScope
|
||||
from pip._internal.network.utils import raise_for_status
|
||||
from pip._internal.utils.encoding import auto_decode
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
from pip._internal.utils.urls import get_url_scheme
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from optparse import Values
|
||||
from typing import (
|
||||
Any, Callable, Dict, Iterator, List, NoReturn, Optional, Text, Tuple,
|
||||
)
|
||||
|
||||
from pip._internal.index.package_finder import PackageFinder
|
||||
from pip._internal.network.session import PipSession
|
||||
|
||||
ReqFileLines = Iterator[Tuple[int, Text]]
|
||||
|
||||
LineParser = Callable[[Text], Tuple[str, Values]]
|
||||
|
||||
|
||||
__all__ = ['parse_requirements']
|
||||
|
||||
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
|
||||
COMMENT_RE = re.compile(r'(^|\s+)#.*$')
|
||||
|
||||
# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
|
||||
# variable name consisting of only uppercase letters, digits or the '_'
|
||||
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
|
||||
# 2013 Edition.
|
||||
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
|
||||
|
||||
SUPPORTED_OPTIONS = [
|
||||
cmdoptions.index_url,
|
||||
cmdoptions.extra_index_url,
|
||||
cmdoptions.no_index,
|
||||
cmdoptions.constraints,
|
||||
cmdoptions.requirements,
|
||||
cmdoptions.editable,
|
||||
cmdoptions.find_links,
|
||||
cmdoptions.no_binary,
|
||||
cmdoptions.only_binary,
|
||||
cmdoptions.prefer_binary,
|
||||
cmdoptions.require_hashes,
|
||||
cmdoptions.pre,
|
||||
cmdoptions.trusted_host,
|
||||
cmdoptions.use_new_feature,
|
||||
] # type: List[Callable[..., optparse.Option]]
|
||||
|
||||
# options to be passed to requirements
|
||||
SUPPORTED_OPTIONS_REQ = [
|
||||
cmdoptions.install_options,
|
||||
cmdoptions.global_options,
|
||||
cmdoptions.hash,
|
||||
] # type: List[Callable[..., optparse.Option]]
|
||||
|
||||
# the 'dest' string values
|
||||
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
|
||||
|
||||
|
||||
class ParsedRequirement(object):
|
||||
def __init__(
|
||||
self,
|
||||
requirement, # type:str
|
||||
is_editable, # type: bool
|
||||
comes_from, # type: str
|
||||
constraint, # type: bool
|
||||
options=None, # type: Optional[Dict[str, Any]]
|
||||
line_source=None, # type: Optional[str]
|
||||
):
|
||||
# type: (...) -> None
|
||||
self.requirement = requirement
|
||||
self.is_editable = is_editable
|
||||
self.comes_from = comes_from
|
||||
self.options = options
|
||||
self.constraint = constraint
|
||||
self.line_source = line_source
|
||||
|
||||
|
||||
class ParsedLine(object):
|
||||
def __init__(
|
||||
self,
|
||||
filename, # type: str
|
||||
lineno, # type: int
|
||||
comes_from, # type: Optional[str]
|
||||
args, # type: str
|
||||
opts, # type: Values
|
||||
constraint, # type: bool
|
||||
):
|
||||
# type: (...) -> None
|
||||
self.filename = filename
|
||||
self.lineno = lineno
|
||||
self.comes_from = comes_from
|
||||
self.opts = opts
|
||||
self.constraint = constraint
|
||||
|
||||
if args:
|
||||
self.is_requirement = True
|
||||
self.is_editable = False
|
||||
self.requirement = args
|
||||
elif opts.editables:
|
||||
self.is_requirement = True
|
||||
self.is_editable = True
|
||||
# We don't support multiple -e on one line
|
||||
self.requirement = opts.editables[0]
|
||||
else:
|
||||
self.is_requirement = False
|
||||
|
||||
|
||||
def parse_requirements(
|
||||
filename, # type: str
|
||||
session, # type: PipSession
|
||||
finder=None, # type: Optional[PackageFinder]
|
||||
comes_from=None, # type: Optional[str]
|
||||
options=None, # type: Optional[optparse.Values]
|
||||
constraint=False, # type: bool
|
||||
):
|
||||
# type: (...) -> Iterator[ParsedRequirement]
|
||||
"""Parse a requirements file and yield ParsedRequirement instances.
|
||||
|
||||
:param filename: Path or url of requirements file.
|
||||
:param session: PipSession instance.
|
||||
:param finder: Instance of pip.index.PackageFinder.
|
||||
:param comes_from: Origin description of requirements.
|
||||
:param options: cli options.
|
||||
:param constraint: If true, parsing a constraint file rather than
|
||||
requirements file.
|
||||
"""
|
||||
line_parser = get_line_parser(finder)
|
||||
parser = RequirementsFileParser(session, line_parser, comes_from)
|
||||
|
||||
for parsed_line in parser.parse(filename, constraint):
|
||||
parsed_req = handle_line(
|
||||
parsed_line,
|
||||
options=options,
|
||||
finder=finder,
|
||||
session=session
|
||||
)
|
||||
if parsed_req is not None:
|
||||
yield parsed_req
|
||||
|
||||
|
||||
def preprocess(content):
|
||||
# type: (Text) -> ReqFileLines
|
||||
"""Split, filter, and join lines, and return a line iterator
|
||||
|
||||
:param content: the content of the requirements file
|
||||
"""
|
||||
lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
|
||||
lines_enum = join_lines(lines_enum)
|
||||
lines_enum = ignore_comments(lines_enum)
|
||||
lines_enum = expand_env_variables(lines_enum)
|
||||
return lines_enum
|
||||
|
||||
|
||||
def handle_requirement_line(
|
||||
line, # type: ParsedLine
|
||||
options=None, # type: Optional[optparse.Values]
|
||||
):
|
||||
# type: (...) -> ParsedRequirement
|
||||
|
||||
# preserve for the nested code path
|
||||
line_comes_from = '{} {} (line {})'.format(
|
||||
'-c' if line.constraint else '-r', line.filename, line.lineno,
|
||||
)
|
||||
|
||||
assert line.is_requirement
|
||||
|
||||
if line.is_editable:
|
||||
# For editable requirements, we don't support per-requirement
|
||||
# options, so just return the parsed requirement.
|
||||
return ParsedRequirement(
|
||||
requirement=line.requirement,
|
||||
is_editable=line.is_editable,
|
||||
comes_from=line_comes_from,
|
||||
constraint=line.constraint,
|
||||
)
|
||||
else:
|
||||
if options:
|
||||
# Disable wheels if the user has specified build options
|
||||
cmdoptions.check_install_build_global(options, line.opts)
|
||||
|
||||
# get the options that apply to requirements
|
||||
req_options = {}
|
||||
for dest in SUPPORTED_OPTIONS_REQ_DEST:
|
||||
if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
|
||||
req_options[dest] = line.opts.__dict__[dest]
|
||||
|
||||
line_source = 'line {} of {}'.format(line.lineno, line.filename)
|
||||
return ParsedRequirement(
|
||||
requirement=line.requirement,
|
||||
is_editable=line.is_editable,
|
||||
comes_from=line_comes_from,
|
||||
constraint=line.constraint,
|
||||
options=req_options,
|
||||
line_source=line_source,
|
||||
)
|
||||
|
||||
|
||||
def handle_option_line(
|
||||
opts, # type: Values
|
||||
filename, # type: str
|
||||
lineno, # type: int
|
||||
finder=None, # type: Optional[PackageFinder]
|
||||
options=None, # type: Optional[optparse.Values]
|
||||
session=None, # type: Optional[PipSession]
|
||||
):
|
||||
# type: (...) -> None
|
||||
|
||||
if options:
|
||||
# percolate options upward
|
||||
if opts.require_hashes:
|
||||
options.require_hashes = opts.require_hashes
|
||||
if opts.features_enabled:
|
||||
options.features_enabled.extend(
|
||||
f for f in opts.features_enabled
|
||||
if f not in options.features_enabled
|
||||
)
|
||||
|
||||
# set finder options
|
||||
if finder:
|
||||
find_links = finder.find_links
|
||||
index_urls = finder.index_urls
|
||||
if opts.index_url:
|
||||
index_urls = [opts.index_url]
|
||||
if opts.no_index is True:
|
||||
index_urls = []
|
||||
if opts.extra_index_urls:
|
||||
index_urls.extend(opts.extra_index_urls)
|
||||
if opts.find_links:
|
||||
# FIXME: it would be nice to keep track of the source
|
||||
# of the find_links: support a find-links local path
|
||||
# relative to a requirements file.
|
||||
value = opts.find_links[0]
|
||||
req_dir = os.path.dirname(os.path.abspath(filename))
|
||||
relative_to_reqs_file = os.path.join(req_dir, value)
|
||||
if os.path.exists(relative_to_reqs_file):
|
||||
value = relative_to_reqs_file
|
||||
find_links.append(value)
|
||||
|
||||
search_scope = SearchScope(
|
||||
find_links=find_links,
|
||||
index_urls=index_urls,
|
||||
)
|
||||
finder.search_scope = search_scope
|
||||
|
||||
if opts.pre:
|
||||
finder.set_allow_all_prereleases()
|
||||
|
||||
if opts.prefer_binary:
|
||||
finder.set_prefer_binary()
|
||||
|
||||
if session:
|
||||
for host in opts.trusted_hosts or []:
|
||||
source = 'line {} of {}'.format(lineno, filename)
|
||||
session.add_trusted_host(host, source=source)
|
||||
|
||||
|
||||
def handle_line(
|
||||
line, # type: ParsedLine
|
||||
options=None, # type: Optional[optparse.Values]
|
||||
finder=None, # type: Optional[PackageFinder]
|
||||
session=None, # type: Optional[PipSession]
|
||||
):
|
||||
# type: (...) -> Optional[ParsedRequirement]
|
||||
"""Handle a single parsed requirements line; This can result in
|
||||
creating/yielding requirements, or updating the finder.
|
||||
|
||||
:param line: The parsed line to be processed.
|
||||
:param options: CLI options.
|
||||
:param finder: The finder - updated by non-requirement lines.
|
||||
:param session: The session - updated by non-requirement lines.
|
||||
|
||||
Returns a ParsedRequirement object if the line is a requirement line,
|
||||
otherwise returns None.
|
||||
|
||||
For lines that contain requirements, the only options that have an effect
|
||||
are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
|
||||
requirement. Other options from SUPPORTED_OPTIONS may be present, but are
|
||||
ignored.
|
||||
|
||||
For lines that do not contain requirements, the only options that have an
|
||||
effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
|
||||
be present, but are ignored. These lines may contain multiple options
|
||||
(although our docs imply only one is supported), and all our parsed and
|
||||
affect the finder.
|
||||
"""
|
||||
|
||||
if line.is_requirement:
|
||||
parsed_req = handle_requirement_line(line, options)
|
||||
return parsed_req
|
||||
else:
|
||||
handle_option_line(
|
||||
line.opts,
|
||||
line.filename,
|
||||
line.lineno,
|
||||
finder,
|
||||
options,
|
||||
session,
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
class RequirementsFileParser(object):
|
||||
def __init__(
|
||||
self,
|
||||
session, # type: PipSession
|
||||
line_parser, # type: LineParser
|
||||
comes_from, # type: Optional[str]
|
||||
):
|
||||
# type: (...) -> None
|
||||
self._session = session
|
||||
self._line_parser = line_parser
|
||||
self._comes_from = comes_from
|
||||
|
||||
def parse(self, filename, constraint):
|
||||
# type: (str, bool) -> Iterator[ParsedLine]
|
||||
"""Parse a given file, yielding parsed lines.
|
||||
"""
|
||||
for line in self._parse_and_recurse(filename, constraint):
|
||||
yield line
|
||||
|
||||
def _parse_and_recurse(self, filename, constraint):
|
||||
# type: (str, bool) -> Iterator[ParsedLine]
|
||||
for line in self._parse_file(filename, constraint):
|
||||
if (
|
||||
not line.is_requirement and
|
||||
(line.opts.requirements or line.opts.constraints)
|
||||
):
|
||||
# parse a nested requirements file
|
||||
if line.opts.requirements:
|
||||
req_path = line.opts.requirements[0]
|
||||
nested_constraint = False
|
||||
else:
|
||||
req_path = line.opts.constraints[0]
|
||||
nested_constraint = True
|
||||
|
||||
# original file is over http
|
||||
if SCHEME_RE.search(filename):
|
||||
# do a url join so relative paths work
|
||||
req_path = urllib_parse.urljoin(filename, req_path)
|
||||
# original file and nested file are paths
|
||||
elif not SCHEME_RE.search(req_path):
|
||||
# do a join so relative paths work
|
||||
req_path = os.path.join(
|
||||
os.path.dirname(filename), req_path,
|
||||
)
|
||||
|
||||
for inner_line in self._parse_and_recurse(
|
||||
req_path, nested_constraint,
|
||||
):
|
||||
yield inner_line
|
||||
else:
|
||||
yield line
|
||||
|
||||
def _parse_file(self, filename, constraint):
|
||||
# type: (str, bool) -> Iterator[ParsedLine]
|
||||
_, content = get_file_content(
|
||||
filename, self._session, comes_from=self._comes_from
|
||||
)
|
||||
|
||||
lines_enum = preprocess(content)
|
||||
|
||||
for line_number, line in lines_enum:
|
||||
try:
|
||||
args_str, opts = self._line_parser(line)
|
||||
except OptionParsingError as e:
|
||||
# add offending line
|
||||
msg = 'Invalid requirement: {}\n{}'.format(line, e.msg)
|
||||
raise RequirementsFileParseError(msg)
|
||||
|
||||
yield ParsedLine(
|
||||
filename,
|
||||
line_number,
|
||||
self._comes_from,
|
||||
args_str,
|
||||
opts,
|
||||
constraint,
|
||||
)
|
||||
|
||||
|
||||
def get_line_parser(finder):
|
||||
# type: (Optional[PackageFinder]) -> LineParser
|
||||
def parse_line(line):
|
||||
# type: (Text) -> Tuple[str, Values]
|
||||
# Build new parser for each line since it accumulates appendable
|
||||
# options.
|
||||
parser = build_parser()
|
||||
defaults = parser.get_default_values()
|
||||
defaults.index_url = None
|
||||
if finder:
|
||||
defaults.format_control = finder.format_control
|
||||
|
||||
args_str, options_str = break_args_options(line)
|
||||
# Prior to 2.7.3, shlex cannot deal with unicode entries
|
||||
if sys.version_info < (2, 7, 3):
|
||||
# https://github.com/python/mypy/issues/1174
|
||||
options_str = options_str.encode('utf8') # type: ignore
|
||||
|
||||
# https://github.com/python/mypy/issues/1174
|
||||
opts, _ = parser.parse_args(
|
||||
shlex.split(options_str), defaults) # type: ignore
|
||||
|
||||
return args_str, opts
|
||||
|
||||
return parse_line
|
||||
|
||||
|
||||
def break_args_options(line):
|
||||
# type: (Text) -> Tuple[str, Text]
|
||||
"""Break up the line into an args and options string. We only want to shlex
|
||||
(and then optparse) the options, not the args. args can contain markers
|
||||
which are corrupted by shlex.
|
||||
"""
|
||||
tokens = line.split(' ')
|
||||
args = []
|
||||
options = tokens[:]
|
||||
for token in tokens:
|
||||
if token.startswith('-') or token.startswith('--'):
|
||||
break
|
||||
else:
|
||||
args.append(token)
|
||||
options.pop(0)
|
||||
return ' '.join(args), ' '.join(options) # type: ignore
|
||||
|
||||
|
||||
class OptionParsingError(Exception):
|
||||
def __init__(self, msg):
|
||||
# type: (str) -> None
|
||||
self.msg = msg
|
||||
|
||||
|
||||
def build_parser():
|
||||
# type: () -> optparse.OptionParser
|
||||
"""
|
||||
Return a parser for parsing requirement lines
|
||||
"""
|
||||
parser = optparse.OptionParser(add_help_option=False)
|
||||
|
||||
option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
|
||||
for option_factory in option_factories:
|
||||
option = option_factory()
|
||||
parser.add_option(option)
|
||||
|
||||
# By default optparse sys.exits on parsing errors. We want to wrap
|
||||
# that in our own exception.
|
||||
def parser_exit(self, msg):
|
||||
# type: (Any, str) -> NoReturn
|
||||
raise OptionParsingError(msg)
|
||||
# NOTE: mypy disallows assigning to a method
|
||||
# https://github.com/python/mypy/issues/2427
|
||||
parser.exit = parser_exit # type: ignore
|
||||
|
||||
return parser
|
||||
|
||||
|
||||
def join_lines(lines_enum):
|
||||
# type: (ReqFileLines) -> ReqFileLines
|
||||
"""Joins a line ending in '\' with the previous line (except when following
|
||||
comments). The joined line takes on the index of the first line.
|
||||
"""
|
||||
primary_line_number = None
|
||||
new_line = [] # type: List[Text]
|
||||
for line_number, line in lines_enum:
|
||||
if not line.endswith('\\') or COMMENT_RE.match(line):
|
||||
if COMMENT_RE.match(line):
|
||||
# this ensures comments are always matched later
|
||||
line = ' ' + line
|
||||
if new_line:
|
||||
new_line.append(line)
|
||||
assert primary_line_number is not None
|
||||
yield primary_line_number, ''.join(new_line)
|
||||
new_line = []
|
||||
else:
|
||||
yield line_number, line
|
||||
else:
|
||||
if not new_line:
|
||||
primary_line_number = line_number
|
||||
new_line.append(line.strip('\\'))
|
||||
|
||||
# last line contains \
|
||||
if new_line:
|
||||
assert primary_line_number is not None
|
||||
yield primary_line_number, ''.join(new_line)
|
||||
|
||||
# TODO: handle space after '\'.
|
||||
|
||||
|
||||
def ignore_comments(lines_enum):
|
||||
# type: (ReqFileLines) -> ReqFileLines
|
||||
"""
|
||||
Strips comments and filter empty lines.
|
||||
"""
|
||||
for line_number, line in lines_enum:
|
||||
line = COMMENT_RE.sub('', line)
|
||||
line = line.strip()
|
||||
if line:
|
||||
yield line_number, line
|
||||
|
||||
|
||||
def expand_env_variables(lines_enum):
|
||||
# type: (ReqFileLines) -> ReqFileLines
|
||||
"""Replace all environment variables that can be retrieved via `os.getenv`.
|
||||
|
||||
The only allowed format for environment variables defined in the
|
||||
requirement file is `${MY_VARIABLE_1}` to ensure two things:
|
||||
|
||||
1. Strings that contain a `$` aren't accidentally (partially) expanded.
|
||||
2. Ensure consistency across platforms for requirement files.
|
||||
|
||||
These points are the result of a discussion on the `github pull
|
||||
request #3514 <https://github.com/pypa/pip/pull/3514>`_.
|
||||
|
||||
Valid characters in variable names follow the `POSIX standard
|
||||
<http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
|
||||
to uppercase letter, digits and the `_` (underscore).
|
||||
"""
|
||||
for line_number, line in lines_enum:
|
||||
for env_var, var_name in ENV_VAR_RE.findall(line):
|
||||
value = os.getenv(var_name)
|
||||
if not value:
|
||||
continue
|
||||
|
||||
line = line.replace(env_var, value)
|
||||
|
||||
yield line_number, line
|
||||
|
||||
|
||||
def get_file_content(url, session, comes_from=None):
|
||||
# type: (str, PipSession, Optional[str]) -> Tuple[str, Text]
|
||||
"""Gets the content of a file; it may be a filename, file: URL, or
|
||||
http: URL. Returns (location, content). Content is unicode.
|
||||
Respects # -*- coding: declarations on the retrieved files.
|
||||
|
||||
:param url: File path or url.
|
||||
:param session: PipSession instance.
|
||||
:param comes_from: Origin description of requirements.
|
||||
"""
|
||||
scheme = get_url_scheme(url)
|
||||
|
||||
if scheme in ['http', 'https']:
|
||||
# FIXME: catch some errors
|
||||
resp = session.get(url)
|
||||
raise_for_status(resp)
|
||||
return resp.url, resp.text
|
||||
|
||||
elif scheme == 'file':
|
||||
if comes_from and comes_from.startswith('http'):
|
||||
raise InstallationError(
|
||||
'Requirements file {} references URL {}, '
|
||||
'which is local'.format(comes_from, url)
|
||||
)
|
||||
|
||||
path = url.split(':', 1)[1]
|
||||
path = path.replace('\\', '/')
|
||||
match = _url_slash_drive_re.match(path)
|
||||
if match:
|
||||
path = match.group(1) + ':' + path.split('|', 1)[1]
|
||||
path = urllib_parse.unquote(path)
|
||||
if path.startswith('/'):
|
||||
path = '/' + path.lstrip('/')
|
||||
url = path
|
||||
|
||||
try:
|
||||
with open(url, 'rb') as f:
|
||||
content = auto_decode(f.read())
|
||||
except IOError as exc:
|
||||
raise InstallationError(
|
||||
'Could not open requirements file: {}'.format(exc)
|
||||
)
|
||||
return url, content
|
||||
|
||||
|
||||
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
|
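A minimal sketch of driving the parser above, assuming a requirements.txt exists locally; a `${API_TOKEN}` reference in the file would be expanded by expand_env_variables() during preprocess():

import os

from pip._internal.network.session import PipSession
from pip._internal.req.req_file import parse_requirements

os.environ["API_TOKEN"] = "s3cret"  # consumed by ${API_TOKEN} lines, if any

session = PipSession()
for parsed in parse_requirements("requirements.txt", session=session):
    # Each item is a ParsedRequirement; `options` carries per-requirement
    # flags (e.g. --hash) collected by handle_requirement_line().
    print(parsed.requirement, parsed.is_editable, parsed.options)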
venv/Lib/site-packages/pip/_internal/req/req_install.py (new file, 905 lines)
@@ -0,0 +1,905 @@
# The following comment should be removed at some point in the future.
|
||||
# mypy: strict-optional=False
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import uuid
|
||||
import zipfile
|
||||
|
||||
from pip._vendor import pkg_resources, six
|
||||
from pip._vendor.packaging.requirements import Requirement
|
||||
from pip._vendor.packaging.utils import canonicalize_name
|
||||
from pip._vendor.packaging.version import Version
|
||||
from pip._vendor.packaging.version import parse as parse_version
|
||||
from pip._vendor.pep517.wrappers import Pep517HookCaller
|
||||
|
||||
from pip._internal.build_env import NoOpBuildEnvironment
|
||||
from pip._internal.exceptions import InstallationError
|
||||
from pip._internal.locations import get_scheme
|
||||
from pip._internal.models.link import Link
|
||||
from pip._internal.operations.build.metadata import generate_metadata
|
||||
from pip._internal.operations.build.metadata_legacy import \
|
||||
generate_metadata as generate_metadata_legacy
|
||||
from pip._internal.operations.install.editable_legacy import \
|
||||
install_editable as install_editable_legacy
|
||||
from pip._internal.operations.install.legacy import LegacyInstallFailure
|
||||
from pip._internal.operations.install.legacy import install as install_legacy
|
||||
from pip._internal.operations.install.wheel import install_wheel
|
||||
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
|
||||
from pip._internal.req.req_uninstall import UninstallPathSet
|
||||
from pip._internal.utils.deprecation import deprecated
|
||||
from pip._internal.utils.direct_url_helpers import direct_url_from_link
|
||||
from pip._internal.utils.hashes import Hashes
|
||||
from pip._internal.utils.logging import indent_log
|
||||
from pip._internal.utils.misc import (
|
||||
ask_path_exists,
|
||||
backup_dir,
|
||||
display_path,
|
||||
dist_in_site_packages,
|
||||
dist_in_usersite,
|
||||
get_distribution,
|
||||
get_installed_version,
|
||||
hide_url,
|
||||
redact_auth_from_url,
|
||||
)
|
||||
from pip._internal.utils.packaging import get_metadata
|
||||
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
||||
from pip._internal.utils.virtualenv import running_under_virtualenv
|
||||
from pip._internal.vcs import vcs
|
||||
|
||||
if MYPY_CHECK_RUNNING:
|
||||
from typing import (
|
||||
Any, Dict, Iterable, List, Optional, Sequence, Union,
|
||||
)
|
||||
from pip._internal.build_env import BuildEnvironment
|
||||
from pip._vendor.pkg_resources import Distribution
|
||||
from pip._vendor.packaging.specifiers import SpecifierSet
|
||||
from pip._vendor.packaging.markers import Marker
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_dist(metadata_directory):
|
||||
# type: (str) -> Distribution
|
||||
"""Return a pkg_resources.Distribution for the provided
|
||||
metadata directory.
|
||||
"""
|
||||
dist_dir = metadata_directory.rstrip(os.sep)
|
||||
|
||||
# Build a PathMetadata object, from path to metadata. :wink:
|
||||
base_dir, dist_dir_name = os.path.split(dist_dir)
|
||||
metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
|
||||
|
||||
# Determine the correct Distribution object type.
|
||||
if dist_dir.endswith(".egg-info"):
|
||||
dist_cls = pkg_resources.Distribution
|
||||
dist_name = os.path.splitext(dist_dir_name)[0]
|
||||
else:
|
||||
assert dist_dir.endswith(".dist-info")
|
||||
dist_cls = pkg_resources.DistInfoDistribution
|
||||
dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
|
||||
|
||||
return dist_cls(
|
||||
base_dir,
|
||||
project_name=dist_name,
|
||||
metadata=metadata,
|
||||
)
|
||||
|
||||
|
||||
class InstallRequirement(object):
|
||||
"""
|
||||
Represents something that may be installed later on, may have information
|
||||
about where to fetch the relevant requirement and also contains logic for
|
||||
installing the said requirement.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
req, # type: Optional[Requirement]
|
||||
comes_from, # type: Optional[Union[str, InstallRequirement]]
|
||||
editable=False, # type: bool
|
||||
link=None, # type: Optional[Link]
|
||||
markers=None, # type: Optional[Marker]
|
||||
use_pep517=None, # type: Optional[bool]
|
||||
isolated=False, # type: bool
|
||||
install_options=None, # type: Optional[List[str]]
|
||||
global_options=None, # type: Optional[List[str]]
|
||||
hash_options=None, # type: Optional[Dict[str, List[str]]]
|
||||
constraint=False, # type: bool
|
||||
extras=(), # type: Iterable[str]
|
||||
user_supplied=False, # type: bool
|
||||
):
|
||||
# type: (...) -> None
|
||||
assert req is None or isinstance(req, Requirement), req
|
||||
self.req = req
|
||||
self.comes_from = comes_from
|
||||
self.constraint = constraint
|
||||
self.editable = editable
|
||||
self.legacy_install_reason = None # type: Optional[int]
|
||||
|
||||
# source_dir is the local directory where the linked requirement is
|
||||
# located, or unpacked. In case unpacking is needed, creating and
|
||||
# populating source_dir is done by the RequirementPreparer. Note this
|
||||
# is not necessarily the directory where pyproject.toml or setup.py is
|
||||
# located - that one is obtained via unpacked_source_directory.
|
||||
self.source_dir = None # type: Optional[str]
|
||||
if self.editable:
|
||||
assert link
|
||||
if link.is_file:
|
||||
self.source_dir = os.path.normpath(
|
||||
os.path.abspath(link.file_path)
|
||||
)
|
||||
|
||||
if link is None and req and req.url:
|
||||
# PEP 508 URL requirement
|
||||
link = Link(req.url)
|
||||
self.link = self.original_link = link
|
||||
self.original_link_is_in_wheel_cache = False
|
||||
|
||||
# Path to any downloaded or already-existing package.
|
||||
self.local_file_path = None # type: Optional[str]
|
||||
if self.link and self.link.is_file:
|
||||
self.local_file_path = self.link.file_path
|
||||
|
||||
if extras:
|
||||
self.extras = extras
|
||||
elif req:
|
||||
self.extras = {
|
||||
pkg_resources.safe_extra(extra) for extra in req.extras
|
||||
}
|
||||
else:
|
||||
self.extras = set()
|
||||
if markers is None and req:
|
||||
markers = req.marker
|
||||
self.markers = markers
|
||||
|
||||
# This holds the pkg_resources.Distribution object if this requirement
|
||||
# is already available:
|
||||
self.satisfied_by = None # type: Optional[Distribution]
|
||||
# Whether the installation process should try to uninstall an existing
|
||||
# distribution before installing this requirement.
|
||||
self.should_reinstall = False
|
||||
# Temporary build location
|
||||
self._temp_build_dir = None # type: Optional[TempDirectory]
|
||||
# Set to True after successful installation
|
||||
self.install_succeeded = None # type: Optional[bool]
|
||||
# Supplied options
|
||||
self.install_options = install_options if install_options else []
|
||||
self.global_options = global_options if global_options else []
|
||||
self.hash_options = hash_options if hash_options else {}
|
||||
# Set to True after successful preparation of this requirement
|
||||
self.prepared = False
|
||||
# User supplied requirement are explicitly requested for installation
|
||||
# by the user via CLI arguments or requirements files, as opposed to,
|
||||
# e.g. dependencies, extras or constraints.
|
||||
self.user_supplied = user_supplied
|
||||
|
||||
# Set by the legacy resolver when the requirement has been downloaded
|
||||
# TODO: This introduces a strong coupling between the resolver and the
|
||||
# requirement (the coupling was previously between the resolver
|
||||
# and the requirement set). This should be refactored to allow
|
||||
# the requirement to decide for itself when it has been
|
||||
# successfully downloaded - but that is more tricky to get right,
|
||||
# se we are making the change in stages.
|
||||
self.successfully_downloaded = False
|
||||
|
||||
self.isolated = isolated
|
||||
self.build_env = NoOpBuildEnvironment() # type: BuildEnvironment
|
||||
|
||||
# For PEP 517, the directory where we request the project metadata
|
||||
# gets stored. We need this to pass to build_wheel, so the backend
|
||||
# can ensure that the wheel matches the metadata (see the PEP for
|
||||
# details).
|
||||
self.metadata_directory = None # type: Optional[str]
|
||||
|
||||
# The static build requirements (from pyproject.toml)
|
||||
self.pyproject_requires = None # type: Optional[List[str]]
|
||||
|
||||
# Build requirements that we will check are available
|
||||
self.requirements_to_check = [] # type: List[str]
|
||||
|
||||
# The PEP 517 backend we should use to build the project
|
||||
self.pep517_backend = None # type: Optional[Pep517HookCaller]
|
||||
|
||||
# Are we using PEP 517 for this requirement?
|
||||
# After pyproject.toml has been loaded, the only valid values are True
|
||||
# and False. Before loading, None is valid (meaning "use the default").
|
||||
# Setting an explicit value before loading pyproject.toml is supported,
|
||||
# but after loading this flag should be treated as read only.
|
||||
self.use_pep517 = use_pep517
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> str
|
||||
if self.req:
|
||||
s = str(self.req)
|
||||
if self.link:
|
||||
s += ' from {}'.format(redact_auth_from_url(self.link.url))
|
||||
elif self.link:
|
||||
s = redact_auth_from_url(self.link.url)
|
||||
else:
|
||||
s = '<InstallRequirement>'
|
||||
if self.satisfied_by is not None:
|
||||
s += ' in {}'.format(display_path(self.satisfied_by.location))
|
||||
if self.comes_from:
|
||||
if isinstance(self.comes_from, six.string_types):
|
||||
comes_from = self.comes_from # type: Optional[str]
|
||||
else:
|
||||
comes_from = self.comes_from.from_path()
|
||||
if comes_from:
|
||||
s += ' (from {})'.format(comes_from)
|
||||
return s
|
||||
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return '<{} object: {} editable={!r}>'.format(
|
||||
self.__class__.__name__, str(self), self.editable)
|
||||
|
||||
def format_debug(self):
|
||||
# type: () -> str
|
||||
"""An un-tested helper for getting state, for debugging.
|
||||
"""
|
||||
attributes = vars(self)
|
||||
names = sorted(attributes)
|
||||
|
||||
state = (
|
||||
"{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
|
||||
)
|
||||
return '<{name} object: {{{state}}}>'.format(
|
||||
name=self.__class__.__name__,
|
||||
state=", ".join(state),
|
||||
)
|
||||
|
||||
# Things that are valid for all kinds of requirements?
|
||||
@property
|
||||
def name(self):
|
||||
# type: () -> Optional[str]
|
||||
if self.req is None:
|
||||
return None
|
||||
return six.ensure_str(pkg_resources.safe_name(self.req.name))
|
||||
|
||||
@property
|
||||
def specifier(self):
|
||||
# type: () -> SpecifierSet
|
||||
return self.req.specifier
|
||||
|
||||
@property
|
||||
def is_pinned(self):
|
||||
# type: () -> bool
|
||||
"""Return whether I am pinned to an exact version.
|
||||
|
||||
For example, some-package==1.2 is pinned; some-package>1.2 is not.
|
||||
"""
|
||||
specifiers = self.specifier
|
||||
return (len(specifiers) == 1 and
|
||||
next(iter(specifiers)).operator in {'==', '==='})
|
||||
|
||||
@property
|
||||
def installed_version(self):
|
||||
# type: () -> Optional[str]
|
||||
return get_installed_version(self.name)
|
||||
|
||||
def match_markers(self, extras_requested=None):
|
||||
# type: (Optional[Iterable[str]]) -> bool
|
||||
if not extras_requested:
|
||||
# Provide an extra to safely evaluate the markers
|
||||
# without matching any extra
|
||||
extras_requested = ('',)
|
||||
if self.markers is not None:
|
||||
return any(
|
||||
self.markers.evaluate({'extra': extra})
|
||||
for extra in extras_requested)
|
||||
else:
|
||||
return True
|
||||
|
||||
@property
|
||||
def has_hash_options(self):
|
||||
# type: () -> bool
|
||||
"""Return whether any known-good hashes are specified as options.
|
||||
|
||||
These activate --require-hashes mode; hashes specified as part of a
|
||||
URL do not.
|
||||
|
||||
"""
|
||||
return bool(self.hash_options)
|
||||
|
||||
def hashes(self, trust_internet=True):
|
||||
# type: (bool) -> Hashes
|
||||
"""Return a hash-comparer that considers my option- and URL-based
|
||||
hashes to be known-good.
|
||||
|
||||
Hashes in URLs--ones embedded in the requirements file, not ones
|
||||
downloaded from an index server--are almost peers with ones from
|
||||
flags. They satisfy --require-hashes (whether it was implicitly or
|
||||
explicitly activated) but do not activate it. md5 and sha224 are not
|
||||
allowed in flags, which should nudge people toward good algos. We
|
||||
always OR all hashes together, even ones from URLs.
|
||||
|
||||
:param trust_internet: Whether to trust URL-based (#md5=...) hashes
|
||||
downloaded from the internet, as by populate_link()
|
||||
|
||||
"""
|
||||
good_hashes = self.hash_options.copy()
|
||||
link = self.link if trust_internet else self.original_link
|
||||
if link and link.hash:
|
||||
good_hashes.setdefault(link.hash_name, []).append(link.hash)
|
||||
return Hashes(good_hashes)

    def from_path(self):
        # type: () -> Optional[str]
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, six.string_types):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def ensure_build_location(self, build_dir, autodelete, parallel_builds):
        # type: (str, bool, bool) -> str
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = "{}_{}".format(dir_name, uuid.uuid4().hex)

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )

    def warn_on_mismatching_name(self):
        # type: () -> None
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_distribution(self.req.name)
        if not existing_dist:
            return

        existing_version = existing_dist.parsed_version
        if not self.req.specifier.contains(existing_version, prereleases=True):
            self.satisfied_by = None
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist

    # Things valid for wheels
    @property
    def is_wheel(self):
        # type: () -> bool
        if not self.link:
            return False
        return self.link.is_wheel

    # Things valid for sdists
    @property
    def unpacked_source_directory(self):
        # type: () -> str
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py_path(self):
        # type: () -> str
        assert self.source_dir, "No source dir for {}".format(self)
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py

    @property
    def pyproject_toml_path(self):
        # type: () -> str
        assert self.source_dir, "No source dir for {}".format(self)
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )

    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.
        """
        if not self.use_pep517:
            assert self.unpacked_source_directory

            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=self.name or "from {}".format(self.link)
            )

        assert self.pep517_backend is not None

        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )

    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self):
        # type: () -> Any
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata

    def get_dist(self):
        # type: () -> Distribution
        return _get_dist(self.metadata_directory)

    def assert_source_matches_version(self):
        # type: () -> None
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(
        self,
        parent_dir,
        autodelete=False,
        parallel_builds=False,
    ):
        # type: (str, bool, bool) -> None
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir,
                autodelete=autodelete,
                parallel_builds=parallel_builds,
            )

    # For editable installations
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        assert '+' in self.link.url, \
            "bad url: {self.link.url!r}".format(**locals())
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            if not self.link.is_vcs:
                reason = (
                    "This form of VCS requirement is being deprecated: {}."
                ).format(
                    self.link.url
                )
                replacement = None
                if self.link.url.startswith("git+git@"):
                    replacement = (
                        "git+https://git@example.com/..., "
                        "git+ssh://git@example.com/..., "
                        "or the insecure git+git://git@example.com/..."
                    )
                deprecated(reason, replacement, gone_in="21.0", issue=7554)
            hidden_url = hide_url(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir, url=hidden_url)
            else:
                vcs_backend.export(self.source_dir, url=hidden_url)
        else:
            assert 0, (
                'Unexpected version control type (in {}): {}'.format(
                    self.link, vc_type))

    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        assert self.req
        dist = get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path, parentdir, rootdir):
        # type: (str, str, str) -> str

        def _clean_zip_name(name, prefix):
            # type: (str, str) -> str
            assert name.startswith(prefix + os.path.sep), (
                "name {name!r} doesn't start with prefix {prefix!r}"
                .format(**locals())
            )
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name

    def archive(self, build_dir):
        # type: (str) -> None
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir

        create_archive = True
        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file {} exists. (i)gnore, (w)ipe, '
                '(b)ackup, (a)bort '.format(
                    display_path(archive_path)),
                ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))

    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.original_link:
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=self.user_supplied,
            )
            self.install_succeeded = True
            return

        # TODO: Why don't we do this for editable installs?

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + self.global_options
        install_options = list(install_options) + self.install_options

        try:
            success = install_legacy(
                install_options=install_options,
                global_options=global_options,
                root=root,
                home=home,
                prefix=prefix,
                use_user_site=use_user_site,
                pycompile=pycompile,
                scheme=scheme,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                req_name=self.name,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
                req_description=str(self.req),
            )
        except LegacyInstallFailure as exc:
            self.install_succeeded = False
            six.reraise(*exc.parent)
        except Exception:
            self.install_succeeded = True
            raise

        self.install_succeeded = success

        if success and self.legacy_install_reason == 8368:
            deprecated(
                reason=(
                    "{} was installed using the legacy 'setup.py install' "
                    "method, because a wheel could not be built for it.".
                    format(self.name)
                ),
                replacement="to fix the wheel build issue reported above",
                gone_in="21.0",
                issue=8368,
            )


def check_invalid_constraint_type(req):
    # type: (InstallRequirement) -> str

    # Check for unsupported forms
    problem = ""
    if not req.name:
        problem = "Unnamed requirements are not allowed as constraints"
    elif req.link:
        problem = "Links are not allowed as constraints"
    elif req.extras:
        problem = "Constraints cannot have extras"

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement=(
                "replacing the constraint with a requirement."
            ),
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210
        )

    return problem
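A quick sketch of the version-operator choice made in _set_requirement above (illustrative, not part of the vendored file; it assumes this vintage of the vendored packaging library, which still returns LegacyVersion objects for strings that are not valid PEP 440 versions):

from pip._vendor.packaging.version import Version, parse as parse_version

for v in ('1.0.3', '2013b'):
    # PEP 440 versions pin with '=='; anything else needs the
    # arbitrary-equality operator '==='.
    op = '==' if isinstance(parse_version(v), Version) else '==='
    print('pkg{}{}'.format(op, v))
# -> pkg==1.0.3
# -> pkg===2013b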
203
venv/Lib/site-packages/pip/_internal/req/req_set.py
Normal file
@@ -0,0 +1,203 @@
from __future__ import absolute_import

import logging
from collections import OrderedDict

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import InstallationError
from pip._internal.models.wheel import Wheel
from pip._internal.utils import compatibility_tags
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Dict, Iterable, List, Optional, Tuple
    from pip._internal.req.req_install import InstallRequirement


logger = logging.getLogger(__name__)


class RequirementSet(object):

    def __init__(self, check_supported_wheels=True):
        # type: (bool) -> None
        """Create a RequirementSet.
        """

        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]  # noqa: E501
        self.check_supported_wheels = check_supported_wheels

        self.unnamed_requirements = []  # type: List[InstallRequirement]

    def __str__(self):
        # type: () -> str
        requirements = sorted(
            (req for req in self.requirements.values() if not req.comes_from),
            key=lambda req: canonicalize_name(req.name),
        )
        return ' '.join(str(req.req) for req in requirements)

    def __repr__(self):
        # type: () -> str
        requirements = sorted(
            self.requirements.values(),
            key=lambda req: canonicalize_name(req.name),
        )

        format_string = '<{classname} object; {count} requirement(s): {reqs}>'
        return format_string.format(
            classname=self.__class__.__name__,
            count=len(requirements),
            reqs=', '.join(str(req.req) for req in requirements),
        )

    def add_unnamed_requirement(self, install_req):
        # type: (InstallRequirement) -> None
        assert not install_req.name
        self.unnamed_requirements.append(install_req)

    def add_named_requirement(self, install_req):
        # type: (InstallRequirement) -> None
        assert install_req.name

        project_name = canonicalize_name(install_req.name)
        self.requirements[project_name] = install_req

    def add_requirement(
        self,
        install_req,  # type: InstallRequirement
        parent_req_name=None,  # type: Optional[str]
        extras_requested=None  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]  # noqa: E501
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                install_req.name, install_req.markers,
            )
            return [], None

        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            tags = compatibility_tags.get_supported()
            if (self.check_supported_wheels and not wheel.supported(tags)):
                raise InstallationError(
                    "{} is not a supported wheel on this platform.".format(
                        wheel.filename)
                )

        # This next bit is really a sanity check.
        assert not install_req.user_supplied or parent_req_name is None, (
            "a user supplied req shouldn't have a parent"
        )

        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not install_req.name:
            self.add_unnamed_requirement(install_req)
            return [install_req], None

        try:
            existing_req = self.get_requirement(
                install_req.name)  # type: Optional[InstallRequirement]
        except KeyError:
            existing_req = None

        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: {} (already in {}, name={!r})"
                .format(install_req, existing_req, install_req.name)
            )

        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.add_named_requirement(install_req)
            # We'd want to rescan this requirement later
            return [install_req], install_req

        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req

        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            raise InstallationError(
                "Could not satisfy constraints for '{}': "
                "installation from path or url cannot be "
                "constrained to a version".format(install_req.name)
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        # If we're now installing a user supplied requirement,
        # mark the existing object as such.
        if install_req.user_supplied:
            existing_req.user_supplied = True
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req

    def has_requirement(self, name):
        # type: (str) -> bool
        project_name = canonicalize_name(name)

        return (
            project_name in self.requirements and
            not self.requirements[project_name].constraint
        )

    def get_requirement(self, name):
        # type: (str) -> InstallRequirement
        project_name = canonicalize_name(name)

        if project_name in self.requirements:
            return self.requirements[project_name]

        raise KeyError("No project with the name {name!r}".format(**locals()))

    @property
    def all_requirements(self):
        # type: () -> List[InstallRequirement]
        return self.unnamed_requirements + list(self.requirements.values())
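A small sketch of why RequirementSet keys everything through canonicalize_name (illustrative, not part of the vendored file): name comparisons in pip are case- and separator-insensitive per PEP 503, so differently spelled references to the same project land on the same requirement slot.

from pip._vendor.packaging.utils import canonicalize_name

assert canonicalize_name('Django') == 'django'
# Both of these normalize to 'zope-interface', so add_named_requirement
# and get_requirement agree on the key regardless of the user's spelling.
assert canonicalize_name('zope.interface') == canonicalize_name('Zope-Interface')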
150
venv/Lib/site-packages/pip/_internal/req/req_tracker.py
Normal file
@@ -0,0 +1,150 @@
from __future__ import absolute_import

import contextlib
import errno
import hashlib
import logging
import os

from pip._vendor import contextlib2

from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from types import TracebackType
    from typing import Dict, Iterator, Optional, Set, Type, Union
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.models.link import Link

logger = logging.getLogger(__name__)


@contextlib.contextmanager
def update_env_context_manager(**changes):
    # type: (str) -> Iterator[None]
    target = os.environ

    # Save values from the target and change them.
    non_existent_marker = object()
    saved_values = {}  # type: Dict[str, Union[object, str]]
    for name, new_value in changes.items():
        try:
            saved_values[name] = target[name]
        except KeyError:
            saved_values[name] = non_existent_marker
        target[name] = new_value

    try:
        yield
    finally:
        # Restore original values in the target.
        for name, original_value in saved_values.items():
            if original_value is non_existent_marker:
                del target[name]
            else:
                assert isinstance(original_value, str)  # for mypy
                target[name] = original_value


@contextlib.contextmanager
def get_requirement_tracker():
    # type: () -> Iterator[RequirementTracker]
    root = os.environ.get('PIP_REQ_TRACKER')
    with contextlib2.ExitStack() as ctx:
        if root is None:
            root = ctx.enter_context(
                TempDirectory(kind='req-tracker')
            ).path
            ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with RequirementTracker(root) as tracker:
            yield tracker


class RequirementTracker(object):

    def __init__(self, root):
        # type: (str) -> None
        self._root = root
        self._entries = set()  # type: Set[InstallRequirement]
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self):
        # type: () -> RequirementTracker
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type,  # type: Optional[Type[BaseException]]
        exc_val,  # type: Optional[BaseException]
        exc_tb  # type: Optional[TracebackType]
    ):
        # type: (...) -> None
        self.cleanup()

    def _entry_path(self, link):
        # type: (Link) -> str
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req):
        # type: (InstallRequirement) -> None
        """Add an InstallRequirement to build tracking.
        """

        assert req.link
        # Get the file to write information about this requirement.
        entry_path = self._entry_path(req.link)

        # Try reading from the file. If it exists and can be read from, a build
        # is already in progress, so a LookupError is raised.
        try:
            with open(entry_path) as fp:
                contents = fp.read()
        except IOError as e:
            # if the error is anything other than "file does not exist", raise.
            if e.errno != errno.ENOENT:
                raise
        else:
            message = '{} is already being built: {}'.format(
                req.link, contents)
            raise LookupError(message)

        # If we're here, req should really not be building already.
        assert req not in self._entries

        # Start tracking this requirement.
        with open(entry_path, 'w') as fp:
            fp.write(str(req))
        self._entries.add(req)

        logger.debug('Added %s to build tracker %r', req, self._root)

    def remove(self, req):
        # type: (InstallRequirement) -> None
        """Remove an InstallRequirement from build tracking.
        """

        assert req.link
        # Delete the created file and the corresponding entries.
        os.unlink(self._entry_path(req.link))
        self._entries.remove(req)

        logger.debug('Removed %s from build tracker %r', req, self._root)

    def cleanup(self):
        # type: () -> None
        for req in set(self._entries):
            self.remove(req)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req):
        # type: (InstallRequirement) -> Iterator[None]
        self.add(req)
        yield
        self.remove(req)
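A usage sketch for update_env_context_manager above (illustrative, not part of the vendored file; it assumes this module is importable as pip._internal.req.req_tracker from the venv shown here):

import os
from pip._internal.req.req_tracker import update_env_context_manager

before = os.environ.get('PIP_REQ_TRACKER')
with update_env_context_manager(PIP_REQ_TRACKER='/tmp/example-tracker'):
    # Inside the block the override is visible to any code (or subprocess
    # environment) that consults os.environ, which is how
    # get_requirement_tracker shares one tracker directory across builds.
    assert os.environ['PIP_REQ_TRACKER'] == '/tmp/example-tracker'
# On exit the previous value (or its absence) is restored.
assert os.environ.get('PIP_REQ_TRACKER') == before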
648
venv/Lib/site-packages/pip/_internal/req/req_uninstall.py
Normal file
@@ -0,0 +1,648 @@
from __future__ import absolute_import

import csv
import functools
import logging
import os
import sys
import sysconfig

from pip._vendor import pkg_resources

from pip._internal.exceptions import UninstallationError
from pip._internal.locations import bin_py, bin_user
from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    FakeFile,
    ask,
    dist_in_usersite,
    dist_is_local,
    egg_link_path,
    is_local,
    normalize_path,
    renames,
    rmtree,
)
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
    )
    from pip._vendor.pkg_resources import Distribution

logger = logging.getLogger(__name__)


def _script_names(dist, script_name, is_gui):
    # type: (Distribution, str, bool) -> List[str]
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    if dist_in_usersite(dist):
        bin_dir = bin_user
    else:
        bin_dir = bin_py
    exe_name = os.path.join(bin_dir, script_name)
    paths_to_remove = [exe_name]
    if WINDOWS:
        paths_to_remove.append(exe_name + '.exe')
        paths_to_remove.append(exe_name + '.exe.manifest')
        if is_gui:
            paths_to_remove.append(exe_name + '-script.pyw')
        else:
            paths_to_remove.append(exe_name + '-script.py')
    return paths_to_remove


def _unique(fn):
    # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]
    @functools.wraps(fn)
    def unique(*args, **kw):
        # type: (Any, Any) -> Iterator[Any]
        seen = set()  # type: Set[Any]
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


@_unique
def uninstallation_paths(dist):
    # type: (Distribution) -> Iterator[str]
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc and .pyo in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .py[co].
    """
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
            path = os.path.join(dn, base + '.pyo')
            yield path


def compact(paths):
    # type: (Iterable[str]) -> Set[str]
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    short_paths = set()  # type: Set[str]
    for path in sorted(paths, key=len):
        should_skip = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_skip:
            short_paths.add(path)
    return short_paths


def compress_for_rename(paths):
    # type: (Iterable[str]) -> Set[str]
    """Returns a set containing the paths that need to be renamed.

    This set may include directories when the original sequence of paths
    included every file on disk.
    """
    case_map = dict((os.path.normcase(p), p) for p in paths)
    remaining = set(case_map)
    unchecked = sorted(set(os.path.split(p)[0]
                           for p in case_map.values()), key=len)
    wildcards = set()  # type: Set[str]

    def norm_join(*a):
        # type: (str) -> str
        return os.path.normcase(os.path.join(*a))

    for root in unchecked:
        if any(os.path.normcase(root).startswith(w)
               for w in wildcards):
            # This directory has already been handled.
            continue

        all_files = set()  # type: Set[str]
        all_subdirs = set()  # type: Set[str]
        for dirname, subdirs, files in os.walk(root):
            all_subdirs.update(norm_join(root, dirname, d)
                               for d in subdirs)
            all_files.update(norm_join(root, dirname, f)
                             for f in files)
        # If all the files we found are in our remaining set of files to
        # remove, then remove them from the latter set and add a wildcard
        # for the directory.
        if not (all_files - remaining):
            remaining.difference_update(all_files)
            wildcards.add(root + os.sep)

    return set(map(case_map.__getitem__, remaining)) | wildcards


def compress_for_output_listing(paths):
    # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
    """Returns a tuple of 2 sets of which paths to display to user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all its contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """

    will_remove = set(paths)
    will_skip = set()

    # Determine folders and files
    folders = set()
    files = set()
    for path in will_remove:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    # probably this one https://github.com/python/mypy/issues/390
    _normcased_files = set(map(os.path.normcase, files))  # type: ignore

    folders = compact(folders)

    # This walks the tree using os.walk to not miss extra folders
    # that might get added.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue

                file_ = os.path.join(dirpath, fname)
                if (os.path.isfile(file_) and
                        os.path.normcase(file_) not in _normcased_files):
                    # We are skipping this file. Add it to the set.
                    will_skip.add(file_)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }

    return will_remove, will_skip
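# Illustrative note (not in upstream pip): for a package installed as
# site-packages/demo/ plus demo-1.0.dist-info/, the first returned set
# presents whole directories as '.../demo/*' and '.../demo-1.0.dist-info/*',
# while the second set gathers files found on disk under those directories
# that were never scheduled for removal (e.g. a file dropped in by hand
# after installation).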


class StashedUninstallPathSet(object):
    """A set of file rename operations to stash files while
    tentatively uninstalling them."""
    def __init__(self):
        # type: () -> None
        # Mapping from source file root to [Adjacent]TempDirectory
        # for files under that directory.
        self._save_dirs = {}  # type: Dict[str, TempDirectory]
        # (old path, new path) tuples for each move that may need
        # to be undone.
        self._moves = []  # type: List[Tuple[str, str]]

    def _get_directory_stash(self, path):
        # type: (str) -> str
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            save_dir = AdjacentTempDirectory(path)  # type: TempDirectory
        except OSError:
            save_dir = TempDirectory(kind="uninstall")
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path

    def _get_file_stash(self, path):
        # type: (str) -> str
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind='uninstall')
            self._save_dirs[head] = save_dir

        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path

    def stash(self, path):
        # type: (str) -> str
        """Stashes the directory or file and returns its new location.
        Handle symlinks as files to avoid modifying the symlink targets.
        """
        path_is_dir = os.path.isdir(path) and not os.path.islink(path)
        if path_is_dir:
            new_path = self._get_directory_stash(path)
        else:
            new_path = self._get_file_stash(path)

        self._moves.append((path, new_path))
        if (path_is_dir and os.path.isdir(new_path)):
            # If we're moving a directory, we need to
            # remove the destination first or else it will be
            # moved to inside the existing directory.
            # We just created new_path ourselves, so it will
            # be removable.
            os.rmdir(new_path)
        renames(path, new_path)
        return new_path

    def commit(self):
        # type: () -> None
        """Commits the uninstall by removing stashed files."""
        for _, save_dir in self._save_dirs.items():
            save_dir.cleanup()
        self._moves = []
        self._save_dirs = {}

    def rollback(self):
        # type: () -> None
        """Undoes the uninstall by moving stashed files back."""
        for p in self._moves:
            logger.info("Moving to %s\n from %s", *p)

        for new_path, path in self._moves:
            try:
                logger.debug('Replacing %s from %s', new_path, path)
                if os.path.isfile(new_path) or os.path.islink(new_path):
                    os.unlink(new_path)
                elif os.path.isdir(new_path):
                    rmtree(new_path)
                renames(path, new_path)
            except OSError as ex:
                logger.error("Failed to restore %s", new_path)
                logger.debug("Exception: %s", ex)

        self.commit()

    @property
    def can_rollback(self):
        # type: () -> bool
        return bool(self._moves)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        # type: (Distribution) -> None
        self.paths = set()  # type: Set[str]
        self._refuse = set()  # type: Set[str]
        self.pth = {}  # type: Dict[str, UninstallPthEntries]
        self.dist = dist
        self._moved_paths = StashedUninstallPathSet()

    def _permitted(self, path):
        # type: (str) -> bool
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        # type: (str) -> None
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        # type: (str, str) -> None
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def remove(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> None
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                moved = self._moved_paths

                for_rename = compress_for_rename(self.paths)

                for path in sorted(compact(for_rename)):
                    moved.stash(path)
                    logger.debug('Removing file or directory %s', path)

                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        # type: (bool) -> bool
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            # type: (str, Iterable[str]) -> None
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = set(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)
        if verbose:
            _display('Will actually move:', compress_for_rename(self.paths))

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        # type: () -> None
        """Rollback the changes previously made by remove()."""
        if not self._moved_paths.can_rollback:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        self._moved_paths.rollback()
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        # type: () -> None
        """Remove temporary save dir: rollback will no longer be possible."""
        self._moved_paths.commit()

    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> UninstallPathSet
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of the uninstall cases matters: with 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link {} does not match installed location of {} '
                '(at {})'.format(
                    link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove


class UninstallPthEntries(object):
    def __init__(self, pth_file):
        # type: (str) -> None
        self.file = pth_file
        self.entries = set()  # type: Set[str]
        self._saved_lines = None  # type: Optional[List[bytes]]

    def add(self, entry):
        # type: (str) -> None
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        # os.path.splitdrive is used instead of os.path.isabs because isabs
        # treats non-absolute paths with drive letter markings like c:foo\bar
        # as absolute paths. It also does not recognize UNC paths if they don't
        # have more than "\\server\share". Valid examples: "\\server\share\" or
        # "\\server\share\folder". Python 2.7.8+ supports UNC in splitdrive.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        # type: () -> None
        logger.debug('Removing pth entries from %s:', self.file)

        # If the file doesn't exist, log a warning and return
        if not os.path.isfile(self.file):
            logger.warning(
                "Cannot remove entries from nonexistent file %s", self.file
            )
            return
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        # type: () -> bool
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
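A behavior sketch for compact() above (illustrative, not part of the vendored file; POSIX-style paths assumed, since the separator check uses os.path.sep):

from pip._internal.req.req_uninstall import compact

paths = {'/a/path/', '/a/path/to/a/file.txt', '/b/file.py'}
# '/a/path/to/a/file.txt' is dropped because '/a/path/' already covers it.
assert compact(paths) == {'/a/path/', '/b/file.py'}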