Created starter files for the project.
commit 73f0c0db42
1992 changed files with 769897 additions and 0 deletions
2
venv/Lib/site-packages/pip/_internal/models/__init__.py
Normal file
@@ -0,0 +1,2 @@
"""A package that contains models that represent entities.
"""
11 binary files not shown.
38
venv/Lib/site-packages/pip/_internal/models/candidate.py
Normal file
@@ -0,0 +1,38 @@
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._vendor.packaging.version import _BaseVersion
    from pip._internal.models.link import Link


class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation.
    """

    __slots__ = ["name", "version", "link"]

    def __init__(self, name, version, link):
        # type: (str, str, Link) -> None
        self.name = name
        self.version = parse_version(version)  # type: _BaseVersion
        self.link = link

        super(InstallationCandidate, self).__init__(
            key=(self.name, self.version, self.link),
            defining_class=InstallationCandidate
        )

    def __repr__(self):
        # type: () -> str
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.name, self.version, self.link,
        )

    def __str__(self):
        # type: () -> str
        return '{!r} candidate (version {} at {})'.format(
            self.name, self.version, self.link,
        )
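A minimal usage sketch of the InstallationCandidate model above (illustrative only, not part of the commit; it assumes the pip 20.x private modules vendored in this venv are importable, and the URL is made up):

    from pip._internal.models.candidate import InstallationCandidate
    from pip._internal.models.link import Link

    link = Link("https://files.example.org/example-1.0-py3-none-any.whl")  # hypothetical URL
    cand = InstallationCandidate("example", "1.0", link)

    # The version string is parsed into a packaging Version, so comparisons are
    # semantic rather than lexicographic.
    assert cand.version > InstallationCandidate("example", "0.9.10", link).version

    # KeyBasedCompareMixin bases equality on the (name, version, link) key.
    assert cand == InstallationCandidate("example", "1.0", link)
    print(repr(cand))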
245
venv/Lib/site-packages/pip/_internal/models/direct_url.py
Normal file
@@ -0,0 +1,245 @@
""" PEP 610 """
import json
import re

from pip._vendor import six
from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Dict, Iterable, Optional, Type, TypeVar, Union
    )

    T = TypeVar("T")


DIRECT_URL_METADATA_NAME = "direct_url.json"
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")

__all__ = [
    "DirectUrl",
    "DirectUrlValidationError",
    "DirInfo",
    "ArchiveInfo",
    "VcsInfo",
]


class DirectUrlValidationError(Exception):
    pass


def _get(d, expected_type, key, default=None):
    # type: (Dict[str, Any], Type[T], str, Optional[T]) -> Optional[T]
    """Get value from dictionary and verify expected type."""
    if key not in d:
        return default
    value = d[key]
    if six.PY2 and expected_type is str:
        expected_type = six.string_types  # type: ignore
    if not isinstance(value, expected_type):
        raise DirectUrlValidationError(
            "{!r} has unexpected type for {} (expected {})".format(
                value, key, expected_type
            )
        )
    return value


def _get_required(d, expected_type, key, default=None):
    # type: (Dict[str, Any], Type[T], str, Optional[T]) -> T
    value = _get(d, expected_type, key, default)
    if value is None:
        raise DirectUrlValidationError("{} must have a value".format(key))
    return value


def _exactly_one_of(infos):
    # type: (Iterable[Optional[InfoType]]) -> InfoType
    infos = [info for info in infos if info is not None]
    if not infos:
        raise DirectUrlValidationError(
            "missing one of archive_info, dir_info, vcs_info"
        )
    if len(infos) > 1:
        raise DirectUrlValidationError(
            "more than one of archive_info, dir_info, vcs_info"
        )
    assert infos[0] is not None
    return infos[0]


def _filter_none(**kwargs):
    # type: (Any) -> Dict[str, Any]
    """Make dict excluding None values."""
    return {k: v for k, v in kwargs.items() if v is not None}


class VcsInfo(object):
    name = "vcs_info"

    def __init__(
        self,
        vcs,  # type: str
        commit_id,  # type: str
        requested_revision=None,  # type: Optional[str]
        resolved_revision=None,  # type: Optional[str]
        resolved_revision_type=None,  # type: Optional[str]
    ):
        self.vcs = vcs
        self.requested_revision = requested_revision
        self.commit_id = commit_id
        self.resolved_revision = resolved_revision
        self.resolved_revision_type = resolved_revision_type

    @classmethod
    def _from_dict(cls, d):
        # type: (Optional[Dict[str, Any]]) -> Optional[VcsInfo]
        if d is None:
            return None
        return cls(
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
            resolved_revision=_get(d, str, "resolved_revision"),
            resolved_revision_type=_get(d, str, "resolved_revision_type"),
        )

    def _to_dict(self):
        # type: () -> Dict[str, Any]
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
            resolved_revision=self.resolved_revision,
            resolved_revision_type=self.resolved_revision_type,
        )


class ArchiveInfo(object):
    name = "archive_info"

    def __init__(
        self,
        hash=None,  # type: Optional[str]
    ):
        self.hash = hash

    @classmethod
    def _from_dict(cls, d):
        # type: (Optional[Dict[str, Any]]) -> Optional[ArchiveInfo]
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"))

    def _to_dict(self):
        # type: () -> Dict[str, Any]
        return _filter_none(hash=self.hash)


class DirInfo(object):
    name = "dir_info"

    def __init__(
        self,
        editable=False,  # type: bool
    ):
        self.editable = editable

    @classmethod
    def _from_dict(cls, d):
        # type: (Optional[Dict[str, Any]]) -> Optional[DirInfo]
        if d is None:
            return None
        return cls(
            editable=_get_required(d, bool, "editable", default=False)
        )

    def _to_dict(self):
        # type: () -> Dict[str, Any]
        return _filter_none(editable=self.editable or None)


if MYPY_CHECK_RUNNING:
    InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]


class DirectUrl(object):

    def __init__(
        self,
        url,  # type: str
        info,  # type: InfoType
        subdirectory=None,  # type: Optional[str]
    ):
        self.url = url
        self.info = info
        self.subdirectory = subdirectory

    def _remove_auth_from_netloc(self, netloc):
        # type: (str) -> str
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo) and
            self.info.vcs == "git" and
            user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
            return netloc
        return netloc_no_user_pass

    @property
    def redacted_url(self):
        # type: () -> str
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        purl = urllib_parse.urlsplit(self.url)
        netloc = self._remove_auth_from_netloc(purl.netloc)
        surl = urllib_parse.urlunsplit(
            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
        )
        return surl

    def validate(self):
        # type: () -> None
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d):
        # type: (Dict[str, Any]) -> DirectUrl
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self):
        # type: () -> Dict[str, Any]
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s):
        # type: (str) -> DirectUrl
        return cls.from_dict(json.loads(s))

    def to_json(self):
        # type: () -> str
        return json.dumps(self.to_dict(), sort_keys=True)
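As an illustration (not part of the commit), the DirectUrl model above round-trips PEP 610 metadata through JSON; the repository URL and commit id below are made up:

    from pip._internal.models.direct_url import DirectUrl, VcsInfo

    direct_url = DirectUrl(
        url="https://github.com/pypa/pip.git",
        info=VcsInfo(vcs="git", commit_id="0123456789abcdef"),
    )
    print(direct_url.to_json())
    # {"url": "https://github.com/pypa/pip.git", "vcs_info": {"commit_id": "0123456789abcdef", "vcs": "git"}}

    # from_json()/from_dict() re-check the structure; validate() is just a
    # to_dict()/from_dict() round trip and raises DirectUrlValidationError on bad data.
    DirectUrl.from_json(direct_url.to_json()).validate()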
92
venv/Lib/site-packages/pip/_internal/models/format_control.py
Normal file
@@ -0,0 +1,92 @@
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import CommandError
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, FrozenSet


class FormatControl(object):
    """Helper for managing formats from which a package can be installed.
    """

    __slots__ = ["no_binary", "only_binary"]

    def __init__(self, no_binary=None, only_binary=None):
        # type: (Optional[Set[str]], Optional[Set[str]]) -> None
        if no_binary is None:
            no_binary = set()
        if only_binary is None:
            only_binary = set()

        self.no_binary = no_binary
        self.only_binary = only_binary

    def __eq__(self, other):
        # type: (object) -> bool
        if not isinstance(other, self.__class__):
            return NotImplemented

        if self.__slots__ != other.__slots__:
            return False

        return all(
            getattr(self, k) == getattr(other, k)
            for k in self.__slots__
        )

    def __ne__(self, other):
        # type: (object) -> bool
        return not self.__eq__(other)

    def __repr__(self):
        # type: () -> str
        return "{}({}, {})".format(
            self.__class__.__name__,
            self.no_binary,
            self.only_binary
        )

    @staticmethod
    def handle_mutual_excludes(value, target, other):
        # type: (str, Set[str], Set[str]) -> None
        if value.startswith('-'):
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        new = value.split(',')
        while ':all:' in new:
            other.clear()
            target.clear()
            target.add(':all:')
            del new[:new.index(':all:') + 1]
            # Without a none, we want to discard everything as :all: covers it
            if ':none:' not in new:
                return
        for name in new:
            if name == ':none:':
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    def get_allowed_formats(self, canonical_name):
        # type: (str) -> FrozenSet[str]
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard('source')
        elif canonical_name in self.no_binary:
            result.discard('binary')
        elif ':all:' in self.only_binary:
            result.discard('source')
        elif ':all:' in self.no_binary:
            result.discard('binary')
        return frozenset(result)

    def disallow_binaries(self):
        # type: () -> None
        self.handle_mutual_excludes(
            ':all:', self.no_binary, self.only_binary,
        )
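For context (an illustrative sketch, not part of the commit), FormatControl above is typically driven by --no-binary/--only-binary style option values:

    from pip._internal.models.format_control import FormatControl

    fc = FormatControl()
    # Roughly what pip does for a "--no-binary example" style option value:
    FormatControl.handle_mutual_excludes("example", fc.no_binary, fc.only_binary)

    print(fc.get_allowed_formats("example"))   # frozenset({'source'})
    print(fc.get_allowed_formats("numpy"))     # both 'binary' and 'source' still allowed

    fc.disallow_binaries()                     # equivalent to "--no-binary :all:"
    print(fc.get_allowed_formats("numpy"))     # frozenset({'source'})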
34
venv/Lib/site-packages/pip/_internal/models/index.py
Normal file
@@ -0,0 +1,34 @@
from pip._vendor.six.moves.urllib import parse as urllib_parse


class PackageIndex(object):
    """Represents a Package Index and provides easier access to endpoints
    """

    __slots__ = ['url', 'netloc', 'simple_url', 'pypi_url',
                 'file_storage_domain']

    def __init__(self, url, file_storage_domain):
        # type: (str, str) -> None
        super(PackageIndex, self).__init__()
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        self.simple_url = self._url_for_path('simple')
        self.pypi_url = self._url_for_path('pypi')

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path):
        # type: (str) -> str
        return urllib_parse.urljoin(self.url, path)


PyPI = PackageIndex(
    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
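A quick illustration (not part of the commit) of the endpoints the PackageIndex instances above expose:

    from pip._internal.models.index import PyPI, TestPyPI

    print(PyPI.simple_url)    # https://pypi.org/simple
    print(PyPI.pypi_url)      # https://pypi.org/pypi
    print(TestPyPI.netloc)    # test.pypi.org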
245
venv/Lib/site-packages/pip/_internal/models/link.py
Normal file
@@ -0,0 +1,245 @@
import os
import posixpath
import re

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.misc import (
    redact_auth_from_url,
    split_auth_from_netloc,
    splitext,
)
from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import path_to_url, url_to_path

if MYPY_CHECK_RUNNING:
    from typing import Optional, Text, Tuple, Union
    from pip._internal.index.collector import HTMLPage
    from pip._internal.utils.hashes import Hashes


class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL
    """

    __slots__ = [
        "_parsed_url",
        "_url",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "cache_link_parsing",
    ]

    def __init__(
        self,
        url,  # type: str
        comes_from=None,  # type: Optional[Union[str, HTMLPage]]
        requires_python=None,  # type: Optional[str]
        yanked_reason=None,  # type: Optional[Text]
        cache_link_parsing=True,  # type: bool
    ):
        # type: (...) -> None
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of HTMLPage where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param cache_link_parsing: A flag that is used elsewhere to determine
                                   whether resources retrieved from this link
                                   should be cached. PyPI index urls should
                                   generally have this set to False, for
                                   example.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self._parsed_url = urllib_parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url

        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason

        super(Link, self).__init__(key=url, defining_class=Link)

        self.cache_link_parsing = cache_link_parsing

    def __str__(self):
        # type: () -> str
        if self.requires_python:
            rp = ' (requires-python:{})'.format(self.requires_python)
        else:
            rp = ''
        if self.comes_from:
            return '{} (from {}){}'.format(
                redact_auth_from_url(self._url), self.comes_from, rp)
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self):
        # type: () -> str
        return '<Link {}>'.format(self)

    @property
    def url(self):
        # type: () -> str
        return self._url

    @property
    def filename(self):
        # type: () -> str
        path = self.path.rstrip('/')
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib_parse.unquote(name)
        assert name, (
            'URL {self._url!r} produced no filename'.format(**locals()))
        return name

    @property
    def file_path(self):
        # type: () -> str
        return url_to_path(self.url)

    @property
    def scheme(self):
        # type: () -> str
        return self._parsed_url.scheme

    @property
    def netloc(self):
        # type: () -> str
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self):
        # type: () -> str
        return urllib_parse.unquote(self._parsed_url.path)

    def splitext(self):
        # type: () -> Tuple[str, str]
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        # type: () -> str
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        # type: () -> str
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))

    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        # type: () -> Optional[str]
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        # type: () -> Optional[str]
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self._url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self._url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        # type: () -> str
        return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0])

    @property
    def is_file(self):
        # type: () -> bool
        return self.scheme == 'file'

    def is_existing_dir(self):
        # type: () -> bool
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self):
        # type: () -> bool
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self):
        # type: () -> bool
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self):
        # type: () -> bool
        return self.yanked_reason is not None

    @property
    def has_hash(self):
        # type: () -> bool
        return self.hash_name is not None

    def is_hash_allowed(self, hashes):
        # type: (Optional[Hashes]) -> bool
        """
        Return True if the link has a hash and it is allowed.
        """
        if hashes is None or not self.has_hash:
            return False
        # Assert non-None so mypy knows self.hash_name and self.hash are str.
        assert self.hash_name is not None
        assert self.hash is not None

        return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
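An illustrative sketch (not part of the commit) of the Link model above, using a made-up file URL carrying hash and egg fragments:

    from pip._internal.models.link import Link

    link = Link(
        "https://files.example.org/packages/example-1.0-py3-none-any.whl"
        "#sha256=0123abcd&egg=example"
    )
    print(link.filename)              # example-1.0-py3-none-any.whl
    print(link.is_wheel)              # True
    print(link.hash_name, link.hash)  # sha256 0123abcd
    print(link.egg_fragment)          # example
    print(link.url_without_fragment)  # https://files.example.org/packages/example-1.0-py3-none-any.whl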
31
venv/Lib/site-packages/pip/_internal/models/scheme.py
Normal file
@@ -0,0 +1,31 @@
"""
For types associated with installation schemes.

For a general overview of available schemes and their context, see
https://docs.python.org/3/install/index.html#alternate-installation.
"""


SCHEME_KEYS = ['platlib', 'purelib', 'headers', 'scripts', 'data']


class Scheme(object):
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.
    """

    __slots__ = SCHEME_KEYS

    def __init__(
        self,
        platlib,  # type: str
        purelib,  # type: str
        headers,  # type: str
        scripts,  # type: str
        data,  # type: str
    ):
        self.platlib = platlib
        self.purelib = purelib
        self.headers = headers
        self.scripts = scripts
        self.data = data
135
venv/Lib/site-packages/pip/_internal/models/search_scope.py
Normal file
@@ -0,0 +1,135 @@
import itertools
import logging
import os
import posixpath

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.models.index import PyPI
from pip._internal.utils.compat import has_tls
from pip._internal.utils.misc import normalize_path, redact_auth_from_url
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List


logger = logging.getLogger(__name__)


class SearchScope(object):

    """
    Encapsulates the locations that pip is configured to search.
    """

    __slots__ = ["find_links", "index_urls"]

    @classmethod
    def create(
        cls,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
    ):
        # type: (...) -> SearchScope
        """
        Create a SearchScope object after normalizing the `find_links`.
        """
        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        built_find_links = []  # type: List[str]
        for link in find_links:
            if link.startswith('~'):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            built_find_links.append(link)

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not has_tls():
            for link in itertools.chain(index_urls, built_find_links):
                parsed = urllib_parse.urlparse(link)
                if parsed.scheme == 'https':
                    logger.warning(
                        'pip is configured with locations that require '
                        'TLS/SSL, however the ssl module in Python is not '
                        'available.'
                    )
                    break

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
        )

    def __init__(
        self,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
    ):
        # type: (...) -> None
        self.find_links = find_links
        self.index_urls = index_urls

    def get_formatted_locations(self):
        # type: () -> str
        lines = []
        redacted_index_urls = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            for url in self.index_urls:

                redacted_index_url = redact_auth_from_url(url)

                # Parse the URL
                purl = urllib_parse.urlsplit(redacted_index_url)

                # URL is generally invalid if scheme and netloc is missing
                # there are issues with Python and URL parsing, so this test
                # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
                # always parse invalid URLs correctly - it should raise
                # exceptions for malformed URLs
                if not purl.scheme and not purl.netloc:
                    logger.warning(
                        'The index url "%s" seems invalid, '
                        'please provide a scheme.', redacted_index_url)

                redacted_index_urls.append(redacted_index_url)

            lines.append('Looking in indexes: {}'.format(
                ', '.join(redacted_index_urls)))

        if self.find_links:
            lines.append(
                'Looking in links: {}'.format(', '.join(
                    redact_auth_from_url(url) for url in self.find_links))
            )
        return '\n'.join(lines)

    def get_index_urls_locations(self, project_name):
        # type: (str) -> List[str]
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """

        def mkurl_pypi_url(url):
            # type: (str) -> str
            loc = posixpath.join(
                url,
                urllib_parse.quote(canonicalize_name(project_name)))
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash. Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]
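For illustration (not part of the commit), SearchScope above is what produces the familiar "Looking in indexes/links" report lines; the index URL and credentials here are invented:

    from pip._internal.models.search_scope import SearchScope

    scope = SearchScope.create(
        find_links=["~/wheels"],
        index_urls=["https://user:secret@pypi.example.org/simple"],
    )
    # redact_auth_from_url() hides the password in the report; "~/wheels" is
    # expanded only if the directory actually exists on this machine.
    print(scope.get_formatted_locations())
    # Looking in indexes: https://user:****@pypi.example.org/simple
    # Looking in links: ~/wheels

    print(scope.get_index_urls_locations("Example_Project"))
    # ['https://user:secret@pypi.example.org/simple/example-project/']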
49
venv/Lib/site-packages/pip/_internal/models/selection_prefs.py
Normal file
@@ -0,0 +1,49 @@
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional
    from pip._internal.models.format_control import FormatControl


class SelectionPreferences(object):
    """
    Encapsulates the candidate selection preferences for downloading
    and installing files.
    """

    __slots__ = ['allow_yanked', 'allow_all_prereleases', 'format_control',
                 'prefer_binary', 'ignore_requires_python']

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        allow_yanked,  # type: bool
        allow_all_prereleases=False,  # type: bool
        format_control=None,  # type: Optional[FormatControl]
        prefer_binary=False,  # type: bool
        ignore_requires_python=None,  # type: Optional[bool]
    ):
        # type: (...) -> None
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        self.ignore_requires_python = ignore_requires_python
120
venv/Lib/site-packages/pip/_internal/models/target_python.py
Normal file
@@ -0,0 +1,120 @@
import sys

from pip._internal.utils.compatibility_tags import (
    get_supported,
    version_info_to_nodot,
)
from pip._internal.utils.misc import normalize_version_info
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Optional, Tuple

    from pip._vendor.packaging.tags import Tag


class TargetPython(object):

    """
    Encapsulates the properties of a Python interpreter one is targeting
    for a package install, download, etc.
    """

    __slots__ = [
        "_given_py_version_info",
        "abi",
        "implementation",
        "platform",
        "py_version",
        "py_version_info",
        "_valid_tags",
    ]

    def __init__(
        self,
        platform=None,  # type: Optional[str]
        py_version_info=None,  # type: Optional[Tuple[int, ...]]
        abi=None,  # type: Optional[str]
        implementation=None,  # type: Optional[str]
    ):
        # type: (...) -> None
        """
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abi: A string or None. This is passed to compatibility_tags.py's
            get_supported() function as is.
        :param implementation: A string or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        """
        # Store the given py_version_info for when we call get_supported().
        self._given_py_version_info = py_version_info

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        py_version = '.'.join(map(str, py_version_info[:2]))

        self.abi = abi
        self.implementation = implementation
        self.platform = platform
        self.py_version = py_version
        self.py_version_info = py_version_info

        # This is used to cache the return value of get_tags().
        self._valid_tags = None  # type: Optional[List[Tag]]

    def format_given(self):
        # type: () -> str
        """
        Format the given, non-None attributes for display.
        """
        display_version = None
        if self._given_py_version_info is not None:
            display_version = '.'.join(
                str(part) for part in self._given_py_version_info
            )

        key_values = [
            ('platform', self.platform),
            ('version_info', display_version),
            ('abi', self.abi),
            ('implementation', self.implementation),
        ]
        return ' '.join(
            '{}={!r}'.format(key, value) for key, value in key_values
            if value is not None
        )

    def get_tags(self):
        # type: () -> List[Tag]
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        """
        if self._valid_tags is None:
            # Pass versions=None if no py_version_info was given since
            # versions=None uses special default logic.
            py_version_info = self._given_py_version_info
            if py_version_info is None:
                version = None
            else:
                version = version_info_to_nodot(py_version_info)

            tags = get_supported(
                version=version,
                platform=self.platform,
                abi=self.abi,
                impl=self.implementation,
            )
            self._valid_tags = tags

        return self._valid_tags
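As a sketch (not part of the commit), TargetPython above defaults to the running interpreter and caches the tag list it computes:

    from pip._internal.models.target_python import TargetPython

    tp = TargetPython()              # target the running interpreter
    tags = tp.get_tags()             # PEP 425 tags, most preferred first
    print(len(tags), tags[0])

    tp36 = TargetPython(py_version_info=(3, 6))
    print(tp36.py_version)           # 3.6
    print(tp36.format_given())       # version_info='3.6'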
78
venv/Lib/site-packages/pip/_internal/models/wheel.py
Normal file
@@ -0,0 +1,78 @@
"""Represents a wheel file and provides access to the various parts of the
name that have meaning.
"""
import re

from pip._vendor.packaging.tags import Tag

from pip._internal.exceptions import InvalidWheelFilename
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List


class Wheel(object):
    """A wheel file"""

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE
    )

    def __init__(self, filename):
        # type: (str) -> None
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(
                "{} is not a valid wheel filename.".format(filename)
            )
        self.filename = filename
        self.name = wheel_info.group('name').replace('_', '-')
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group('ver').replace('_', '-')
        self.build_tag = wheel_info.group('build')
        self.pyversions = wheel_info.group('pyver').split('.')
        self.abis = wheel_info.group('abi').split('.')
        self.plats = wheel_info.group('plat').split('.')

        # All the tag combinations from this file
        self.file_tags = {
            Tag(x, y, z) for x in self.pyversions
            for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self):
        # type: () -> List[str]
        """Return the wheel's tags as a sorted list of strings."""
        return sorted(str(tag) for tag in self.file_tags)

    def support_index_min(self, tags):
        # type: (List[Tag]) -> int
        """Return the lowest index that one of the wheel's file_tag combinations
        achieves in the given list of supported tags.

        For example, if there are 8 supported tags and one of the file tags
        is first in the list, then return 0.

        :param tags: the PEP 425 tags to check the wheel against, in order
            with most preferred first.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(tags.index(tag) for tag in self.file_tags if tag in tags)

    def supported(self, tags):
        # type: (List[Tag]) -> bool
        """Return whether the wheel is compatible with one of the given tags.

        :param tags: the PEP 425 tags to check the wheel against.
        """
        return not self.file_tags.isdisjoint(tags)
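Finally, an illustrative sketch (not part of the commit) of the wheel filename parsing added above, using the pip 20.2.3 wheel name as an example:

    from pip._internal.models.wheel import Wheel
    from pip._vendor.packaging.tags import Tag

    w = Wheel("pip-20.2.3-py2.py3-none-any.whl")
    print(w.name, w.version)              # pip 20.2.3
    print(w.pyversions, w.abis, w.plats)  # ['py2', 'py3'] ['none'] ['any']
    print(w.get_formatted_file_tags())    # ['py2-none-any', 'py3-none-any']

    # support_index_min()/supported() rank the wheel against a target tag list:
    tags = [Tag("py3", "none", "any"), Tag("py2", "none", "any")]
    print(w.supported(tags))              # True
    print(w.support_index_min(tags))      # 0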