Fixed database typo and removed unnecessary class identifier.
This commit is contained in:
parent
00ad49a143
commit
45fb349a7d
5098 changed files with 952558 additions and 85 deletions
14
venv/Lib/site-packages/scipy/_lib/__init__.py
Normal file
@@ -0,0 +1,14 @@
"""
Module containing private utility functions
===========================================

The ``scipy._lib`` namespace is empty (for now). Tests for all
utilities in submodules of ``_lib`` can be run with::

    from scipy import _lib
    _lib.test()

"""
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
Binary files not shown.
227
venv/Lib/site-packages/scipy/_lib/_ccallback.py
Normal file
@@ -0,0 +1,227 @@
from . import _ccallback_c

import ctypes

PyCFuncPtr = ctypes.CFUNCTYPE(ctypes.c_void_p).__bases__[0]

ffi = None

class CData(object):
    pass

def _import_cffi():
    global ffi, CData

    if ffi is not None:
        return

    try:
        import cffi
        ffi = cffi.FFI()
        CData = ffi.CData
    except ImportError:
        ffi = False


class LowLevelCallable(tuple):
    """
    Low-level callback function.

    Parameters
    ----------
    function : {PyCapsule, ctypes function pointer, cffi function pointer}
        Low-level callback function.
    user_data : {PyCapsule, ctypes void pointer, cffi void pointer}
        User data to pass on to the callback function.
    signature : str, optional
        Signature of the function. If omitted, determined from *function*,
        if possible.

    Attributes
    ----------
    function
        Callback function given.
    user_data
        User data given.
    signature
        Signature of the function.

    Methods
    -------
    from_cython
        Class method for constructing callables from Cython C-exported
        functions.

    Notes
    -----
    The argument ``function`` can be one of:

    - PyCapsule, whose name contains the C function signature
    - ctypes function pointer
    - cffi function pointer

    The signature of the low-level callback must match one of those expected
    by the routine it is passed to.

    If constructing low-level functions from a PyCapsule, the name of the
    capsule must be the corresponding signature, in the format::

        return_type (arg1_type, arg2_type, ...)

    For example::

        "void (double)"
        "double (double, int *, void *)"

    The context of a PyCapsule passed in as ``function`` is used as ``user_data``,
    if an explicit value for ``user_data`` was not given.

    """

    # Make the class immutable
    __slots__ = ()

    def __new__(cls, function, user_data=None, signature=None):
        # We need to hold a reference to the function & user data,
        # to prevent them going out of scope
        item = cls._parse_callback(function, user_data, signature)
        return tuple.__new__(cls, (item, function, user_data))

    def __repr__(self):
        return "LowLevelCallable({!r}, {!r})".format(self.function, self.user_data)

    @property
    def function(self):
        return tuple.__getitem__(self, 1)

    @property
    def user_data(self):
        return tuple.__getitem__(self, 2)

    @property
    def signature(self):
        return _ccallback_c.get_capsule_signature(tuple.__getitem__(self, 0))

    def __getitem__(self, idx):
        raise ValueError()

    @classmethod
    def from_cython(cls, module, name, user_data=None, signature=None):
        """
        Create a low-level callback function from an exported Cython function.

        Parameters
        ----------
        module : module
            Cython module where the exported function resides
        name : str
            Name of the exported function
        user_data : {PyCapsule, ctypes void pointer, cffi void pointer}, optional
            User data to pass on to the callback function.
        signature : str, optional
            Signature of the function. If omitted, determined from *function*.

        """
        try:
            function = module.__pyx_capi__[name]
        except AttributeError:
            raise ValueError("Given module is not a Cython module with __pyx_capi__ attribute")
        except KeyError:
            raise ValueError("No function {!r} found in __pyx_capi__ of the module".format(name))
        return cls(function, user_data, signature)

    @classmethod
    def _parse_callback(cls, obj, user_data=None, signature=None):
        _import_cffi()

        if isinstance(obj, LowLevelCallable):
            func = tuple.__getitem__(obj, 0)
        elif isinstance(obj, PyCFuncPtr):
            func, signature = _get_ctypes_func(obj, signature)
        elif isinstance(obj, CData):
            func, signature = _get_cffi_func(obj, signature)
        elif _ccallback_c.check_capsule(obj):
            func = obj
        else:
            raise ValueError("Given input is not a callable or a low-level callable (pycapsule/ctypes/cffi)")

        if isinstance(user_data, ctypes.c_void_p):
            context = _get_ctypes_data(user_data)
        elif isinstance(user_data, CData):
            context = _get_cffi_data(user_data)
        elif user_data is None:
            context = 0
        elif _ccallback_c.check_capsule(user_data):
            context = user_data
        else:
            raise ValueError("Given user data is not a valid low-level void* pointer (pycapsule/ctypes/cffi)")

        return _ccallback_c.get_raw_capsule(func, signature, context)


#
# ctypes helpers
#

def _get_ctypes_func(func, signature=None):
    # Get function pointer
    func_ptr = ctypes.cast(func, ctypes.c_void_p).value

    # Construct function signature
    if signature is None:
        signature = _typename_from_ctypes(func.restype) + " ("
        for j, arg in enumerate(func.argtypes):
            if j == 0:
                signature += _typename_from_ctypes(arg)
            else:
                signature += ", " + _typename_from_ctypes(arg)
        signature += ")"

    return func_ptr, signature


def _typename_from_ctypes(item):
    if item is None:
        return "void"
    elif item is ctypes.c_void_p:
        return "void *"

    name = item.__name__

    pointer_level = 0
    while name.startswith("LP_"):
        pointer_level += 1
        name = name[3:]

    if name.startswith('c_'):
        name = name[2:]

    if pointer_level > 0:
        name += " " + "*"*pointer_level

    return name


def _get_ctypes_data(data):
    # Get voidp pointer
    return ctypes.cast(data, ctypes.c_void_p).value


#
# CFFI helpers
#

def _get_cffi_func(func, signature=None):
    # Get function pointer
    func_ptr = ffi.cast('uintptr_t', func)

    # Get signature
    if signature is None:
        signature = ffi.getctype(ffi.typeof(func)).replace('(*)', ' ')

    return func_ptr, signature


def _get_cffi_data(data):
    # Get pointer
    return ffi.cast('uintptr_t', data)
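Illustrative usage, not part of the committed file: a minimal sketch of wrapping a ctypes function pointer in a LowLevelCallable. It assumes scipy is installed; a real use case would pass a compiled C function rather than a ctypes-wrapped Python lambda.

import ctypes
from scipy import LowLevelCallable  # public re-export of the class above

# Build a ctypes function pointer with C signature "double (double)".
CB_TYPE = ctypes.CFUNCTYPE(ctypes.c_double, ctypes.c_double)
c_square = CB_TYPE(lambda x: x * x)  # keep a reference so it isn't collected

callback = LowLevelCallable(c_square)
print(callback.signature)  # "double (double)", derived via _get_ctypes_func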
BIN
venv/Lib/site-packages/scipy/_lib/_ccallback_c.cp36-win32.pyd
Normal file
Binary file not shown.
BIN
venv/Lib/site-packages/scipy/_lib/_fpumode.cp36-win32.pyd
Normal file
Binary file not shown.
105
venv/Lib/site-packages/scipy/_lib/_gcutils.py
Normal file
@@ -0,0 +1,105 @@
"""
Module for testing automatic garbage collection of objects

.. autosummary::
   :toctree: generated/

   set_gc_state - enable or disable garbage collection
   gc_state - context manager for given state of garbage collector
   assert_deallocated - context manager to check for circular references on object

"""
import weakref
import gc
import sys

from contextlib import contextmanager

__all__ = ['set_gc_state', 'gc_state', 'assert_deallocated']


IS_PYPY = '__pypy__' in sys.modules


class ReferenceError(AssertionError):
    pass


def set_gc_state(state):
    """ Set status of garbage collector """
    if gc.isenabled() == state:
        return
    if state:
        gc.enable()
    else:
        gc.disable()


@contextmanager
def gc_state(state):
    """ Context manager to set state of garbage collector to `state`

    Parameters
    ----------
    state : bool
        True for gc enabled, False for disabled

    Examples
    --------
    >>> with gc_state(False):
    ...     assert not gc.isenabled()
    >>> with gc_state(True):
    ...     assert gc.isenabled()
    """
    orig_state = gc.isenabled()
    set_gc_state(state)
    yield
    set_gc_state(orig_state)


@contextmanager
def assert_deallocated(func, *args, **kwargs):
    """Context manager to check that object is deallocated

    This is useful for checking that an object can be freed directly by
    reference counting, without requiring gc to break reference cycles.
    GC is disabled inside the context manager.

    This check is not available on PyPy.

    Parameters
    ----------
    func : callable
        Callable to create object to check
    \\*args : sequence
        positional arguments to `func` in order to create object to check
    \\*\\*kwargs : dict
        keyword arguments to `func` in order to create object to check

    Examples
    --------
    >>> class C(object): pass
    >>> with assert_deallocated(C) as c:
    ...     # do something
    ...     del c

    >>> class C(object):
    ...     def __init__(self):
    ...         self._circular = self # Make circular reference
    >>> with assert_deallocated(C) as c: #doctest: +IGNORE_EXCEPTION_DETAIL
    ...     # do something
    ...     del c
    Traceback (most recent call last):
        ...
    ReferenceError: Remaining reference(s) to object
    """
    if IS_PYPY:
        raise RuntimeError("assert_deallocated is unavailable on PyPy")

    with gc_state(False):
        obj = func(*args, **kwargs)
        ref = weakref.ref(obj)
        yield obj
        del obj
        if ref() is not None:
            raise ReferenceError("Remaining reference(s) to object")
487
venv/Lib/site-packages/scipy/_lib/_pep440.py
Normal file
@@ -0,0 +1,487 @@
"""Utility to compare pep440 compatible version strings.

The LooseVersion and StrictVersion classes that distutils provides don't
work; they don't recognize anything like alpha/beta/rc/dev versions.
"""

# Copyright (c) Donald Stufft and individual contributors.
# All rights reserved.

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:

# 1. Redistributions of source code must retain the above copyright notice,
#    this list of conditions and the following disclaimer.

# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

import collections
import itertools
import re


__all__ = [
    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN",
]


# BEGIN packaging/_structures.py


class Infinity(object):
    def __repr__(self):
        return "Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return False

    def __le__(self, other):
        return False

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return True

    def __ge__(self, other):
        return True

    def __neg__(self):
        return NegativeInfinity


Infinity = Infinity()


class NegativeInfinity(object):
    def __repr__(self):
        return "-Infinity"

    def __hash__(self):
        return hash(repr(self))

    def __lt__(self, other):
        return True

    def __le__(self, other):
        return True

    def __eq__(self, other):
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        return False

    def __ge__(self, other):
        return False

    def __neg__(self):
        return Infinity


# BEGIN packaging/version.py


NegativeInfinity = NegativeInfinity()

_Version = collections.namedtuple(
    "_Version",
    ["epoch", "release", "dev", "pre", "post", "local"],
)


def parse(version):
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)


class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.
    """


class _BaseVersion(object):

    def __hash__(self):
        return hash(self._key)

    def __lt__(self, other):
        return self._compare(other, lambda s, o: s < o)

    def __le__(self, other):
        return self._compare(other, lambda s, o: s <= o)

    def __eq__(self, other):
        return self._compare(other, lambda s, o: s == o)

    def __ge__(self, other):
        return self._compare(other, lambda s, o: s >= o)

    def __gt__(self, other):
        return self._compare(other, lambda s, o: s > o)

    def __ne__(self, other):
        return self._compare(other, lambda s, o: s != o)

    def _compare(self, other, method):
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return method(self._key, other._key)


class LegacyVersion(_BaseVersion):

    def __init__(self, version):
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        return self._version

    def __repr__(self):
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        return self._version

    @property
    def base_version(self):
        return self._version

    @property
    def local(self):
        return None

    @property
    def is_prerelease(self):
        return False

    @property
    def is_postrelease(self):
        return False


_legacy_version_component_re = re.compile(
    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)

_legacy_version_replacement_map = {
    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}


def _parse_version_parts(s):
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"


def _legacy_cmpkey(version):
    # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # its adoption of the packaging library.
    parts = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)
    parts = tuple(parts)

    return epoch, parts


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):

    _regex = re.compile(
        r"^\s*" + VERSION_PATTERN + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    def __init__(self, version):
        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(
                match.group("pre_l"),
                match.group("pre_n"),
            ),
            post=_parse_letter_version(
                match.group("post_l"),
                match.group("post_n1") or match.group("post_n2"),
            ),
            dev=_parse_letter_version(
                match.group("dev_l"),
                match.group("dev_n"),
            ),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        # Pre-release
        if self._version.pre is not None:
            parts.append("".join(str(x) for x in self._version.pre))

        # Post-release
        if self._version.post is not None:
            parts.append(".post{0}".format(self._version.post[1]))

        # Development release
        if self._version.dev is not None:
            parts.append(".dev{0}".format(self._version.dev[1]))

        # Local version segment
        if self._version.local is not None:
            parts.append(
                "+{0}".format(".".join(str(x) for x in self._version.local))
            )

        return "".join(parts)

    @property
    def public(self):
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        parts = []

        # Epoch
        if self._version.epoch != 0:
            parts.append("{0}!".format(self._version.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self._version.release))

        return "".join(parts)

    @property
    def local(self):
        version_string = str(self)
        if "+" in version_string:
            return version_string.split("+", 1)[1]

    @property
    def is_prerelease(self):
        return bool(self._version.dev or self._version.pre)

    @property
    def is_postrelease(self):
        return bool(self._version.post)


def _parse_letter_version(letter, number):
    if letter:
        # We assume there is an implicit 0 in a pre-release if there is
        # no numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower-case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume that if we are given a number but not given a letter,
        # then this is using the implicit post release syntax (e.g., 1.0-1)
        letter = "post"

        return letter, int(number)


_local_version_seperators = re.compile(r"[\._-]")


def _parse_local_version(local):
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_seperators.split(local)
        )


def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non-zero, then take the rest,
    # re-reverse it back into the correct order, and make it a tuple and use
    # that for our sorting key.
    release = tuple(
        reversed(list(
            itertools.dropwhile(
                lambda x: x == 0,
                reversed(release),
            )
        ))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre-segment, but we _only_ want to do this
    # if there is no pre- or a post-segment. If we have one of those, then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        pre = -Infinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        pre = Infinity

    # Versions without a post-segment should sort before those with one.
    if post is None:
        post = -Infinity

    # Versions without a development segment should sort after those with one.
    if dev is None:
        dev = Infinity

    if local is None:
        # Versions without a local segment should sort before those with one.
        local = -Infinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alphanumeric segments sort before numeric segments
        # - Alphanumeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        local = tuple(
            (i, "") if isinstance(i, int) else (-Infinity, i)
            for i in local
        )

    return epoch, release, pre, post, dev, local
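Illustrative usage, not part of the committed file: a short sketch of the ordering this module implements (dev < alpha < rc < final < post), assuming scipy is installed.

from scipy._lib._pep440 import Version, parse

assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0rc1") < Version("1.0")
assert parse("1.0.post1") > parse("1.0")
assert Version("1.0rc1").is_prerelease
# Strings that are not valid PEP 440 fall back to LegacyVersion, which
# sorts before all PEP 440 versions (epoch -1 in _legacy_cmpkey).
assert parse("not-a-version") < parse("0.0.1")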
BIN
venv/Lib/site-packages/scipy/_lib/_test_ccallback.cp36-win32.pyd
Normal file
Binary file not shown.
143
venv/Lib/site-packages/scipy/_lib/_testutils.py
Normal file
@@ -0,0 +1,143 @@
"""
Generic test utilities.

"""

import os
import re
import sys


__all__ = ['PytestTester', 'check_free_memory']


class FPUModeChangeWarning(RuntimeWarning):
    """Warning about FPU mode change"""
    pass


class PytestTester(object):
    """
    Pytest test runner entry point.
    """

    def __init__(self, module_name):
        self.module_name = module_name

    def __call__(self, label="fast", verbose=1, extra_argv=None, doctests=False,
                 coverage=False, tests=None, parallel=None):
        import pytest

        module = sys.modules[self.module_name]
        module_path = os.path.abspath(module.__path__[0])

        pytest_args = ['--showlocals', '--tb=short']

        if doctests:
            raise ValueError("Doctests not supported")

        if extra_argv:
            pytest_args += list(extra_argv)

        if verbose and int(verbose) > 1:
            pytest_args += ["-" + "v"*(int(verbose)-1)]

        if coverage:
            pytest_args += ["--cov=" + module_path]

        if label == "fast":
            pytest_args += ["-m", "not slow"]
        elif label != "full":
            pytest_args += ["-m", label]

        if tests is None:
            tests = [self.module_name]

        if parallel is not None and parallel > 1:
            if _pytest_has_xdist():
                pytest_args += ['-n', str(parallel)]
            else:
                import warnings
                warnings.warn('Could not run tests in parallel because '
                              'pytest-xdist plugin is not available.')

        pytest_args += ['--pyargs'] + list(tests)

        try:
            code = pytest.main(pytest_args)
        except SystemExit as exc:
            code = exc.code

        return (code == 0)


def _pytest_has_xdist():
    """
    Check if the pytest-xdist plugin is installed, providing parallel tests
    """
    # Check xdist exists without importing, otherwise pytests emits warnings
    from importlib.util import find_spec
    return find_spec('xdist') is not None


def check_free_memory(free_mb):
    """
    Check *free_mb* of memory is available, otherwise do pytest.skip
    """
    import pytest

    try:
        mem_free = _parse_size(os.environ['SCIPY_AVAILABLE_MEM'])
        msg = '{0} MB memory required, but environment SCIPY_AVAILABLE_MEM={1}'.format(
            free_mb, os.environ['SCIPY_AVAILABLE_MEM'])
    except KeyError:
        mem_free = _get_mem_available()
        if mem_free is None:
            pytest.skip("Could not determine available memory; set SCIPY_AVAILABLE_MEM "
                        "variable to free memory in MB to run the test.")
        msg = '{0} MB memory required, but {1} MB available'.format(
            free_mb, mem_free/1e6)

    if mem_free < free_mb * 1e6:
        pytest.skip(msg)


def _parse_size(size_str):
    suffixes = {'': 1e6,
                'b': 1.0,
                'k': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12,
                'kb': 1e3, 'Mb': 1e6, 'Gb': 1e9, 'Tb': 1e12,
                'kib': 1024.0, 'Mib': 1024.0**2, 'Gib': 1024.0**3, 'Tib': 1024.0**4}
    m = re.match(r'^\s*(\d+)\s*({0})\s*$'.format('|'.join(suffixes.keys())),
                 size_str,
                 re.I)
    if not m or m.group(2) not in suffixes:
        raise ValueError("Invalid size string")

    return float(m.group(1)) * suffixes[m.group(2)]


def _get_mem_available():
    """
    Get information about memory available, not counting swap.
    """
    try:
        import psutil
        return psutil.virtual_memory().available
    except (ImportError, AttributeError):
        pass

    if sys.platform.startswith('linux'):
        info = {}
        with open('/proc/meminfo', 'r') as f:
            for line in f:
                p = line.split()
                info[p[0].strip(':').lower()] = float(p[1]) * 1e3

        if 'memavailable' in info:
            # Linux >= 3.14
            return info['memavailable']
        else:
            return info['memfree'] + info['cached']

    return None
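Illustrative usage, not part of the committed file: PytestTester is the runner behind the per-module ``test`` attribute created in ``__init__.py`` above.

from scipy import _lib

# Run the "fast" subset (pytest -m "not slow") for scipy._lib;
# returns True when pytest exits cleanly.
ok = _lib.test(label="fast", verbose=2)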
58
venv/Lib/site-packages/scipy/_lib/_threadsafety.py
Normal file
@@ -0,0 +1,58 @@
import threading

import scipy._lib.decorator


__all__ = ['ReentrancyError', 'ReentrancyLock', 'non_reentrant']


class ReentrancyError(RuntimeError):
    pass


class ReentrancyLock(object):
    """
    Threading lock that raises an exception for reentrant calls.

    Calls from different threads are serialized, and nested calls from the
    same thread result in an error.

    The object can be used as a context manager or to decorate functions
    via the decorate() method.

    """

    def __init__(self, err_msg):
        self._rlock = threading.RLock()
        self._entered = False
        self._err_msg = err_msg

    def __enter__(self):
        self._rlock.acquire()
        if self._entered:
            self._rlock.release()
            raise ReentrancyError(self._err_msg)
        self._entered = True

    def __exit__(self, type, value, traceback):
        self._entered = False
        self._rlock.release()

    def decorate(self, func):
        def caller(func, *a, **kw):
            with self:
                return func(*a, **kw)
        return scipy._lib.decorator.decorate(func, caller)


def non_reentrant(err_msg=None):
    """
    Decorate a function with a threading lock and prevent reentrant calls.
    """
    def decorator(func):
        msg = err_msg
        if msg is None:
            msg = "%s is not re-entrant" % func.__name__
        lock = ReentrancyLock(msg)
        return lock.decorate(func)
    return decorator
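Illustrative usage, not part of the committed file: a minimal sketch of the ``non_reentrant`` decorator.

from scipy._lib._threadsafety import non_reentrant, ReentrancyError

@non_reentrant()
def solve(recurse=False):
    if recurse:
        solve()  # nested call from the same thread
    return "done"

solve()                  # fine: returns "done"
try:
    solve(recurse=True)  # the nested call raises
except ReentrancyError as exc:
    print(exc)           # "solve is not re-entrant"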
86
venv/Lib/site-packages/scipy/_lib/_tmpdirs.py
Normal file
@@ -0,0 +1,86 @@
''' Contexts for *with* statement providing temporary directories
'''
import os
from contextlib import contextmanager
from shutil import rmtree
from tempfile import mkdtemp


@contextmanager
def tempdir():
    """Create and return a temporary directory. This has the same
    behavior as mkdtemp but can be used as a context manager.

    Upon exiting the context, the directory and everything contained
    in it are removed.

    Examples
    --------
    >>> import os
    >>> with tempdir() as tmpdir:
    ...     fname = os.path.join(tmpdir, 'example_file.txt')
    ...     with open(fname, 'wt') as fobj:
    ...         _ = fobj.write('a string\\n')
    >>> os.path.exists(tmpdir)
    False
    """
    d = mkdtemp()
    yield d
    rmtree(d)


@contextmanager
def in_tempdir():
    ''' Create, return, and change directory to a temporary directory

    Examples
    --------
    >>> import os
    >>> my_cwd = os.getcwd()
    >>> with in_tempdir() as tmpdir:
    ...     _ = open('test.txt', 'wt').write('some text')
    ...     assert os.path.isfile('test.txt')
    ...     assert os.path.isfile(os.path.join(tmpdir, 'test.txt'))
    >>> os.path.exists(tmpdir)
    False
    >>> os.getcwd() == my_cwd
    True
    '''
    pwd = os.getcwd()
    d = mkdtemp()
    os.chdir(d)
    yield d
    os.chdir(pwd)
    rmtree(d)


@contextmanager
def in_dir(dir=None):
    """ Change directory to given directory for duration of ``with`` block

    Useful when you want to use `in_tempdir` for the final test, but
    you are still debugging. For example, you may want to do this in the end:

    >>> with in_tempdir() as tmpdir:
    ...     # do something complicated which might break
    ...     pass

    But, indeed, the complicated thing does break, and meanwhile, the
    ``in_tempdir`` context manager wiped out the directory with the
    temporary files that you wanted for debugging. So, while debugging, you
    replace with something like:

    >>> with in_dir() as tmpdir: # Use working directory by default
    ...     # do something complicated which might break
    ...     pass

    You can then look at the temporary file outputs to debug what is happening,
    fix, and finally replace ``in_dir`` with ``in_tempdir`` again.
    """
    cwd = os.getcwd()
    if dir is None:
        yield cwd
        return
    os.chdir(dir)
    yield dir
    os.chdir(cwd)
29
venv/Lib/site-packages/scipy/_lib/_uarray/LICENSE
Normal file
@@ -0,0 +1,29 @@
BSD 3-Clause License

Copyright (c) 2018, Quansight-Labs
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
117
venv/Lib/site-packages/scipy/_lib/_uarray/__init__.py
Normal file
@@ -0,0 +1,117 @@
"""
.. note:
    If you are looking for overrides for NumPy-specific methods, see the
    documentation for :obj:`unumpy`. This page explains how to write
    back-ends and multimethods.

``uarray`` is built around a back-end protocol and overridable multimethods.
It is necessary to define multimethods for back-ends to be able to override them.
See the documentation of :obj:`generate_multimethod` on how to write multimethods.


Let's start with the simplest:

``__ua_domain__`` defines the back-end *domain*. The domain is a period-
separated string consisting of the modules you extend plus the submodule. For
example, if a submodule ``module2.submodule`` extends ``module1``
(i.e., it exposes dispatchables marked as types available in ``module1``),
then the domain string should be ``"module1.module2.submodule"``.


For the purpose of this demonstration, we'll be creating an object and setting
its attributes directly. However, note that you can use a module or your own type
as a backend as well.

>>> class Backend: pass
>>> be = Backend()
>>> be.__ua_domain__ = "ua_examples"

It might be useful at this point to sidetrack to the documentation of
:obj:`generate_multimethod` to find out how to generate a multimethod
overridable by :obj:`uarray`. Needless to say, writing a backend and
creating multimethods are mostly orthogonal activities, and knowing
one doesn't necessarily require knowledge of the other, although it
is certainly helpful. We expect core API designers/specifiers to write the
multimethods, and implementors to override them. But, as is often the case,
similar people write both.

Without further ado, here's an example multimethod:

>>> import uarray as ua
>>> from uarray import Dispatchable
>>> def override_me(a, b):
...     return Dispatchable(a, int),
>>> def override_replacer(args, kwargs, dispatchables):
...     return (dispatchables[0], args[1]), {}
>>> overridden_me = ua.generate_multimethod(
...     override_me, override_replacer, "ua_examples"
... )

Next comes the part about overriding the multimethod. This requires
the ``__ua_function__`` protocol, and the ``__ua_convert__``
protocol. The ``__ua_function__`` protocol has the signature
``(method, args, kwargs)`` where ``method`` is the passed
multimethod, ``args``/``kwargs`` specify the arguments and ``dispatchables``
is the list of converted dispatchables passed in.

>>> def __ua_function__(method, args, kwargs):
...     return method.__name__, args, kwargs
>>> be.__ua_function__ = __ua_function__

The other protocol of interest is the ``__ua_convert__`` protocol. It has the
signature ``(dispatchables, coerce)``. When ``coerce`` is ``False``, conversion
between the formats should ideally be an ``O(1)`` operation, but it means that
no memory copying should be involved, only views of the existing data.

>>> def __ua_convert__(dispatchables, coerce):
...     for d in dispatchables:
...         if d.type is int:
...             if coerce and d.coercible:
...                 yield str(d.value)
...             else:
...                 yield d.value
>>> be.__ua_convert__ = __ua_convert__

Now that we have defined the backend, the next thing to do is to call the multimethod.

>>> with ua.set_backend(be):
...     overridden_me(1, "2")
('override_me', (1, '2'), {})

Note that the marked type has no effect on the actual type of the passed object.
We can also coerce the type of the input.

>>> with ua.set_backend(be, coerce=True):
...     overridden_me(1, "2")
...     overridden_me(1.0, "2")
('override_me', ('1', '2'), {})
('override_me', ('1.0', '2'), {})

Another feature is that if you remove ``__ua_convert__``, the arguments are not
converted at all and it's up to the backend to handle that.

>>> del be.__ua_convert__
>>> with ua.set_backend(be):
...     overridden_me(1, "2")
('override_me', (1, '2'), {})

You also have the option to return ``NotImplemented``, in which case processing moves on
to the next back-end, which, in this case, doesn't exist. The same applies to
``__ua_convert__``.

>>> be.__ua_function__ = lambda *a, **kw: NotImplemented
>>> with ua.set_backend(be):
...     overridden_me(1, "2")
Traceback (most recent call last):
    ...
uarray.backend.BackendNotImplementedError: ...

The last possibility is if we don't have ``__ua_convert__``, in which case the job is left
up to ``__ua_function__``, but putting things back into arrays after conversion will not be
possible.
"""

from ._backend import *

__version__ = '0.5.1+49.g4c3f1d7.scipy'
Binary files not shown.
426
venv/Lib/site-packages/scipy/_lib/_uarray/_backend.py
Normal file
@@ -0,0 +1,426 @@
import typing
import inspect
import functools
from . import _uarray  # type: ignore
import copyreg  # type: ignore
import atexit
import pickle

ArgumentExtractorType = typing.Callable[..., typing.Tuple["Dispatchable", ...]]
ArgumentReplacerType = typing.Callable[
    [typing.Tuple, typing.Dict, typing.Tuple], typing.Tuple[typing.Tuple, typing.Dict]
]

from ._uarray import (  # type: ignore
    BackendNotImplementedError,
    _Function,
    _SkipBackendContext,
    _SetBackendContext,
)

__all__ = [
    "set_backend",
    "set_global_backend",
    "skip_backend",
    "register_backend",
    "clear_backends",
    "create_multimethod",
    "generate_multimethod",
    "_Function",
    "BackendNotImplementedError",
    "Dispatchable",
    "wrap_single_convertor",
    "all_of_type",
    "mark_as",
]


def unpickle_function(mod_name, qname):
    import importlib

    try:
        module = importlib.import_module(mod_name)
        func = getattr(module, qname)
        return func
    except (ImportError, AttributeError) as e:
        from pickle import UnpicklingError

        raise UnpicklingError from e


def pickle_function(func):
    mod_name = getattr(func, "__module__", None)
    qname = getattr(func, "__qualname__", None)

    try:
        test = unpickle_function(mod_name, qname)
    except pickle.UnpicklingError:
        test = None

    if test is not func:
        raise pickle.PicklingError(
            "Can't pickle {}: it's not the same object as {}".format(func, test)
        )

    return unpickle_function, (mod_name, qname)


copyreg.pickle(_Function, pickle_function)
atexit.register(_uarray.clear_all_globals)


def create_multimethod(*args, **kwargs):
    """
    Creates a decorator for generating multimethods.

    This function creates a decorator that can be used with an argument
    extractor in order to generate a multimethod. Other than for the
    argument extractor, all arguments are passed on to
    :obj:`generate_multimethod`.

    See Also
    --------
    generate_multimethod
        Generates a multimethod.
    """

    def wrapper(a):
        return generate_multimethod(a, *args, **kwargs)

    return wrapper


def generate_multimethod(
    argument_extractor: ArgumentExtractorType,
    argument_replacer: ArgumentReplacerType,
    domain: str,
    default: typing.Optional[typing.Callable] = None,
):
    """
    Generates a multimethod.

    Parameters
    ----------
    argument_extractor : ArgumentExtractorType
        A callable which extracts the dispatchable arguments. Extracted arguments
        should be marked by the :obj:`Dispatchable` class. It has the same signature
        as the desired multimethod.
    argument_replacer : ArgumentReplacerType
        A callable with the signature (args, kwargs, dispatchables), which should also
        return an (args, kwargs) pair with the dispatchables replaced inside the args/kwargs.
    domain : str
        A string value indicating the domain of this multimethod.
    default: Optional[Callable], optional
        The default implementation of this multimethod, where ``None`` (the default) specifies
        there is no default implementation.

    Examples
    --------
    In this example, ``a`` is to be dispatched over, so we return it, while marking it as an ``int``.
    The trailing comma is needed because the args have to be returned as an iterable.

    >>> def override_me(a, b):
    ...     return Dispatchable(a, int),

    Next, we define the argument replacer that replaces the dispatchables inside args/kwargs with the
    supplied ones.

    >>> def override_replacer(args, kwargs, dispatchables):
    ...     return (dispatchables[0], args[1]), {}

    Next, we define the multimethod.

    >>> overridden_me = generate_multimethod(
    ...     override_me, override_replacer, "ua_examples"
    ... )

    Notice that there's no default implementation, unless you supply one.

    >>> overridden_me(1, "a")
    Traceback (most recent call last):
        ...
    uarray.backend.BackendNotImplementedError: ...
    >>> overridden_me2 = generate_multimethod(
    ...     override_me, override_replacer, "ua_examples", default=lambda x, y: (x, y)
    ... )
    >>> overridden_me2(1, "a")
    (1, 'a')

    See Also
    --------
    uarray
        See the module documentation for how to override the method by creating backends.
    """
    kw_defaults, arg_defaults, opts = get_defaults(argument_extractor)
    ua_func = _Function(
        argument_extractor,
        argument_replacer,
        domain,
        arg_defaults,
        kw_defaults,
        default,
    )

    return functools.update_wrapper(ua_func, argument_extractor)


def set_backend(backend, coerce=False, only=False):
    """
    A context manager that sets the preferred backend.

    Parameters
    ----------
    backend
        The backend to set.
    coerce
        Whether or not to coerce to a specific backend's types. Implies ``only``.
    only
        Whether or not this should be the last backend to try.

    See Also
    --------
    skip_backend: A context manager that allows skipping of backends.
    set_global_backend: Set a single, global backend for a domain.
    """
    try:
        return backend.__ua_cache__["set", coerce, only]
    except AttributeError:
        backend.__ua_cache__ = {}
    except KeyError:
        pass

    ctx = _SetBackendContext(backend, coerce, only)
    backend.__ua_cache__["set", coerce, only] = ctx
    return ctx


def skip_backend(backend):
    """
    A context manager that allows one to skip a given backend from processing
    entirely. This allows one to use another backend's code in a library that
    is also a consumer of the same backend.

    Parameters
    ----------
    backend
        The backend to skip.

    See Also
    --------
    set_backend: A context manager that allows setting of backends.
    set_global_backend: Set a single, global backend for a domain.
    """
    try:
        return backend.__ua_cache__["skip"]
    except AttributeError:
        backend.__ua_cache__ = {}
    except KeyError:
        pass

    ctx = _SkipBackendContext(backend)
    backend.__ua_cache__["skip"] = ctx
    return ctx


def get_defaults(f):
    sig = inspect.signature(f)
    kw_defaults = {}
    arg_defaults = []
    opts = set()
    for k, v in sig.parameters.items():
        if v.default is not inspect.Parameter.empty:
            kw_defaults[k] = v.default
        if v.kind in (
            inspect.Parameter.POSITIONAL_ONLY,
            inspect.Parameter.POSITIONAL_OR_KEYWORD,
        ):
            arg_defaults.append(v.default)
        opts.add(k)

    return kw_defaults, tuple(arg_defaults), opts


def set_global_backend(backend, coerce=False, only=False):
    """
    This utility method replaces the default backend for permanent use. It
    will be tried in the list of backends automatically, unless the
    ``only`` flag is set on a backend. This will be the first tried
    backend outside the :obj:`set_backend` context manager.

    Note that this method is not thread-safe.

    .. warning::
        We caution library authors against using this function in
        their code. We do *not* support this use-case. This function
        is meant to be used only by users themselves, or by a reference
        implementation, if one exists.

    Parameters
    ----------
    backend
        The backend to register.

    See Also
    --------
    set_backend: A context manager that allows setting of backends.
    skip_backend: A context manager that allows skipping of backends.
    """
    _uarray.set_global_backend(backend, coerce, only)


def register_backend(backend):
    """
    This utility method registers the backend for permanent use. It
    will be tried in the list of backends automatically, unless the
    ``only`` flag is set on a backend.

    Note that this method is not thread-safe.

    Parameters
    ----------
    backend
        The backend to register.
    """
    _uarray.register_backend(backend)


def clear_backends(domain, registered=True, globals=False):
    """
    This utility method clears registered backends.

    .. warning::
        We caution library authors against using this function in
        their code. We do *not* support this use-case. This function
        is meant to be used only by the users themselves.

    .. warning::
        Do NOT use this method inside a multimethod call, or the
        program is likely to crash.

    Parameters
    ----------
    domain : Optional[str]
        The domain for which to de-register backends. ``None`` means
        de-register for all domains.
    registered : bool
        Whether or not to clear registered backends. See :obj:`register_backend`.
    globals : bool
        Whether or not to clear global backends. See :obj:`set_global_backend`.

    See Also
    --------
    register_backend : Register a backend globally.
    set_global_backend : Set a global backend.
    """
    _uarray.clear_backends(domain, registered, globals)


class Dispatchable:
    """
    A utility class which marks an argument with a specific dispatch type.


    Attributes
    ----------
    value
        The value of the Dispatchable.

    type
        The type of the Dispatchable.

    Examples
    --------
    >>> x = Dispatchable(1, str)
    >>> x
    <Dispatchable: type=<class 'str'>, value=1>

    See Also
    --------
    all_of_type
        Marks all unmarked parameters of a function.

    mark_as
        Allows one to create a utility function to mark as a given type.
    """

    def __init__(self, value, dispatch_type, coercible=True):
        self.value = value
        self.type = dispatch_type
        self.coercible = coercible

    def __getitem__(self, index):
        return (self.type, self.value)[index]

    def __str__(self):
        return "<{0}: type={1!r}, value={2!r}>".format(
            type(self).__name__, self.type, self.value
        )

    __repr__ = __str__


def mark_as(dispatch_type):
    """
    Creates a utility function to mark something as a specific type.

    Examples
    --------
    >>> mark_int = mark_as(int)
    >>> mark_int(1)
    <Dispatchable: type=<class 'int'>, value=1>
    """
    return functools.partial(Dispatchable, dispatch_type=dispatch_type)


def all_of_type(arg_type):
    """
    Marks all unmarked arguments as a given type.

    Examples
    --------
    >>> @all_of_type(str)
    ... def f(a, b):
    ...     return a, Dispatchable(b, int)
    >>> f('a', 1)
    (<Dispatchable: type=<class 'str'>, value='a'>, <Dispatchable: type=<class 'int'>, value=1>)
    """

    def outer(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            extracted_args = func(*args, **kwargs)
            return tuple(
                Dispatchable(arg, arg_type)
                if not isinstance(arg, Dispatchable)
                else arg
                for arg in extracted_args
            )

        return inner

    return outer


def wrap_single_convertor(convert_single):
    """
    Wraps a ``__ua_convert__`` defined for a single element to all elements.
    If any of them return ``NotImplemented``, the operation is assumed to be
    undefined.

    Accepts a signature of (value, type, coerce).
    """

    @functools.wraps(convert_single)
    def __ua_convert__(dispatchables, coerce):
        converted = []
        for d in dispatchables:
            c = convert_single(d.value, d.type, coerce and d.coercible)

            if c is NotImplemented:
                return NotImplemented

            converted.append(c)

        return converted

    return __ua_convert__
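Illustrative usage, not part of the committed file: a sketch of the context-object caching performed by ``set_backend``/``skip_backend`` above (assumes the standalone ``uarray`` package, which this module vendors).

import uarray as ua

class Backend:
    __ua_domain__ = "ua_examples"
    def __ua_function__(self, method, args, kwargs):
        return NotImplemented

be = Backend()
# The context manager is built once per (coerce, only) combination and
# cached on the backend object in __ua_cache__:
assert ua.set_backend(be) is ua.set_backend(be)
assert ua.skip_backend(be) is ua.skip_backend(be)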
BIN
venv/Lib/site-packages/scipy/_lib/_uarray/_uarray.cp36-win32.pyd
Normal file
Binary file not shown.
30
venv/Lib/site-packages/scipy/_lib/_uarray/setup.py
Normal file
@@ -0,0 +1,30 @@

def pre_build_hook(build_ext, ext):
    from scipy._build_utils.compiler_helper import (
        set_cxx_flags_hook, try_add_flag)
    cc = build_ext._cxx_compiler
    args = ext.extra_compile_args

    set_cxx_flags_hook(build_ext, ext)

    if cc.compiler_type == 'msvc':
        args.append('/EHsc')
    else:
        try_add_flag(args, cc, '-fvisibility=hidden')


def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration('_uarray', parent_package, top_path)
    config.add_data_files('LICENSE')
    ext = config.add_extension('_uarray',
                               sources=['_uarray_dispatch.cxx'],
                               language='c++')
    ext._pre_build_hook = pre_build_hook
    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
482
venv/Lib/site-packages/scipy/_lib/_util.py
Normal file
@@ -0,0 +1,482 @@
import functools
import operator
import sys
import warnings
import numbers
from collections import namedtuple
from multiprocessing import Pool
import inspect

import numpy as np

try:
    from numpy.random import Generator as Generator
except ImportError:
    class Generator():  # type: ignore[no-redef]
        pass


def _valarray(shape, value=np.nan, typecode=None):
    """Return an array of all values.
    """

    out = np.ones(shape, dtype=bool) * value
    if typecode is not None:
        out = out.astype(typecode)
    if not isinstance(out, np.ndarray):
        out = np.asarray(out)
    return out


def _lazywhere(cond, arrays, f, fillvalue=None, f2=None):
    """
    np.where(cond, x, fillvalue) always evaluates x even where cond is False.
    This one only evaluates f(arr1[cond], arr2[cond], ...).
    For example,

    >>> a, b = np.array([1, 2, 3, 4]), np.array([5, 6, 7, 8])
    >>> def f(a, b):
    ...     return a*b
    >>> _lazywhere(a > 2, (a, b), f, np.nan)
    array([ nan,  nan,  21.,  32.])

    Notice, it assumes that all `arrays` are of the same shape, or can be
    broadcasted together.

    """
    if fillvalue is None:
        if f2 is None:
            raise ValueError("One of (fillvalue, f2) must be given.")
        else:
            fillvalue = np.nan
    else:
        if f2 is not None:
            raise ValueError("Only one of (fillvalue, f2) can be given.")

    arrays = np.broadcast_arrays(*arrays)
    temp = tuple(np.extract(cond, arr) for arr in arrays)
    tcode = np.mintypecode([a.dtype.char for a in arrays])
    out = _valarray(np.shape(arrays[0]), value=fillvalue, typecode=tcode)
    np.place(out, cond, f(*temp))
    if f2 is not None:
        temp = tuple(np.extract(~cond, arr) for arr in arrays)
        np.place(out, ~cond, f2(*temp))

    return out
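
# Usage sketch (not part of the upstream file): with ``f2`` the complementary
# elements are computed lazily as well, instead of using a constant fill value.
#
# >>> a = np.array([1., 2., 3., 4.])
# >>> b = np.array([5., 6., 7., 8.])
# >>> _lazywhere(a > 2, (a, b), f=lambda a, b: a*b, f2=lambda a, b: a+b)
# array([ 6.,  8., 21., 32.])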
def _lazyselect(condlist, choicelist, arrays, default=0):
    """
    Mimic `np.select(condlist, choicelist)`.

    Notice, it assumes that all `arrays` are of the same shape or can be
    broadcasted together.

    All functions in `choicelist` must accept array arguments in the order
    given in `arrays` and must return an array of the same shape as broadcasted
    `arrays`.

    Examples
    --------
    >>> x = np.arange(6)
    >>> np.select([x < 3, x > 3], [x**2, x**3], default=0)
    array([  0,   1,   4,   0,  64, 125])

    >>> _lazyselect([x < 3, x > 3], [lambda x: x**2, lambda x: x**3], (x,))
    array([   0.,    1.,    4.,    0.,   64.,  125.])

    >>> a = -np.ones_like(x)
    >>> _lazyselect([x < 3, x > 3],
    ...             [lambda x, a: x**2, lambda x, a: a * x**3],
    ...             (x, a), default=np.nan)
    array([   0.,    1.,    4.,   nan,  -64., -125.])

    """
    arrays = np.broadcast_arrays(*arrays)
    tcode = np.mintypecode([a.dtype.char for a in arrays])
    out = _valarray(np.shape(arrays[0]), value=default, typecode=tcode)
    for index in range(len(condlist)):
        func, cond = choicelist[index], condlist[index]
        if np.all(cond is False):
            continue
        cond, _ = np.broadcast_arrays(cond, arrays[0])
        temp = tuple(np.extract(cond, arr) for arr in arrays)
        np.place(out, cond, func(*temp))
    return out


def _aligned_zeros(shape, dtype=float, order="C", align=None):
    """Allocate a new ndarray with aligned memory.

    Primary use case for this currently is working around a f2py issue
    in NumPy 1.9.1, where dtype.alignment is such that np.zeros() does
    not necessarily create arrays aligned up to it.

    """
    dtype = np.dtype(dtype)
    if align is None:
        align = dtype.alignment
    if not hasattr(shape, '__len__'):
        shape = (shape,)
    size = functools.reduce(operator.mul, shape) * dtype.itemsize
    buf = np.empty(size + align + 1, np.uint8)
    offset = buf.__array_interface__['data'][0] % align
    if offset != 0:
        offset = align - offset
    # Note: slices producing 0-size arrays do not necessarily change
    # data pointer --- so we use and allocate size+1
    buf = buf[offset:offset+size+1][:-1]
    data = np.ndarray(shape, dtype, buf, order=order)
    data.fill(0)
    return data
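
# Usage sketch (not part of the upstream file): request 64-byte alignment and
# verify it through the array interface, mirroring the check in the test suite.
#
# >>> x = _aligned_zeros((100,), dtype=np.float64, align=64)
# >>> x.__array_interface__['data'][0] % 64
# 0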
def _prune_array(array):
    """Return an array equivalent to the input array. If the input
    array is a view of a much larger array, copy its contents to a
    newly allocated array. Otherwise, return the input unchanged.
    """
    if array.base is not None and array.size < array.base.size // 2:
        return array.copy()
    return array


def prod(iterable):
    """
    Product of a sequence of numbers.

    Faster than np.prod for short lists like array shapes, and does
    not overflow if using Python integers.
    """
    product = 1
    for x in iterable:
        product *= x
    return product


class DeprecatedImport(object):
    """
    Deprecated import with redirection and warning.

    Examples
    --------
    Suppose you previously had in some module::

        from foo import spam

    If this has to be deprecated, do::

        spam = DeprecatedImport("foo.spam", "baz")

    to redirect users to use "baz" module instead.

    """

    def __init__(self, old_module_name, new_module_name):
        self._old_name = old_module_name
        self._new_name = new_module_name
        __import__(self._new_name)
        self._mod = sys.modules[self._new_name]

    def __dir__(self):
        return dir(self._mod)

    def __getattr__(self, name):
        warnings.warn("Module %s is deprecated, use %s instead"
                      % (self._old_name, self._new_name),
                      DeprecationWarning)
        return getattr(self._mod, name)


# copy-pasted from scikit-learn utils/validation.py
def check_random_state(seed):
    """Turn seed into a np.random.RandomState instance

    If seed is None (or np.random), return the RandomState singleton used
    by np.random.
    If seed is an int, return a new RandomState instance seeded with seed.
    If seed is already a RandomState instance, return it.
    If seed is a new-style np.random.Generator, return it.
    Otherwise, raise ValueError.
    """
    if seed is None or seed is np.random:
        return np.random.mtrand._rand
    if isinstance(seed, (numbers.Integral, np.integer)):
        return np.random.RandomState(seed)
    if isinstance(seed, np.random.RandomState):
        return seed
    try:
        # Generator is only available in numpy >= 1.17
        if isinstance(seed, np.random.Generator):
            return seed
    except AttributeError:
        pass
    raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
                     ' instance' % seed)
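
# Usage sketch (not part of the upstream file): every accepted input yields a
# usable generator, so downstream code only calls methods on the result.
#
# >>> check_random_state(None) is np.random.mtrand._rand
# True
# >>> rs = np.random.RandomState(0)
# >>> check_random_state(rs) is rs       # instances pass through
# True
# >>> rs42 = check_random_state(42)      # a fresh RandomState seeded with 42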
def _asarray_validated(a, check_finite=True,
                       sparse_ok=False, objects_ok=False, mask_ok=False,
                       as_inexact=False):
    """
    Helper function for SciPy argument validation.

    Many SciPy linear algebra functions do support arbitrary array-like
    input arguments. Examples of commonly unsupported inputs include
    matrices containing inf/nan, sparse matrix representations, and
    matrices with complicated elements.

    Parameters
    ----------
    a : array_like
        The array-like input.
    check_finite : bool, optional
        Whether to check that the input matrices contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.
        Default: True
    sparse_ok : bool, optional
        True if scipy sparse matrices are allowed.
    objects_ok : bool, optional
        True if arrays with dtype('O') are allowed.
    mask_ok : bool, optional
        True if masked arrays are allowed.
    as_inexact : bool, optional
        True to convert the input array to a np.inexact dtype.

    Returns
    -------
    ret : ndarray
        The converted validated array.

    """
    if not sparse_ok:
        import scipy.sparse
        if scipy.sparse.issparse(a):
            msg = ('Sparse matrices are not supported by this function. '
                   'Perhaps one of the scipy.sparse.linalg functions '
                   'would work instead.')
            raise ValueError(msg)
    if not mask_ok:
        if np.ma.isMaskedArray(a):
            raise ValueError('masked arrays are not supported')
    toarray = np.asarray_chkfinite if check_finite else np.asarray
    a = toarray(a)
    if not objects_ok:
        if a.dtype is np.dtype('O'):
            raise ValueError('object arrays are not supported')
    if as_inexact:
        if not np.issubdtype(a.dtype, np.inexact):
            a = toarray(a, dtype=np.float_)
    return a


# Add a replacement for inspect.getfullargspec()/
# The version below is borrowed from Django,
# https://github.com/django/django/pull/4846.

# Note an inconsistency between inspect.getfullargspec(func) and
# inspect.signature(func). If `func` is a bound method, the latter does *not*
# list `self` as a first argument, while the former *does*.
# Hence, cook up a common ground replacement: `getfullargspec_no_self` which
# mimics `inspect.getfullargspec` but does not list `self`.
#
# This way, the caller code does not need to know whether it uses a legacy
# .getfullargspec or a bright and shiny .signature.

FullArgSpec = namedtuple('FullArgSpec',
                         ['args', 'varargs', 'varkw', 'defaults',
                          'kwonlyargs', 'kwonlydefaults', 'annotations'])


def getfullargspec_no_self(func):
    """inspect.getfullargspec replacement using inspect.signature.

    If func is a bound method, do not list the 'self' parameter.

    Parameters
    ----------
    func : callable
        A callable to inspect

    Returns
    -------
    fullargspec : FullArgSpec(args, varargs, varkw, defaults, kwonlyargs,
                              kwonlydefaults, annotations)

        NOTE: if the first argument of `func` is self, it is *not*, I repeat
        *not*, included in fullargspec.args.
        This is done for consistency between inspect.getargspec() under
        Python 2.x, and inspect.signature() under Python 3.x.

    """
    sig = inspect.signature(func)
    args = [
        p.name for p in sig.parameters.values()
        if p.kind in [inspect.Parameter.POSITIONAL_OR_KEYWORD,
                      inspect.Parameter.POSITIONAL_ONLY]
    ]
    varargs = [
        p.name for p in sig.parameters.values()
        if p.kind == inspect.Parameter.VAR_POSITIONAL
    ]
    varargs = varargs[0] if varargs else None
    varkw = [
        p.name for p in sig.parameters.values()
        if p.kind == inspect.Parameter.VAR_KEYWORD
    ]
    varkw = varkw[0] if varkw else None
    defaults = tuple(
        p.default for p in sig.parameters.values()
        if (p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and
            p.default is not p.empty)
    ) or None
    kwonlyargs = [
        p.name for p in sig.parameters.values()
        if p.kind == inspect.Parameter.KEYWORD_ONLY
    ]
    kwdefaults = {p.name: p.default for p in sig.parameters.values()
                  if p.kind == inspect.Parameter.KEYWORD_ONLY and
                  p.default is not p.empty}
    annotations = {p.name: p.annotation for p in sig.parameters.values()
                   if p.annotation is not p.empty}
    return FullArgSpec(args, varargs, varkw, defaults, kwonlyargs,
                       kwdefaults or None, annotations)
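
# Usage sketch (not part of the upstream file): on a bound method the 'self'
# parameter is omitted, unlike plain inspect.getfullargspec.
#
# >>> class A:
# ...     def f(self, x, y=1):
# ...         pass
# >>> getfullargspec_no_self(A().f).args
# ['x', 'y']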
class MapWrapper(object):
    """
    Parallelisation wrapper for working with map-like callables, such as
    `multiprocessing.Pool.map`.

    Parameters
    ----------
    pool : int or map-like callable
        If `pool` is an integer, then it specifies the number of threads to
        use for parallelization. If ``int(pool) == 1``, then no parallel
        processing is used and the map builtin is used.
        If ``pool == -1``, then the pool will utilize all available CPUs.
        If `pool` is a map-like callable that follows the same
        calling sequence as the built-in map function, then this callable is
        used for parallelization.
    """
    def __init__(self, pool=1):
        self.pool = None
        self._mapfunc = map
        self._own_pool = False

        if callable(pool):
            self.pool = pool
            self._mapfunc = self.pool
        else:
            # user supplies a number
            if int(pool) == -1:
                # use as many processors as possible
                self.pool = Pool()
                self._mapfunc = self.pool.map
                self._own_pool = True
            elif int(pool) == 1:
                pass
            elif int(pool) > 1:
                # use the number of processors requested
                self.pool = Pool(processes=int(pool))
                self._mapfunc = self.pool.map
                self._own_pool = True
            else:
                raise RuntimeError("Number of workers specified must be -1,"
                                   " an int >= 1, or an object with a 'map' method")

    def __enter__(self):
        return self

    def __del__(self):
        self.close()
        self.terminate()

    def terminate(self):
        if self._own_pool:
            self.pool.terminate()

    def join(self):
        if self._own_pool:
            self.pool.join()

    def close(self):
        if self._own_pool:
            self.pool.close()

    def __exit__(self, exc_type, exc_value, traceback):
        if self._own_pool:
            self.pool.close()
            self.pool.terminate()

    def __call__(self, func, iterable):
        # only accept one iterable because that's all Pool.map accepts
        try:
            return self._mapfunc(func, iterable)
        except TypeError:
            # wrong number of arguments
            raise TypeError("The map-like callable must be of the"
                            " form f(func, iterable)")
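
# Usage sketch (not part of the upstream file): used as a context manager so
# any internally created pool is shut down on exit.
#
# >>> import math
# >>> with MapWrapper(pool=2) as mapper:
# ...     results = list(mapper(math.sqrt, [1, 4, 9]))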
def rng_integers(gen, low, high=None, size=None, dtype='int64',
                 endpoint=False):
    """
    Return random integers from low (inclusive) to high (exclusive), or if
    endpoint=True, low (inclusive) to high (inclusive). Replaces
    `RandomState.randint` (with endpoint=False) and
    `RandomState.random_integers` (with endpoint=True).

    Return random integers from the "discrete uniform" distribution of the
    specified dtype. If high is None (the default), then results are from
    0 to low.

    Parameters
    ----------
    gen: {None, np.random.RandomState, np.random.Generator}
        Random number generator. If None, then the np.random.RandomState
        singleton is used.
    low: int or array-like of ints
        Lowest (signed) integers to be drawn from the distribution (unless
        high=None, in which case this parameter is 0 and this value is used
        for high).
    high: int or array-like of ints
        If provided, one above the largest (signed) integer to be drawn from
        the distribution (see above for behavior if high=None). If array-like,
        must contain integer values.
    size: int or tuple of ints, optional
        Output shape. If the given shape is, e.g., (m, n, k), then m * n * k
        samples are drawn. Default is None, in which case a single value is
        returned.
    dtype: {str, dtype}, optional
        Desired dtype of the result. All dtypes are determined by their name,
        i.e., 'int64', 'int', etc, so byteorder is not available and a specific
        precision may have different C types depending on the platform.
        The default value is np.int_.
    endpoint: bool, optional
        If True, sample from the interval [low, high] instead of the default
        [low, high). Defaults to False.

    Returns
    -------
    out: int or ndarray of ints
        size-shaped array of random integers from the appropriate distribution,
        or a single such random int if size not provided.
    """
    if isinstance(gen, Generator):
        return gen.integers(low, high=high, size=size, dtype=dtype,
                            endpoint=endpoint)
    else:
        if gen is None:
            # default is RandomState singleton used by np.random.
            gen = np.random.mtrand._rand
        if endpoint:
            # inclusive of endpoint
            # remember that low and high can be arrays, so don't modify in
            # place
            if high is None:
                return gen.randint(low + 1, size=size, dtype=dtype)
            if high is not None:
                return gen.randint(low, high=high + 1, size=size, dtype=dtype)

        # exclusive
        return gen.randint(low, high=high, size=size, dtype=dtype)
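
# Usage sketch (not part of the upstream file): the same call works for both
# generator types, which is the point of the wrapper.
#
# >>> x = rng_integers(np.random.RandomState(0), 0, 10, size=3)   # randint path
# >>> y = rng_integers(np.random.default_rng(0), 0, 10, size=3)   # integers path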
422
venv/Lib/site-packages/scipy/_lib/decorator.py
Normal file
@@ -0,0 +1,422 @@
# ######################### LICENSE ############################ #

# Copyright (c) 2005-2015, Michele Simionato
# All rights reserved.

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:

# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# Redistributions in bytecode form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.

"""
Decorator module, see https://pypi.python.org/pypi/decorator
for the documentation.
"""
import re
import sys
import inspect
import operator
import itertools
import collections

__version__ = '4.0.5'

if sys.version >= '3':
    from inspect import getfullargspec

    def get_init(cls):
        return cls.__init__
else:
    class getfullargspec(object):
        "A quick and dirty replacement for getfullargspec for Python 2.x"
        def __init__(self, f):
            self.args, self.varargs, self.varkw, self.defaults = \
                inspect.getargspec(f)
            self.kwonlyargs = []
            self.kwonlydefaults = None

        def __iter__(self):
            yield self.args
            yield self.varargs
            yield self.varkw
            yield self.defaults

    getargspec = inspect.getargspec

    def get_init(cls):
        return cls.__init__.__func__

# getargspec has been deprecated in Python 3.5
ArgSpec = collections.namedtuple(
    'ArgSpec', 'args varargs varkw defaults')


def getargspec(f):
    """A replacement for inspect.getargspec"""
    spec = getfullargspec(f)
    return ArgSpec(spec.args, spec.varargs, spec.varkw, spec.defaults)


DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(')


# basic functionality
class FunctionMaker(object):
    """
    An object with the ability to create functions with a given signature.
    It has attributes name, doc, module, signature, defaults, dict, and
    methods update and make.
    """

    # Atomic get-and-increment provided by the GIL
    _compile_count = itertools.count()

    def __init__(self, func=None, name=None, signature=None,
                 defaults=None, doc=None, module=None, funcdict=None):
        self.shortsignature = signature
        if func:
            # func can be a class or a callable, but not an instance method
            self.name = func.__name__
            if self.name == '<lambda>':  # small hack for lambda functions
                self.name = '_lambda_'
            self.doc = func.__doc__
            self.module = func.__module__
            if inspect.isfunction(func):
                argspec = getfullargspec(func)
                self.annotations = getattr(func, '__annotations__', {})
                for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
                          'kwonlydefaults'):
                    setattr(self, a, getattr(argspec, a))
                for i, arg in enumerate(self.args):
                    setattr(self, 'arg%d' % i, arg)
                if sys.version < '3':  # easy way
                    self.shortsignature = self.signature = (
                        inspect.formatargspec(
                            formatvalue=lambda val: "", *argspec)[1:-1])
                else:  # Python 3 way
                    allargs = list(self.args)
                    allshortargs = list(self.args)
                    if self.varargs:
                        allargs.append('*' + self.varargs)
                        allshortargs.append('*' + self.varargs)
                    elif self.kwonlyargs:
                        allargs.append('*')  # single star syntax
                    for a in self.kwonlyargs:
                        allargs.append('%s=None' % a)
                        allshortargs.append('%s=%s' % (a, a))
                    if self.varkw:
                        allargs.append('**' + self.varkw)
                        allshortargs.append('**' + self.varkw)
                    self.signature = ', '.join(allargs)
                    self.shortsignature = ', '.join(allshortargs)
                self.dict = func.__dict__.copy()
        # func=None happens when decorating a caller
        if name:
            self.name = name
        if signature is not None:
            self.signature = signature
        if defaults:
            self.defaults = defaults
        if doc:
            self.doc = doc
        if module:
            self.module = module
        if funcdict:
            self.dict = funcdict
        # check existence required attributes
        assert hasattr(self, 'name')
        if not hasattr(self, 'signature'):
            raise TypeError('You are decorating a non-function: %s' % func)

    def update(self, func, **kw):
        "Update the signature of func with the data in self"
        func.__name__ = self.name
        func.__doc__ = getattr(self, 'doc', None)
        func.__dict__ = getattr(self, 'dict', {})
        func.__defaults__ = getattr(self, 'defaults', ())
        func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
        func.__annotations__ = getattr(self, 'annotations', None)
        try:
            frame = sys._getframe(3)
        except AttributeError:  # for IronPython and similar implementations
            callermodule = '?'
        else:
            callermodule = frame.f_globals.get('__name__', '?')
        func.__module__ = getattr(self, 'module', callermodule)
        func.__dict__.update(kw)

    def make(self, src_templ, evaldict=None, addsource=False, **attrs):
        "Make a new function from a given template and update the signature"
        src = src_templ % vars(self)  # expand name and signature
        evaldict = evaldict or {}
        mo = DEF.match(src)
        if mo is None:
            raise SyntaxError('not a valid function template\n%s' % src)
        name = mo.group(1)  # extract the function name
        names = set([name] + [arg.strip(' *') for arg in
                              self.shortsignature.split(',')])
        for n in names:
            if n in ('_func_', '_call_'):
                raise NameError('%s is overridden in\n%s' % (n, src))
        if not src.endswith('\n'):  # add a newline just for safety
            src += '\n'  # this is needed in old versions of Python

        # Ensure each generated function has a unique filename for profilers
        # (such as cProfile) that depend on the tuple of (<filename>,
        # <definition line>, <function name>) being unique.
        filename = '<decorator-gen-%d>' % (next(self._compile_count),)
        try:
            code = compile(src, filename, 'single')
            exec(code, evaldict)
        except:  # noqa: E722
            print('Error in generated code:', file=sys.stderr)
            print(src, file=sys.stderr)
            raise
        func = evaldict[name]
        if addsource:
            attrs['__source__'] = src
        self.update(func, **attrs)
        return func

    @classmethod
    def create(cls, obj, body, evaldict, defaults=None,
               doc=None, module=None, addsource=True, **attrs):
        """
        Create a function from the strings name, signature, and body.
        evaldict is the evaluation dictionary. If addsource is true, an
        attribute __source__ is added to the result. The attributes attrs
        are added, if any.
        """
        if isinstance(obj, str):  # "name(signature)"
            name, rest = obj.strip().split('(', 1)
            signature = rest[:-1]  # strip a right parens
            func = None
        else:  # a function
            name = None
            signature = None
            func = obj
        self = cls(func, name, signature, defaults, doc, module)
        ibody = '\n'.join('    ' + line for line in body.splitlines())
        return self.make('def %(name)s(%(signature)s):\n' + ibody,
                         evaldict, addsource, **attrs)


def decorate(func, caller):
    """
    decorate(func, caller) decorates a function using a caller.
    """
    evaldict = func.__globals__.copy()
    evaldict['_call_'] = caller
    evaldict['_func_'] = func
    fun = FunctionMaker.create(
        func, "return _call_(_func_, %(shortsignature)s)",
        evaldict, __wrapped__=func)
    if hasattr(func, '__qualname__'):
        fun.__qualname__ = func.__qualname__
    return fun
def decorator(caller, _func=None):
    """decorator(caller) converts a caller function into a decorator"""
    if _func is not None:  # return a decorated function
        # this is obsolete behavior; you should use decorate instead
        return decorate(_func, caller)
    # else return a decorator function
    if inspect.isclass(caller):
        name = caller.__name__.lower()
        callerfunc = get_init(caller)
        doc = 'decorator(%s) converts functions/generators into ' \
            'factories of %s objects' % (caller.__name__, caller.__name__)
    elif inspect.isfunction(caller):
        if caller.__name__ == '<lambda>':
            name = '_lambda_'
        else:
            name = caller.__name__
        callerfunc = caller
        doc = caller.__doc__
    else:  # assume caller is an object with a __call__ method
        name = caller.__class__.__name__.lower()
        callerfunc = caller.__call__.__func__
        doc = caller.__call__.__doc__
    evaldict = callerfunc.__globals__.copy()
    evaldict['_call_'] = caller
    evaldict['_decorate_'] = decorate
    return FunctionMaker.create(
        '%s(func)' % name, 'return _decorate_(func, _call_)',
        evaldict, doc=doc, module=caller.__module__,
        __wrapped__=caller)
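
# Usage sketch (not part of the upstream file): a caller receives the wrapped
# function plus its arguments, and the generated wrapper preserves the original
# signature, which is why scipy vendors this module.
#
# >>> def trace(f, *args, **kw):
# ...     print('calling %s' % f.__name__)
# ...     return f(*args, **kw)
# >>> @decorator(trace)
# ... def add(a, b):
# ...     return a + b
# >>> add(1, 2)
# calling add
# 3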
# ####################### contextmanager ####################### #

try:  # Python >= 3.2
    from contextlib import _GeneratorContextManager
except ImportError:  # Python >= 2.5
    from contextlib import GeneratorContextManager as _GeneratorContextManager


class ContextManager(_GeneratorContextManager):
    def __call__(self, func):
        """Context manager decorator"""
        return FunctionMaker.create(
            func, "with _self_: return _func_(%(shortsignature)s)",
            dict(_self_=self, _func_=func), __wrapped__=func)


init = getfullargspec(_GeneratorContextManager.__init__)
n_args = len(init.args)
if n_args == 2 and not init.varargs:  # (self, genobj) Python 2.7
    def __init__(self, g, *a, **k):
        return _GeneratorContextManager.__init__(self, g(*a, **k))
    ContextManager.__init__ = __init__
elif n_args == 2 and init.varargs:  # (self, gen, *a, **k) Python 3.4
    pass
elif n_args == 4:  # (self, gen, args, kwds) Python 3.5
    def __init__(self, g, *a, **k):
        return _GeneratorContextManager.__init__(self, g, a, k)
    ContextManager.__init__ = __init__

contextmanager = decorator(ContextManager)


# ############################ dispatch_on ############################ #

def append(a, vancestors):
    """
    Append ``a`` to the list of the virtual ancestors, unless it is already
    included.
    """
    add = True
    for j, va in enumerate(vancestors):
        if issubclass(va, a):
            add = False
            break
        if issubclass(a, va):
            vancestors[j] = a
            add = False
    if add:
        vancestors.append(a)


# inspired from simplegeneric by P.J. Eby and functools.singledispatch
def dispatch_on(*dispatch_args):
    """
    Factory of decorators turning a function into a generic function
    dispatching on the given arguments.
    """
    assert dispatch_args, 'No dispatch args passed'
    dispatch_str = '(%s,)' % ', '.join(dispatch_args)

    def check(arguments, wrong=operator.ne, msg=''):
        """Make sure one passes the expected number of arguments"""
        if wrong(len(arguments), len(dispatch_args)):
            raise TypeError('Expected %d arguments, got %d%s' %
                            (len(dispatch_args), len(arguments), msg))

    def gen_func_dec(func):
        """Decorator turning a function into a generic function"""

        # first check the dispatch arguments
        argset = set(getfullargspec(func).args)
        if not set(dispatch_args) <= argset:
            raise NameError('Unknown dispatch arguments %s' % dispatch_str)

        typemap = {}

        def vancestors(*types):
            """
            Get a list of sets of virtual ancestors for the given types
            """
            check(types)
            ras = [[] for _ in range(len(dispatch_args))]
            for types_ in typemap:
                for t, type_, ra in zip(types, types_, ras):
                    if issubclass(t, type_) and type_ not in t.__mro__:
                        append(type_, ra)
            return [set(ra) for ra in ras]

        def ancestors(*types):
            """
            Get a list of virtual MROs, one for each type
            """
            check(types)
            lists = []
            for t, vas in zip(types, vancestors(*types)):
                n_vas = len(vas)
                if n_vas > 1:
                    raise RuntimeError(
                        'Ambiguous dispatch for %s: %s' % (t, vas))
                elif n_vas == 1:
                    va, = vas
                    mro = type('t', (t, va), {}).__mro__[1:]
                else:
                    mro = t.__mro__
                lists.append(mro[:-1])  # discard t and object
            return lists

        def register(*types):
            """
            Decorator to register an implementation for the given types
            """
            check(types)

            def dec(f):
                check(getfullargspec(f).args, operator.lt, ' in ' + f.__name__)
                typemap[types] = f
                return f
            return dec

        def dispatch_info(*types):
            """
            A utility to introspect the dispatch algorithm
            """
            check(types)
            lst = [tuple(a.__name__ for a in anc)
                   for anc in itertools.product(*ancestors(*types))]
            return lst

        def _dispatch(dispatch_args, *args, **kw):
            types = tuple(type(arg) for arg in dispatch_args)
            try:  # fast path
                f = typemap[types]
            except KeyError:
                pass
            else:
                return f(*args, **kw)
            combinations = itertools.product(*ancestors(*types))
            next(combinations)  # the first one has been already tried
            for types_ in combinations:
                f = typemap.get(types_)
                if f is not None:
                    return f(*args, **kw)

            # else call the default implementation
            return func(*args, **kw)

        return FunctionMaker.create(
            func, 'return _f_(%s, %%(shortsignature)s)' % dispatch_str,
            dict(_f_=_dispatch), register=register, default=func,
            typemap=typemap, vancestors=vancestors, ancestors=ancestors,
            dispatch_info=dispatch_info, __wrapped__=func)

    gen_func_dec.__name__ = 'dispatch_on' + dispatch_str
    return gen_func_dec
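
# Usage sketch (not part of the upstream file): generic-function dispatch on
# a named argument, with implementations registered per type.
#
# >>> @dispatch_on('obj')
# ... def describe(obj):
# ...     return 'generic'
# >>> @describe.register(int)
# ... def describe_int(obj):
# ...     return 'an integer'
# >>> describe(3), describe('x')
# ('an integer', 'generic')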
107
venv/Lib/site-packages/scipy/_lib/deprecation.py
Normal file
@@ -0,0 +1,107 @@
import functools
import warnings

__all__ = ["_deprecated"]


def _deprecated(msg, stacklevel=2):
    """Deprecate a function by emitting a warning on use."""
    def wrap(fun):
        if isinstance(fun, type):
            warnings.warn(
                "Trying to deprecate class {!r}".format(fun),
                category=RuntimeWarning, stacklevel=2)
            return fun

        @functools.wraps(fun)
        def call(*args, **kwargs):
            warnings.warn(msg, category=DeprecationWarning,
                          stacklevel=stacklevel)
            return fun(*args, **kwargs)
        call.__doc__ = msg
        return call

    return wrap
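
# Usage sketch (not part of the upstream file): wrapping a function so that
# every call emits a DeprecationWarning carrying the given message.
#
# >>> @_deprecated("old_func is deprecated, use new_func instead")
# ... def old_func(x):
# ...     return x + 1
# >>> old_func(1)   # emits a DeprecationWarning
# 2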
class _DeprecationHelperStr(object):
    """
    Helper class used by deprecate_cython_api
    """
    def __init__(self, content, message):
        self._content = content
        self._message = message

    def __hash__(self):
        return hash(self._content)

    def __eq__(self, other):
        res = (self._content == other)
        if res:
            warnings.warn(self._message, category=DeprecationWarning,
                          stacklevel=2)
        return res


def deprecate_cython_api(module, routine_name, new_name=None, message=None):
    """
    Deprecate an exported cdef function in a public Cython API module.

    Only functions can be deprecated; typedefs etc. cannot.

    Parameters
    ----------
    module : module
        Public Cython API module (e.g. scipy.linalg.cython_blas).
    routine_name : str
        Name of the routine to deprecate. May also be a fused-type
        routine (in which case all its specializations are deprecated).
    new_name : str
        New name to include in the deprecation warning message
    message : str
        Additional text in the deprecation warning message

    Examples
    --------
    Usually, this function would be used in the top-level of the
    module ``.pyx`` file:

    >>> from scipy._lib.deprecation import deprecate_cython_api
    >>> import scipy.linalg.cython_blas as mod
    >>> deprecate_cython_api(mod, "dgemm", "dgemm_new",
    ...                      message="Deprecated in Scipy 1.5.0")
    >>> del deprecate_cython_api, mod

    After this, Cython modules that use the deprecated function emit a
    deprecation warning when they are imported.

    """
    old_name = "{}.{}".format(module.__name__, routine_name)

    if new_name is None:
        depdoc = "`%s` is deprecated!" % old_name
    else:
        depdoc = "`%s` is deprecated, use `%s` instead!" % \
                 (old_name, new_name)

    if message is not None:
        depdoc += "\n" + message

    d = module.__pyx_capi__

    # Check if the function is a fused-type function with a mangled name
    j = 0
    has_fused = False
    while True:
        fused_name = "__pyx_fuse_{}{}".format(j, routine_name)
        if fused_name in d:
            has_fused = True
            d[_DeprecationHelperStr(fused_name, depdoc)] = d.pop(fused_name)
            j += 1
        else:
            break

    # If not, apply deprecation to the named routine
    if not has_fused:
        d[_DeprecationHelperStr(routine_name, depdoc)] = d.pop(routine_name)
272
venv/Lib/site-packages/scipy/_lib/doccer.py
Normal file
@@ -0,0 +1,272 @@
''' Utilities to allow inserting docstring fragments for common
parameters into function and method docstrings'''

import sys

__all__ = ['docformat', 'inherit_docstring_from', 'indentcount_lines',
           'filldoc', 'unindent_dict', 'unindent_string', 'doc_replace']


def docformat(docstring, docdict=None):
    ''' Fill a function docstring from variables in dictionary

    Adapt the indent of the inserted docs

    Parameters
    ----------
    docstring : string
        docstring from function, possibly with dict formatting strings
    docdict : dict, optional
        dictionary with keys that match the dict formatting strings
        and values that are docstring fragments to be inserted. The
        indentation of the inserted docstrings is set to match the
        minimum indentation of the ``docstring`` by adding this
        indentation to all lines of the inserted string, except the
        first.

    Returns
    -------
    outstring : string
        string with requested ``docdict`` strings inserted

    Examples
    --------
    >>> docformat(' Test string with %(value)s', {'value':'inserted value'})
    ' Test string with inserted value'
    >>> docstring = 'First line\\n    Second line\\n    %(value)s'
    >>> inserted_string = "indented\\nstring"
    >>> docdict = {'value': inserted_string}
    >>> docformat(docstring, docdict)
    'First line\\n    Second line\\n    indented\\n    string'
    '''
    if not docstring:
        return docstring
    if docdict is None:
        docdict = {}
    if not docdict:
        return docstring
    lines = docstring.expandtabs().splitlines()
    # Find the minimum indent of the main docstring, after first line
    if len(lines) < 2:
        icount = 0
    else:
        icount = indentcount_lines(lines[1:])
    indent = ' ' * icount
    # Insert this indent to dictionary docstrings
    indented = {}
    for name, dstr in docdict.items():
        lines = dstr.expandtabs().splitlines()
        try:
            newlines = [lines[0]]
            for line in lines[1:]:
                newlines.append(indent+line)
            indented[name] = '\n'.join(newlines)
        except IndexError:
            indented[name] = dstr
    return docstring % indented
def inherit_docstring_from(cls):
    """
    This decorator modifies the decorated function's docstring by
    replacing occurrences of '%(super)s' with the docstring of the
    method of the same name from the class `cls`.

    If the decorated method has no docstring, it is simply given the
    docstring of `cls`s method.

    Parameters
    ----------
    cls : Python class or instance
        A class with a method with the same name as the decorated method.
        The docstring of the method in this class replaces '%(super)s' in the
        docstring of the decorated method.

    Returns
    -------
    f : function
        The decorator function that modifies the __doc__ attribute
        of its argument.

    Examples
    --------
    In the following, the docstring for Bar.func is created using the
    docstring of `Foo.func`.

    >>> class Foo(object):
    ...     def func(self):
    ...         '''Do something useful.'''
    ...         return
    ...
    >>> class Bar(Foo):
    ...     @inherit_docstring_from(Foo)
    ...     def func(self):
    ...         '''%(super)s
    ...         Do it fast.
    ...         '''
    ...         return
    ...
    >>> b = Bar()
    >>> b.func.__doc__
    'Do something useful.\n        Do it fast.\n        '

    """
    def _doc(func):
        cls_docstring = getattr(cls, func.__name__).__doc__
        func_docstring = func.__doc__
        if func_docstring is None:
            func.__doc__ = cls_docstring
        else:
            new_docstring = func_docstring % dict(super=cls_docstring)
            func.__doc__ = new_docstring
        return func
    return _doc


def extend_notes_in_docstring(cls, notes):
    """
    This decorator replaces the decorated function's docstring
    with the docstring from corresponding method in `cls`.
    It extends the 'Notes' section of that docstring to include
    the given `notes`.
    """
    def _doc(func):
        cls_docstring = getattr(cls, func.__name__).__doc__
        # If python is called with -OO option,
        # there is no docstring
        if cls_docstring is None:
            return func
        end_of_notes = cls_docstring.find('        References\n')
        if end_of_notes == -1:
            end_of_notes = cls_docstring.find('        Examples\n')
            if end_of_notes == -1:
                end_of_notes = len(cls_docstring)
        func.__doc__ = (cls_docstring[:end_of_notes] + notes +
                        cls_docstring[end_of_notes:])
        return func
    return _doc


def replace_notes_in_docstring(cls, notes):
    """
    This decorator replaces the decorated function's docstring
    with the docstring from corresponding method in `cls`.
    It replaces the 'Notes' section of that docstring with
    the given `notes`.
    """
    def _doc(func):
        cls_docstring = getattr(cls, func.__name__).__doc__
        notes_header = '        Notes\n        -----\n'
        # If python is called with -OO option,
        # there is no docstring
        if cls_docstring is None:
            return func
        start_of_notes = cls_docstring.find(notes_header)
        end_of_notes = cls_docstring.find('        References\n')
        if end_of_notes == -1:
            end_of_notes = cls_docstring.find('        Examples\n')
            if end_of_notes == -1:
                end_of_notes = len(cls_docstring)
        func.__doc__ = (cls_docstring[:start_of_notes + len(notes_header)] +
                        notes +
                        cls_docstring[end_of_notes:])
        return func
    return _doc


def indentcount_lines(lines):
    ''' Minimum indent for all lines in line list

    >>> lines = [' one', ' two', ' three']
    >>> indentcount_lines(lines)
    1
    >>> lines = []
    >>> indentcount_lines(lines)
    0
    >>> lines = [' one']
    >>> indentcount_lines(lines)
    1
    >>> indentcount_lines(['    '])
    0
    '''
    indentno = sys.maxsize
    for line in lines:
        stripped = line.lstrip()
        if stripped:
            indentno = min(indentno, len(line) - len(stripped))
    if indentno == sys.maxsize:
        return 0
    return indentno


def filldoc(docdict, unindent_params=True):
    ''' Return docstring decorator using docdict variable dictionary

    Parameters
    ----------
    docdict : dictionary
        dictionary containing name, docstring fragment pairs
    unindent_params : {False, True}, boolean, optional
        If True, strip common indentation from all parameters in
        docdict

    Returns
    -------
    decfunc : function
        decorator that applies dictionary to input function docstring

    '''
    if unindent_params:
        docdict = unindent_dict(docdict)

    def decorate(f):
        f.__doc__ = docformat(f.__doc__, docdict)
        return f
    return decorate
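
# Usage sketch (not part of the upstream file): one shared fragment filled
# into several docstrings through a module-level decorator.
#
# >>> docdict = {'seed': 'seed : int, optional'}
# >>> @filldoc(docdict)
# ... def f(x, seed=None):
# ...     '''Do a thing.
# ...
# ...     %(seed)s
# ...     '''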
def unindent_dict(docdict):
    ''' Unindent all strings in a docdict '''
    can_dict = {}
    for name, dstr in docdict.items():
        can_dict[name] = unindent_string(dstr)
    return can_dict


def unindent_string(docstring):
    ''' Set docstring to minimum indent for all lines, including first

    >>> unindent_string('  two')
    'two'
    >>> unindent_string('  two\\n   three')
    'two\\n three'
    '''
    lines = docstring.expandtabs().splitlines()
    icount = indentcount_lines(lines)
    if icount == 0:
        return docstring
    return '\n'.join([line[icount:] for line in lines])


def doc_replace(obj, oldval, newval):
    """Decorator to take the docstring from obj, with oldval replaced by newval

    Equivalent to ``func.__doc__ = obj.__doc__.replace(oldval, newval)``

    Parameters
    ----------
    obj: object
        The object to take the docstring from.
    oldval: string
        The string to replace from the original docstring.
    newval: string
        The string to replace ``oldval`` with.
    """
    # __doc__ may be None for optimized Python (-OO)
    doc = (obj.__doc__ or '').replace(oldval, newval)

    def inner(func):
        func.__doc__ = doc
        return func

    return inner
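
# Usage sketch (not part of the upstream file): reuse a docstring while
# swapping one substring, for example a function name.
#
# >>> def base(x):
# ...     '''Compute base(x).'''
# >>> @doc_replace(base, 'base', 'fancy')
# ... def fancy(x):
# ...     pass
# >>> fancy.__doc__
# 'Compute fancy(x).'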
BIN
venv/Lib/site-packages/scipy/_lib/messagestream.cp36-win32.pyd
Normal file
Binary file not shown.
60
venv/Lib/site-packages/scipy/_lib/setup.py
Normal file
@@ -0,0 +1,60 @@
import os


def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration

    config = Configuration('_lib', parent_package, top_path)
    config.add_data_files('tests/*.py')

    include_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), 'src'))
    depends = [os.path.join(include_dir, 'ccallback.h')]

    config.add_extension("_ccallback_c",
                         sources=["_ccallback_c.c"],
                         depends=depends,
                         include_dirs=[include_dir])

    config.add_extension("_test_ccallback",
                         sources=["src/_test_ccallback.c"],
                         depends=depends,
                         include_dirs=[include_dir])

    config.add_extension("_fpumode",
                         sources=["_fpumode.c"])

    def get_messagestream_config(ext, build_dir):
        # Generate a header file containing defines
        config_cmd = config.get_config_cmd()
        defines = []
        if config_cmd.check_func('open_memstream', decl=True, call=True):
            defines.append(('HAVE_OPEN_MEMSTREAM', '1'))
        target = os.path.join(os.path.dirname(__file__), 'src',
                              'messagestream_config.h')
        with open(target, 'w') as f:
            for name, value in defines:
                f.write('#define {0} {1}\n'.format(name, value))

    depends = [os.path.join(include_dir, 'messagestream.h')]
    config.add_extension("messagestream",
                         sources=["messagestream.c"] + [get_messagestream_config],
                         depends=depends,
                         include_dirs=[include_dir])

    config.add_extension("_test_deprecation_call",
                         sources=["_test_deprecation_call.c"],
                         include_dirs=[include_dir])

    config.add_extension("_test_deprecation_def",
                         sources=["_test_deprecation_def.c"],
                         include_dirs=[include_dir])

    config.add_subpackage('_uarray')

    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup

    setup(**configuration(top_path='').todict())
0
venv/Lib/site-packages/scipy/_lib/tests/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
101
venv/Lib/site-packages/scipy/_lib/tests/test__gcutils.py
Normal file
@@ -0,0 +1,101 @@
""" Test for assert_deallocated context manager and gc utilities
"""
import gc

from scipy._lib._gcutils import (set_gc_state, gc_state, assert_deallocated,
                                 ReferenceError, IS_PYPY)

from numpy.testing import assert_equal

import pytest


def test_set_gc_state():
    gc_status = gc.isenabled()
    try:
        for state in (True, False):
            gc.enable()
            set_gc_state(state)
            assert_equal(gc.isenabled(), state)
            gc.disable()
            set_gc_state(state)
            assert_equal(gc.isenabled(), state)
    finally:
        if gc_status:
            gc.enable()


def test_gc_state():
    # Test gc_state context manager
    gc_status = gc.isenabled()
    try:
        for pre_state in (True, False):
            set_gc_state(pre_state)
            for with_state in (True, False):
                # Check the gc state is with_state in with block
                with gc_state(with_state):
                    assert_equal(gc.isenabled(), with_state)
                # And returns to previous state outside block
                assert_equal(gc.isenabled(), pre_state)
                # Even if the gc state is set explicitly within the block
                with gc_state(with_state):
                    assert_equal(gc.isenabled(), with_state)
                    set_gc_state(not with_state)
                assert_equal(gc.isenabled(), pre_state)
    finally:
        if gc_status:
            gc.enable()


@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated():
    # Ordinary use
    class C(object):
        def __init__(self, arg0, arg1, name='myname'):
            self.name = name
    for gc_current in (True, False):
        with gc_state(gc_current):
            # We are deleting from with-block context, so that's OK
            with assert_deallocated(C, 0, 2, 'another name') as c:
                assert_equal(c.name, 'another name')
                del c
            # Or not using the thing in with-block context, also OK
            with assert_deallocated(C, 0, 2, name='third name'):
                pass
            assert_equal(gc.isenabled(), gc_current)


@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated_nodel():
    class C(object):
        pass
    with pytest.raises(ReferenceError):
        # Need to delete after using if in with-block context
        # Note: assert_deallocated(C) needs to be assigned for the test
        # to function correctly. It is assigned to c, but c itself is
        # not referenced in the body of the with, it is only there for
        # the refcount.
        with assert_deallocated(C) as c:
            pass


@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated_circular():
    class C(object):
        def __init__(self):
            self._circular = self
    with pytest.raises(ReferenceError):
        # Circular reference, no automatic garbage collection
        with assert_deallocated(C) as c:
            del c


@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
def test_assert_deallocated_circular2():
    class C(object):
        def __init__(self):
            self._circular = self
    with pytest.raises(ReferenceError):
        # Still circular reference, no automatic garbage collection
        with assert_deallocated(C):
            pass
67
venv/Lib/site-packages/scipy/_lib/tests/test__pep440.py
Normal file
@@ -0,0 +1,67 @@
from pytest import raises as assert_raises
from scipy._lib._pep440 import Version, parse


def test_main_versions():
    assert Version('1.8.0') == Version('1.8.0')
    for ver in ['1.9.0', '2.0.0', '1.8.1']:
        assert Version('1.8.0') < Version(ver)

    for ver in ['1.7.0', '1.7.1', '0.9.9']:
        assert Version('1.8.0') > Version(ver)


def test_version_1_point_10():
    # regression test for gh-2998.
    assert Version('1.9.0') < Version('1.10.0')
    assert Version('1.11.0') < Version('1.11.1')
    assert Version('1.11.0') == Version('1.11.0')
    assert Version('1.99.11') < Version('1.99.12')


def test_alpha_beta_rc():
    assert Version('1.8.0rc1') == Version('1.8.0rc1')
    for ver in ['1.8.0', '1.8.0rc2']:
        assert Version('1.8.0rc1') < Version(ver)

    for ver in ['1.8.0a2', '1.8.0b3', '1.7.2rc4']:
        assert Version('1.8.0rc1') > Version(ver)

    assert Version('1.8.0b1') > Version('1.8.0a2')


def test_dev_version():
    assert Version('1.9.0.dev+Unknown') < Version('1.9.0')
    for ver in ['1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev+ffffffff', '1.9.0.dev1']:
        assert Version('1.9.0.dev+f16acvda') < Version(ver)

    assert Version('1.9.0.dev+f16acvda') == Version('1.9.0.dev+f16acvda')


def test_dev_a_b_rc_mixed():
    assert Version('1.9.0a2.dev+f16acvda') == Version('1.9.0a2.dev+f16acvda')
    assert Version('1.9.0a2.dev+6acvda54') < Version('1.9.0a2')


def test_dev0_version():
    assert Version('1.9.0.dev0+Unknown') < Version('1.9.0')
    for ver in ['1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev0+ffffffff']:
        assert Version('1.9.0.dev0+f16acvda') < Version(ver)

    assert Version('1.9.0.dev0+f16acvda') == Version('1.9.0.dev0+f16acvda')


def test_dev0_a_b_rc_mixed():
    assert Version('1.9.0a2.dev0+f16acvda') == Version('1.9.0a2.dev0+f16acvda')
    assert Version('1.9.0a2.dev0+6acvda54') < Version('1.9.0a2')


def test_raises():
    for ver in ['1,9.0', '1.7.x']:
        assert_raises(ValueError, Version, ver)


def test_legacy_version():
    # Non-PEP-440 version identifiers always compare less. For NumPy this only
    # occurs on dev builds prior to 1.10.0 which are unsupported anyway.
    assert parse('invalid') < Version('0.0.0')
    assert parse('1.9.0-f16acvda') < Version('1.0.0')
32
venv/Lib/site-packages/scipy/_lib/tests/test__testutils.py
Normal file
@@ -0,0 +1,32 @@
import sys
from scipy._lib._testutils import _parse_size, _get_mem_available
import pytest


def test__parse_size():
    expected = {
        '12': 12e6,
        '12 b': 12,
        '12k': 12e3,
        ' 12 M ': 12e6,
        ' 12 G ': 12e9,
        ' 12Tb ': 12e12,
        '12  Mib ': 12 * 1024.0**2,
        '12Tib': 12 * 1024.0**4,
    }

    for inp, outp in sorted(expected.items()):
        if outp is None:
            with pytest.raises(ValueError):
                _parse_size(inp)
        else:
            assert _parse_size(inp) == outp


def test__mem_available():
    # May return None on non-Linux platforms
    available = _get_mem_available()
    if sys.platform.startswith('linux'):
        assert available >= 0
    else:
        assert available is None or available >= 0
|
@ -0,0 +1,51 @@
import threading
import time
import traceback

from numpy.testing import assert_
from pytest import raises as assert_raises

from scipy._lib._threadsafety import ReentrancyLock, non_reentrant, ReentrancyError


def test_parallel_threads():
    # Check that ReentrancyLock serializes work in parallel threads.
    #
    # The test is not fully deterministic, and may succeed falsely if
    # the timings go wrong.

    lock = ReentrancyLock("failure")

    failflag = [False]
    exceptions_raised = []

    def worker(k):
        try:
            with lock:
                assert_(not failflag[0])
                failflag[0] = True
                time.sleep(0.1 * k)
                assert_(failflag[0])
                failflag[0] = False
        except Exception:
            exceptions_raised.append(traceback.format_exc(2))

    threads = [threading.Thread(target=lambda k=k: worker(k))
               for k in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    exceptions_raised = "\n".join(exceptions_raised)
    assert_(not exceptions_raised, exceptions_raised)


def test_reentering():
    # Check that ReentrancyLock prevents re-entering from the same thread.

    @non_reentrant()
    def func(x):
        return func(x)

    assert_raises(ReentrancyError, func, 0)
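The intended use of the API under test, as a minimal sketch restating what `test_reentering` checks: decorating a function with `non_reentrant()` makes re-entry from the same thread raise `ReentrancyError` instead of recursing, while `ReentrancyLock` serializes access across threads::

    from scipy._lib._threadsafety import non_reentrant, ReentrancyError

    @non_reentrant()
    def fragile(x):
        return fragile(x)  # second entry from the same thread ...

    try:
        fragile(0)
    except ReentrancyError:
        pass  # ... raises instead of recursing or deadlocking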
249
venv/Lib/site-packages/scipy/_lib/tests/test__util.py
Normal file
@@ -0,0 +1,249 @@
from multiprocessing import Pool
from multiprocessing.pool import Pool as PWL
import os
import math

import numpy as np
from numpy.testing import assert_equal, assert_
import pytest
from pytest import raises as assert_raises, deprecated_call

import scipy
from scipy._lib._util import (_aligned_zeros, check_random_state, MapWrapper,
                              getfullargspec_no_self, FullArgSpec,
                              rng_integers)


def test__aligned_zeros():
    niter = 10

    def check(shape, dtype, order, align):
        err_msg = repr((shape, dtype, order, align))
        x = _aligned_zeros(shape, dtype, order, align=align)
        if align is None:
            align = np.dtype(dtype).alignment
        assert_equal(x.__array_interface__['data'][0] % align, 0)
        if hasattr(shape, '__len__'):
            assert_equal(x.shape, shape, err_msg)
        else:
            assert_equal(x.shape, (shape,), err_msg)
        assert_equal(x.dtype, dtype)
        if order == "C":
            assert_(x.flags.c_contiguous, err_msg)
        elif order == "F":
            if x.size > 0:
                # Size-0 arrays get invalid flags on NumPy 1.5
                assert_(x.flags.f_contiguous, err_msg)
        elif order is None:
            assert_(x.flags.c_contiguous, err_msg)
        else:
            raise ValueError()

    # try various alignments
    for align in [1, 2, 3, 4, 8, 16, 32, 64, None]:
        for n in [0, 1, 3, 11]:
            for order in ["C", "F", None]:
                for dtype in [np.uint8, np.float64]:
                    for shape in [n, (1, 2, 3, n)]:
                        for j in range(niter):
                            check(shape, dtype, order, align)


def test_check_random_state():
    # If seed is None, return the RandomState singleton used by np.random.
    # If seed is an int, return a new RandomState instance seeded with seed.
    # If seed is already a RandomState instance, return it.
    # Otherwise raise ValueError.
    rsi = check_random_state(1)
    assert_equal(type(rsi), np.random.RandomState)
    rsi = check_random_state(rsi)
    assert_equal(type(rsi), np.random.RandomState)
    rsi = check_random_state(None)
    assert_equal(type(rsi), np.random.RandomState)
    assert_raises(ValueError, check_random_state, 'a')
    if hasattr(np.random, 'Generator'):
        # np.random.Generator is only available in NumPy >= 1.17
        rg = np.random.Generator(np.random.PCG64())
        rsi = check_random_state(rg)
        assert_equal(type(rsi), np.random.Generator)


def test_getfullargspec_no_self():
    p = MapWrapper(1)
    argspec = getfullargspec_no_self(p.__init__)
    assert_equal(argspec, FullArgSpec(['pool'], None, None, (1,), [],
                                      None, {}))
    argspec = getfullargspec_no_self(p.__call__)
    assert_equal(argspec, FullArgSpec(['func', 'iterable'], None, None, None,
                                      [], None, {}))

    class _rv_generic(object):
        def _rvs(self, a, b=2, c=3, *args, size=None, **kwargs):
            return None

    rv_obj = _rv_generic()
    argspec = getfullargspec_no_self(rv_obj._rvs)
    assert_equal(argspec, FullArgSpec(['a', 'b', 'c'], 'args', 'kwargs',
                                      (2, 3), ['size'], {'size': None}, {}))


def test_mapwrapper_serial():
    in_arg = np.arange(10.)
    out_arg = np.sin(in_arg)

    p = MapWrapper(1)
    assert_(p._mapfunc is map)
    assert_(p.pool is None)
    assert_(p._own_pool is False)
    out = list(p(np.sin, in_arg))
    assert_equal(out, out_arg)

    with assert_raises(RuntimeError):
        p = MapWrapper(0)


def test_pool():
    with Pool(2) as p:
        p.map(math.sin, [1, 2, 3, 4])


def test_mapwrapper_parallel():
    in_arg = np.arange(10.)
    out_arg = np.sin(in_arg)

    with MapWrapper(2) as p:
        out = p(np.sin, in_arg)
        assert_equal(list(out), out_arg)

        assert_(p._own_pool is True)
        assert_(isinstance(p.pool, PWL))
        assert_(p._mapfunc is not None)

    # the context manager should've closed the internal pool;
    # check that it has by asking it to calculate again.
    with assert_raises(Exception) as excinfo:
        p(np.sin, in_arg)

    assert_(excinfo.type is ValueError)

    # can also set a PoolWrapper up with a map-like callable instance
    try:
        p = Pool(2)
        q = MapWrapper(p.map)

        assert_(q._own_pool is False)
        q.close()

        # closing the PoolWrapper shouldn't close the internal pool
        # because it didn't create it
        out = p.map(np.sin, in_arg)
        assert_equal(list(out), out_arg)
    finally:
        p.close()


# get our custom ones and a few from the "import *" cases
@pytest.mark.parametrize(
    'key', ('fft', 'ifft', 'diag', 'arccos',
            'randn', 'rand', 'array'))
def test_numpy_deprecation(key):
    """Test that 'from numpy import *' functions are deprecated."""
    if key in ('fft', 'ifft', 'diag', 'arccos'):
        arg = [1.0, 0.]
    elif key == 'finfo':
        arg = float
    else:
        arg = 2
    func = getattr(scipy, key)
    if key == 'fft':
        match = r'scipy\.fft.*deprecated.*1.5.0.*'
    else:
        match = r'scipy\.%s is deprecated.*2\.0\.0' % key
    with deprecated_call(match=match) as dep:
        func(arg)  # deprecated
    # in case we catch more than one dep warning
    fnames = [os.path.splitext(d.filename)[0] for d in dep.list]
    basenames = [os.path.basename(fname) for fname in fnames]
    assert 'test__util' in basenames
    if key in ('rand', 'randn'):
        root = np.random
    elif key in ('fft', 'ifft'):
        root = np.fft
    else:
        root = np
    func_np = getattr(root, key)
    func_np(arg)  # not deprecated
    assert func_np is not func
    # classes should remain classes
    if isinstance(func_np, type):
        assert isinstance(func, type)


def test_numpy_deprecation_functionality():
    # Check that the deprecation wrappers don't break basic NumPy
    # functionality
    with deprecated_call():
        x = scipy.array([1, 2, 3], dtype=scipy.float64)
        assert x.dtype == scipy.float64
        assert x.dtype == np.float64

        x = scipy.finfo(scipy.float32)
        assert x.eps == np.finfo(np.float32).eps

        assert scipy.float64 == np.float64
        assert issubclass(np.float64, scipy.float64)


def test_rng_integers():
    rng = np.random.RandomState()

    # test that numbers are inclusive of high point
    arr = rng_integers(rng, low=2, high=5, size=100, endpoint=True)
    assert np.max(arr) == 5
    assert np.min(arr) == 2
    assert arr.shape == (100, )

    # test that numbers are inclusive of high point
    arr = rng_integers(rng, low=5, size=100, endpoint=True)
    assert np.max(arr) == 5
    assert np.min(arr) == 0
    assert arr.shape == (100, )

    # test that numbers are exclusive of high point
    arr = rng_integers(rng, low=2, high=5, size=100, endpoint=False)
    assert np.max(arr) == 4
    assert np.min(arr) == 2
    assert arr.shape == (100, )

    # test that numbers are exclusive of high point
    arr = rng_integers(rng, low=5, size=100, endpoint=False)
    assert np.max(arr) == 4
    assert np.min(arr) == 0
    assert arr.shape == (100, )

    # now try with np.random.Generator
    try:
        rng = np.random.default_rng()
    except AttributeError:
        return

    # test that numbers are inclusive of high point
    arr = rng_integers(rng, low=2, high=5, size=100, endpoint=True)
    assert np.max(arr) == 5
    assert np.min(arr) == 2
    assert arr.shape == (100, )

    # test that numbers are inclusive of high point
    arr = rng_integers(rng, low=5, size=100, endpoint=True)
    assert np.max(arr) == 5
    assert np.min(arr) == 0
    assert arr.shape == (100, )

    # test that numbers are exclusive of high point
    arr = rng_integers(rng, low=2, high=5, size=100, endpoint=False)
    assert np.max(arr) == 4
    assert np.min(arr) == 2
    assert arr.shape == (100, )

    # test that numbers are exclusive of high point
    arr = rng_integers(rng, low=5, size=100, endpoint=False)
    assert np.max(arr) == 4
    assert np.min(arr) == 0
    assert arr.shape == (100, )
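A usage sketch of `MapWrapper`, restating what the tests above exercise: constructing with `1` gives serial built-in `map`, an integer `n > 1` creates an owned process pool that is closed on exit, and a map-like callable is used as-is without being closed by the wrapper::

    import numpy as np
    from scipy._lib._util import MapWrapper

    with MapWrapper(2) as mapper:  # owns a 2-worker pool, closed on exit
        results = list(mapper(np.sin, np.arange(10.)))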
197
venv/Lib/site-packages/scipy/_lib/tests/test_ccallback.py
Normal file
@@ -0,0 +1,197 @@
from numpy.testing import assert_equal, assert_
from pytest import raises as assert_raises

import time
import pytest
import ctypes
import threading
from scipy._lib import _ccallback_c as _test_ccallback_cython
from scipy._lib import _test_ccallback
from scipy._lib._ccallback import LowLevelCallable

try:
    import cffi
    HAVE_CFFI = True
except ImportError:
    HAVE_CFFI = False


ERROR_VALUE = 2.0


def callback_python(a, user_data=None):
    if a == ERROR_VALUE:
        raise ValueError("bad value")

    if user_data is None:
        return a + 1
    else:
        return a + user_data


def _get_cffi_func(base, signature):
    if not HAVE_CFFI:
        pytest.skip("cffi not installed")

    # Get function address
    voidp = ctypes.cast(base, ctypes.c_void_p)
    address = voidp.value

    # Create corresponding cffi handle
    ffi = cffi.FFI()
    func = ffi.cast(signature, address)
    return func


def _get_ctypes_data():
    value = ctypes.c_double(2.0)
    return ctypes.cast(ctypes.pointer(value), ctypes.c_voidp)


def _get_cffi_data():
    if not HAVE_CFFI:
        pytest.skip("cffi not installed")
    ffi = cffi.FFI()
    return ffi.new('double *', 2.0)


CALLERS = {
    'simple': _test_ccallback.test_call_simple,
    'nodata': _test_ccallback.test_call_nodata,
    'nonlocal': _test_ccallback.test_call_nonlocal,
    'cython': _test_ccallback_cython.test_call_cython,
}

# These functions have signatures known to the callers
FUNCS = {
    'python': lambda: callback_python,
    'capsule': lambda: _test_ccallback.test_get_plus1_capsule(),
    'cython': lambda: LowLevelCallable.from_cython(_test_ccallback_cython,
                                                   "plus1_cython"),
    'ctypes': lambda: _test_ccallback_cython.plus1_ctypes,
    'cffi': lambda: _get_cffi_func(_test_ccallback_cython.plus1_ctypes,
                                   'double (*)(double, int *, void *)'),
    'capsule_b': lambda: _test_ccallback.test_get_plus1b_capsule(),
    'cython_b': lambda: LowLevelCallable.from_cython(_test_ccallback_cython,
                                                     "plus1b_cython"),
    'ctypes_b': lambda: _test_ccallback_cython.plus1b_ctypes,
    'cffi_b': lambda: _get_cffi_func(_test_ccallback_cython.plus1b_ctypes,
                                     'double (*)(double, double, int *, void *)'),
}

# These functions have signatures the callers don't know
BAD_FUNCS = {
    'capsule_bc': lambda: _test_ccallback.test_get_plus1bc_capsule(),
    'cython_bc': lambda: LowLevelCallable.from_cython(_test_ccallback_cython,
                                                      "plus1bc_cython"),
    'ctypes_bc': lambda: _test_ccallback_cython.plus1bc_ctypes,
    'cffi_bc': lambda: _get_cffi_func(_test_ccallback_cython.plus1bc_ctypes,
                                      'double (*)(double, double, double, int *, void *)'),
}

USER_DATAS = {
    'ctypes': _get_ctypes_data,
    'cffi': _get_cffi_data,
    'capsule': _test_ccallback.test_get_data_capsule,
}


def test_callbacks():
    def check(caller, func, user_data):
        caller = CALLERS[caller]
        func = FUNCS[func]()
        user_data = USER_DATAS[user_data]()

        if func is callback_python:
            func2 = lambda x: func(x, 2.0)
        else:
            func2 = LowLevelCallable(func, user_data)
            func = LowLevelCallable(func)

        # Test basic call
        assert_equal(caller(func, 1.0), 2.0)

        # Test 'bad' value resulting to an error
        assert_raises(ValueError, caller, func, ERROR_VALUE)

        # Test passing in user_data
        assert_equal(caller(func2, 1.0), 3.0)

    for caller in sorted(CALLERS.keys()):
        for func in sorted(FUNCS.keys()):
            for user_data in sorted(USER_DATAS.keys()):
                check(caller, func, user_data)


def test_bad_callbacks():
    def check(caller, func, user_data):
        caller = CALLERS[caller]
        user_data = USER_DATAS[user_data]()
        func = BAD_FUNCS[func]()

        if func is callback_python:
            func2 = lambda x: func(x, 2.0)
        else:
            func2 = LowLevelCallable(func, user_data)
            func = LowLevelCallable(func)

        # Test that basic call fails
        assert_raises(ValueError, caller, LowLevelCallable(func), 1.0)

        # Test that passing in user_data also fails
        assert_raises(ValueError, caller, func2, 1.0)

        # Test error message
        llfunc = LowLevelCallable(func)
        try:
            caller(llfunc, 1.0)
        except ValueError as err:
            msg = str(err)
            assert_(llfunc.signature in msg, msg)
            assert_('double (double, double, int *, void *)' in msg, msg)

    for caller in sorted(CALLERS.keys()):
        for func in sorted(BAD_FUNCS.keys()):
            for user_data in sorted(USER_DATAS.keys()):
                check(caller, func, user_data)


def test_signature_override():
    caller = _test_ccallback.test_call_simple
    func = _test_ccallback.test_get_plus1_capsule()

    llcallable = LowLevelCallable(func, signature="bad signature")
    assert_equal(llcallable.signature, "bad signature")
    assert_raises(ValueError, caller, llcallable, 3)

    llcallable = LowLevelCallable(func, signature="double (double, int *, void *)")
    assert_equal(llcallable.signature, "double (double, int *, void *)")
    assert_equal(caller(llcallable, 3), 4)


def test_threadsafety():
    def callback(a, caller):
        if a <= 0:
            return 1
        else:
            res = caller(lambda x: callback(x, caller), a - 1)
            return 2*res

    def check(caller):
        caller = CALLERS[caller]

        results = []

        count = 10

        def run():
            time.sleep(0.01)
            r = caller(lambda x: callback(x, caller), count)
            results.append(r)

        threads = [threading.Thread(target=run) for j in range(20)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()

        assert_equal(results, [2.0**count]*len(threads))

    for caller in CALLERS.keys():
        check(caller)
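Distilled from `test_signature_override` above, a minimal sketch of driving a C-level caller through `LowLevelCallable` with an explicit signature string, using the test helpers exactly as imported in this file::

    from scipy._lib import _test_ccallback
    from scipy._lib._ccallback import LowLevelCallable

    capsule = _test_ccallback.test_get_plus1_capsule()
    ll = LowLevelCallable(capsule, signature="double (double, int *, void *)")
    assert _test_ccallback.test_call_simple(ll, 3) == 4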
10
venv/Lib/site-packages/scipy/_lib/tests/test_deprecation.py
Normal file
@@ -0,0 +1,10 @@
import pytest


def test_cython_api_deprecation():
    match = ("`scipy._lib._test_deprecation_def.foo_deprecated` "
             "is deprecated, use `foo` instead!\n"
             "Deprecated in Scipy 42.0.0")
    with pytest.warns(DeprecationWarning, match=match):
        from .. import _test_deprecation_call
    assert _test_deprecation_call.call() == (1, 1)
50
venv/Lib/site-packages/scipy/_lib/tests/test_import_cycles.py
Normal file
@@ -0,0 +1,50 @@
import sys
import subprocess


MODULES = [
    "scipy.cluster",
    "scipy.cluster.vq",
    "scipy.cluster.hierarchy",
    "scipy.constants",
    "scipy.fft",
    "scipy.fftpack",
    "scipy.integrate",
    "scipy.interpolate",
    "scipy.io",
    "scipy.io.arff",
    "scipy.io.harwell_boeing",
    "scipy.io.idl",
    "scipy.io.matlab",
    "scipy.io.netcdf",
    "scipy.io.wavfile",
    "scipy.linalg",
    "scipy.linalg.blas",
    "scipy.linalg.cython_blas",
    "scipy.linalg.lapack",
    "scipy.linalg.cython_lapack",
    "scipy.linalg.interpolative",
    "scipy.misc",
    "scipy.ndimage",
    "scipy.odr",
    "scipy.optimize",
    "scipy.signal",
    "scipy.signal.windows",
    "scipy.sparse",
    "scipy.sparse.linalg",
    "scipy.sparse.csgraph",
    "scipy.spatial",
    "scipy.spatial.distance",
    "scipy.special",
    "scipy.stats",
    "scipy.stats.distributions",
    "scipy.stats.mstats",
]


def test_modules_importable():
    # Check that all modules are importable in a new Python process.
    # This is not necessarily true if there are import cycles present.
    for module in MODULES:
        cmd = 'import {}'.format(module)
        subprocess.check_call([sys.executable, '-c', cmd])
42
venv/Lib/site-packages/scipy/_lib/tests/test_tmpdirs.py
Normal file
@@ -0,0 +1,42 @@
""" Test tmpdirs module """
from os import getcwd
from os.path import realpath, abspath, dirname, isfile, join as pjoin, exists

from scipy._lib._tmpdirs import tempdir, in_tempdir, in_dir

from numpy.testing import assert_, assert_equal

MY_PATH = abspath(__file__)
MY_DIR = dirname(MY_PATH)


def test_tempdir():
    with tempdir() as tmpdir:
        fname = pjoin(tmpdir, 'example_file.txt')
        with open(fname, 'wt') as fobj:
            fobj.write('a string\n')
    assert_(not exists(tmpdir))


def test_in_tempdir():
    my_cwd = getcwd()
    with in_tempdir() as tmpdir:
        with open('test.txt', 'wt') as f:
            f.write('some text')
        assert_(isfile('test.txt'))
        assert_(isfile(pjoin(tmpdir, 'test.txt')))
    assert_(not exists(tmpdir))
    assert_equal(getcwd(), my_cwd)


def test_given_directory():
    # Test InGivenDirectory
    cwd = getcwd()
    with in_dir() as tmpdir:
        assert_equal(tmpdir, abspath(cwd))
        assert_equal(tmpdir, abspath(getcwd()))
    with in_dir(MY_DIR) as tmpdir:
        assert_equal(tmpdir, MY_DIR)
        assert_equal(realpath(MY_DIR), realpath(abspath(getcwd())))
    # We were deleting the given directory! Check not so now.
    assert_(isfile(MY_PATH))
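The context managers under test, in one sketch restating the behaviour asserted above: `tempdir` yields a fresh directory, `in_tempdir` additionally changes into it, and both remove the directory (and restore the working directory) on exit::

    from os import getcwd
    from scipy._lib._tmpdirs import in_tempdir

    before = getcwd()
    with in_tempdir():
        open('scratch.txt', 'w').close()  # written inside the temp dir
    assert getcwd() == before             # cwd restored; temp dir removed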
121
venv/Lib/site-packages/scipy/_lib/tests/test_warnings.py
Normal file
@@ -0,0 +1,121 @@
"""
Tests which scan for certain occurrences in the code; they may not find
all of these occurrences but should catch almost all. This file was adapted
from NumPy.
"""


import os
from pathlib import Path
import ast
import tokenize

import scipy

import pytest


class ParseCall(ast.NodeVisitor):
    def __init__(self):
        self.ls = []

    def visit_Attribute(self, node):
        ast.NodeVisitor.generic_visit(self, node)
        self.ls.append(node.attr)

    def visit_Name(self, node):
        self.ls.append(node.id)


class FindFuncs(ast.NodeVisitor):
    def __init__(self, filename):
        super().__init__()
        self.__filename = filename
        self.bad_filters = []
        self.bad_stacklevels = []

    def visit_Call(self, node):
        p = ParseCall()
        p.visit(node.func)
        ast.NodeVisitor.generic_visit(self, node)

        if p.ls[-1] == 'simplefilter' or p.ls[-1] == 'filterwarnings':
            if node.args[0].s == "ignore":
                self.bad_filters.append(
                    "{}:{}".format(self.__filename, node.lineno))

        if p.ls[-1] == 'warn' and (
                len(p.ls) == 1 or p.ls[-2] == 'warnings'):

            if self.__filename == "_lib/tests/test_warnings.py":
                # This file
                return

            # See if stacklevel exists:
            if len(node.args) == 3:
                return
            args = {kw.arg for kw in node.keywords}
            if "stacklevel" not in args:
                self.bad_stacklevels.append(
                    "{}:{}".format(self.__filename, node.lineno))


@pytest.fixture(scope="session")
def warning_calls():
    # combined "ignore" and stacklevel error
    base = Path(scipy.__file__).parent

    bad_filters = []
    bad_stacklevels = []

    for path in base.rglob("*.py"):
        # use tokenize to auto-detect encoding on systems where no
        # default encoding is defined (e.g., LANG='C')
        with tokenize.open(str(path)) as file:
            tree = ast.parse(file.read(), filename=str(path))
            finder = FindFuncs(path.relative_to(base))
            finder.visit(tree)
            bad_filters.extend(finder.bad_filters)
            bad_stacklevels.extend(finder.bad_stacklevels)

    return bad_filters, bad_stacklevels


@pytest.mark.slow
def test_warning_calls_filters(warning_calls):
    bad_filters, bad_stacklevels = warning_calls

    # There is still one simplefilter occurrence in optimize.py that could be removed.
    bad_filters = [item for item in bad_filters
                   if 'optimize.py' not in item]
    # The filterwarnings calls in sparse are needed.
    bad_filters = [item for item in bad_filters
                   if os.path.join('sparse', '__init__.py') not in item
                   and os.path.join('sparse', 'sputils.py') not in item]

    if bad_filters:
        raise AssertionError(
            "warning ignore filter should not be used, instead, use\n"
            "numpy.testing.suppress_warnings (in tests only);\n"
            "found in:\n {}".format(
                "\n ".join(bad_filters)))


@pytest.mark.slow
@pytest.mark.xfail(reason="stacklevels currently missing")
def test_warning_calls_stacklevels(warning_calls):
    bad_filters, bad_stacklevels = warning_calls

    msg = ""

    if bad_filters:
        msg += ("warning ignore filter should not be used, instead, use\n"
                "numpy.testing.suppress_warnings (in tests only);\n"
                "found in:\n {}".format("\n ".join(bad_filters)))
        msg += "\n\n"

    if bad_stacklevels:
        msg += "warnings should have an appropriate stacklevel:\n {}".format(
            "\n ".join(bad_stacklevels))

    if msg:
        raise AssertionError(msg)
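The convention these scans enforce, shown as a generic illustration rather than SciPy code: a `warnings.warn` call should carry a `stacklevel`, either as a third positional argument or as a keyword, so the warning points at the caller rather than at library internals::

    import warnings

    def old_api():
        warnings.warn("old_api is deprecated, use new_api instead",
                      DeprecationWarning, stacklevel=2)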
31
venv/Lib/site-packages/scipy/_lib/uarray.py
Normal file
@@ -0,0 +1,31 @@
"""`uarray` provides functions for generating multimethods that dispatch to
multiple different backends

This should be imported, rather than `_uarray`, so that an installed version
could be used instead, if available. This means that users can call
`uarray.set_backend` directly instead of going through SciPy.

"""


# Prefer an installed version of uarray, if available
try:
    import uarray as _uarray
except ImportError:
    _has_uarray = False
else:
    from scipy._lib._pep440 import Version as _Version

    _has_uarray = _Version(_uarray.__version__) >= _Version("0.5")
    del _uarray
    del _Version


if _has_uarray:
    from uarray import *
    from uarray import _Function
else:
    from ._uarray import *
    from ._uarray import _Function

del _has_uarray
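A minimal, hedged sketch of the backend protocol this shim re-exports; `_EchoBackend` is a toy illustration of the `__ua_domain__`/`__ua_function__` protocol and is not part of SciPy::

    from scipy._lib import uarray as ua

    class _EchoBackend:
        __ua_domain__ = "numpy.scipy.fft"

        @staticmethod
        def __ua_function__(method, args, kwargs):
            # Answer every multimethod in the domain with a constant
            return "handled by _EchoBackend"

    with ua.set_backend(_EchoBackend()):
        pass  # multimethods in this domain now dispatch to _EchoBackend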