Fixed database typo and removed unnecessary class identifier.

This commit is contained in:
Batuhan Berk Başoğlu 2020-10-14 10:10:37 -04:00
parent 00ad49a143
commit 45fb349a7d
5098 changed files with 952558 additions and 85 deletions


@@ -0,0 +1,55 @@
__all__ = ['polygon_clip', 'polygon_area']
import numpy as np
def polygon_clip(rp, cp, r0, c0, r1, c1):
"""Clip a polygon to the given bounding box.
Parameters
----------
rp, cp : (N,) ndarray of double
Row and column coordinates of the polygon.
(r0, c0), (r1, c1) : double
Top-left and bottom-right coordinates of the bounding box.
Returns
-------
r_clipped, c_clipped : (M,) ndarray of double
Coordinates of clipped polygon.
Notes
-----
This makes use of Sutherland-Hodgman clipping as implemented in
AGG 2.4 and exposed in Matplotlib.
"""
from matplotlib import path, transforms
poly = path.Path(np.vstack((rp, cp)).T, closed=True)
clip_rect = transforms.Bbox([[r0, c0], [r1, c1]])
poly_clipped = poly.clip_to_bbox(clip_rect).to_polygons()[0]
# This should be fixed in matplotlib >1.5
if np.all(poly_clipped[-1] == poly_clipped[-2]):
poly_clipped = poly_clipped[:-1]
return poly_clipped[:, 0], poly_clipped[:, 1]
def polygon_area(pr, pc):
"""Compute the area of a polygon.
Parameters
----------
pr, pc : (N,) array of float
Polygon row and column coordinates.
Returns
-------
a : float
Area of the polygon.
"""
pr = np.asarray(pr)
pc = np.asarray(pc)
return 0.5 * np.abs(np.sum((pc[:-1] * pr[1:]) - (pc[1:] * pr[:-1])))
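
A minimal usage sketch, assuming the module above is importable as skimage._shared._geometry (the clipping call mirrors a case exercised in the tests further down):

    from skimage._shared._geometry import polygon_area, polygon_clip

    # Unit square: the shoelace formula gives an area of 1.0.
    print(polygon_area([0, 0, 1, 1], [0, 1, 1, 0]))

    # Clip a polygon that extends past the unit box; half of it survives,
    # so the clipped area is 0.5 (matplotlib is required for the clipping step).
    rc, cc = polygon_clip([0, -1, 0, 1], [0, 1, 2, 1], 0, 0, 1, 1)
    print(polygon_area(rc, cc))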


@@ -0,0 +1,27 @@
from tempfile import NamedTemporaryFile
from contextlib import contextmanager
import os
@contextmanager
def temporary_file(suffix=''):
"""Yield a writeable temporary filename that is deleted on context exit.
Parameters
----------
suffix : string, optional
The suffix for the file.
Examples
--------
>>> import numpy as np
>>> from skimage import io
>>> with temporary_file('.tif') as tempfile:
... im = np.arange(25, dtype=np.uint8).reshape((5, 5))
... io.imsave(tempfile, im)
... assert np.all(io.imread(tempfile) == im)
"""
tempfile_stream = NamedTemporaryFile(suffix=suffix, delete=False)
tempfile = tempfile_stream.name
tempfile_stream.close()
yield tempfile
os.remove(tempfile)
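
A small sketch of the cleanup behaviour, using the context manager defined above: the filename is writeable inside the block and the file is removed on exit.

    import os

    with temporary_file(suffix='.txt') as fname:
        with open(fname, 'w') as f:
            f.write('hello')
        assert os.path.exists(fname)

    # After the with-block the temporary file has been deleted.
    assert not os.path.exists(fname)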


@@ -0,0 +1,145 @@
from contextlib import contextmanager
import sys
import warnings
import re
import functools
import os
__all__ = ['all_warnings', 'expected_warnings', 'warn']
# A version of `warnings.warn` with a default stacklevel of 2.
# functools.partial is used so as not to increase the call stack accidentally
warn = functools.partial(warnings.warn, stacklevel=2)
@contextmanager
def all_warnings():
"""
Context for use in testing to ensure that all warnings are raised.
Examples
--------
>>> import warnings
>>> def foo():
... warnings.warn(RuntimeWarning("bar"), stacklevel=2)
We raise the warning once, while the warning filter is set to "once".
Hereafter, the warning is invisible, even with custom filters:
>>> with warnings.catch_warnings():
... warnings.simplefilter('once')
... foo() # doctest: +SKIP
We can now run ``foo()`` without a warning being raised:
>>> from numpy.testing import assert_warns
>>> foo() # doctest: +SKIP
    To catch the warning, we enlist the help of ``all_warnings``:
>>> with all_warnings():
... assert_warns(RuntimeWarning, foo)
"""
# _warnings.py is on the critical import path.
# Since this is a testing only function, we lazy import inspect.
import inspect
# Whenever a warning is triggered, Python adds a __warningregistry__
    # member to the *calling* module. The exercise here is to find
# and eradicate all those breadcrumbs that were left lying around.
#
# We proceed by first searching all parent calling frames and explicitly
# clearing their warning registries (necessary for the doctests above to
# pass). Then, we search for all submodules of skimage and clear theirs
# as well (necessary for the skimage test suite to pass).
frame = inspect.currentframe()
if frame:
for f in inspect.getouterframes(frame):
f[0].f_locals['__warningregistry__'] = {}
del frame
for mod_name, mod in list(sys.modules.items()):
try:
mod.__warningregistry__.clear()
except AttributeError:
pass
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
yield w
@contextmanager
def expected_warnings(matching):
r"""Context for use in testing to catch known warnings matching regexes
Parameters
----------
matching : None or a list of strings or compiled regexes
Regexes for the desired warning to catch
If matching is None, this behaves as a no-op.
Examples
--------
>>> import numpy as np
>>> image = np.random.randint(0, 2**16, size=(100, 100), dtype=np.uint16)
>>> # rank filters are slow when bit-depth exceeds 10 bits
>>> from skimage import filters
>>> with expected_warnings(['Bad rank filter performance']):
... median_filtered = filters.rank.median(image)
Notes
-----
Uses `all_warnings` to ensure all warnings are raised.
Upon exiting, it checks the recorded warnings for the desired matching
pattern(s).
Raises a ValueError if any match was not found or an unexpected
warning was raised.
Allows for three types of behaviors: `and`, `or`, and `optional` matches.
This is done to accommodate different build environments or loop conditions
that may produce different warnings. The behaviors can be combined.
If you pass multiple patterns, you get an orderless `and`, where all of the
warnings must be raised.
If you use the `|` operator in a pattern, you can catch one of several
warnings.
Finally, you can use `|\A\Z` in a pattern to signify it as optional.
"""
if isinstance(matching, str):
raise ValueError('``matching`` should be a list of strings and not '
'a string itself.')
# Special case for disabling the context manager
if matching is None:
yield None
return
strict_warnings = os.environ.get('SKIMAGE_TEST_STRICT_WARNINGS', '1')
if strict_warnings.lower() == 'true':
strict_warnings = True
elif strict_warnings.lower() == 'false':
strict_warnings = False
else:
strict_warnings = bool(int(strict_warnings))
with all_warnings() as w:
# enter context
yield w
# exited user context, check the recorded warnings
# Allow users to provide None
while None in matching:
matching.remove(None)
remaining = [m for m in matching if r'\A\Z' not in m.split('|')]
for warn in w:
found = False
for match in matching:
if re.search(match, str(warn.message)) is not None:
found = True
if match in remaining:
remaining.remove(match)
if strict_warnings and not found:
raise ValueError('Unexpected warning: %s' % str(warn.message))
if strict_warnings and (len(remaining) > 0):
msg = 'No warning raised matching:\n%s' % '\n'.join(remaining)
raise ValueError(msg)
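
A short sketch of the optional-match syntax described in the docstring, using the context manager defined above: the first pattern must be matched, while a pattern ending in |\A\Z may be absent without failing the check.

    import warnings

    with expected_warnings(['always seen', r'platform dependent|\A\Z']):
        warnings.warn('always seen on every run', stacklevel=2)
        # A warning matching 'platform dependent' may or may not be emitted;
        # the r'...|\A\Z' alternative marks that pattern as optional.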


@@ -0,0 +1,17 @@
"""Prefer FFTs via the new scipy.fft module when available (SciPy 1.4+)
Otherwise fall back to numpy.fft.
Like numpy 1.15+, scipy 1.3+ also uses pocketfft, but a newer
C++/pybind11 version called pypocketfft.
"""
try:
import scipy.fft
from scipy.fft import next_fast_len
fftmodule = scipy.fft
except ImportError:
import numpy.fft
fftmodule = numpy.fft
from scipy.fftpack import next_fast_len
__all__ = ['fftmodule', 'next_fast_len']
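
A small usage sketch of this shim (the import path skimage._shared.fft is an assumption about where the module lives): callers use fftmodule and next_fast_len without knowing which backend was selected.

    import numpy as np
    from skimage._shared.fft import fftmodule, next_fast_len

    signal = np.random.rand(100)
    n = next_fast_len(len(signal))       # a fast FFT length >= 100
    spectrum = fftmodule.fft(signal, n)  # same call for scipy.fft or numpy.fft
    print(n, spectrum.shape)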


@@ -0,0 +1,37 @@
#!/usr/bin/env python
import os
from skimage._build import cython
base_path = os.path.abspath(os.path.dirname(__file__))
def configuration(parent_package='', top_path=None):
from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs
config = Configuration('_shared', parent_package, top_path)
cython(['geometry.pyx',
'transform.pyx',
'interpolation.pyx',
'fast_exp.pyx'], working_path=base_path)
config.add_extension('geometry', sources=['geometry.c'])
config.add_extension('transform', sources=['transform.c'],
include_dirs=[get_numpy_include_dirs()])
config.add_extension('interpolation', sources=['interpolation.c'])
config.add_extension('fast_exp', sources=['fast_exp.c'])
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(maintainer='scikit-image Developers',
author='scikit-image Developers',
maintainer_email='scikit-image@python.org',
description='Transforms',
url='https://github.com/scikit-image/scikit-image',
license='SciPy License (BSD Style)',
**(configuration(top_path='').todict())
)


@@ -0,0 +1,270 @@
"""
Testing utilities.
"""
import os
import re
import struct
import threading
import functools
from tempfile import NamedTemporaryFile
import numpy as np
from numpy import testing
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_array_less, assert_array_almost_equal_nulp,
assert_equal, TestCase, assert_allclose,
assert_almost_equal, assert_, assert_warns,
assert_no_warnings)
import warnings
from .. import data, io
from ..util import img_as_uint, img_as_float, img_as_int, img_as_ubyte
import pytest
from ._warnings import expected_warnings
SKIP_RE = re.compile(r"(\s*>>>.*?)(\s*)#\s*skip\s+if\s+(.*)$")
skipif = pytest.mark.skipif
xfail = pytest.mark.xfail
parametrize = pytest.mark.parametrize
raises = pytest.raises
fixture = pytest.fixture
# true if python is running in 32bit mode
# Calculate the size of a void * pointer in bits
# https://docs.python.org/3/library/struct.html
arch32 = struct.calcsize("P") * 8 == 32
def assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def assert_greater(a, b, msg=None):
message = "%r is not greater than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
def doctest_skip_parser(func):
""" Decorator replaces custom skip test markup in doctests
Say a function has a docstring::
>>> something, HAVE_AMODULE, HAVE_BMODULE = 0, False, False
>>> something # skip if not HAVE_AMODULE
0
>>> something # skip if HAVE_BMODULE
0
This decorator will evaluate the expression after ``skip if``. If this
evaluates to True, then the comment is replaced by ``# doctest: +SKIP``. If
False, then the comment is just removed. The expression is evaluated in the
``globals`` scope of `func`.
For example, if the module global ``HAVE_AMODULE`` is False, and module
global ``HAVE_BMODULE`` is False, the returned function will have docstring::
>>> something # doctest: +SKIP
>>> something + else # doctest: +SKIP
>>> something # doctest: +SKIP
"""
lines = func.__doc__.split('\n')
new_lines = []
for line in lines:
match = SKIP_RE.match(line)
if match is None:
new_lines.append(line)
continue
code, space, expr = match.groups()
try:
# Works as a function decorator
if eval(expr, func.__globals__):
code = code + space + "# doctest: +SKIP"
except AttributeError:
# Works as a class decorator
if eval(expr, func.__init__.__globals__):
code = code + space + "# doctest: +SKIP"
new_lines.append(code)
func.__doc__ = "\n".join(new_lines)
return func
def roundtrip(image, plugin, suffix):
"""Save and read an image using a specified plugin"""
if '.' not in suffix:
suffix = '.' + suffix
temp_file = NamedTemporaryFile(suffix=suffix, delete=False)
fname = temp_file.name
temp_file.close()
io.imsave(fname, image, plugin=plugin)
new = io.imread(fname, plugin=plugin)
try:
os.remove(fname)
except Exception:
pass
return new
def color_check(plugin, fmt='png'):
"""Check roundtrip behavior for color images.
All major input types should be handled as ubytes and read
back correctly.
"""
img = img_as_ubyte(data.chelsea())
r1 = roundtrip(img, plugin, fmt)
testing.assert_allclose(img, r1)
img2 = img > 128
r2 = roundtrip(img2, plugin, fmt)
testing.assert_allclose(img2, r2.astype(bool))
img3 = img_as_float(img)
r3 = roundtrip(img3, plugin, fmt)
testing.assert_allclose(r3, img)
img4 = img_as_int(img)
if fmt.lower() in (('tif', 'tiff')):
img4 -= 100
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img4)
else:
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img_as_ubyte(img4))
img5 = img_as_uint(img)
r5 = roundtrip(img5, plugin, fmt)
testing.assert_allclose(r5, img)
def mono_check(plugin, fmt='png'):
"""Check the roundtrip behavior for images that support most types.
All major input types should be handled.
"""
img = img_as_ubyte(data.moon())
r1 = roundtrip(img, plugin, fmt)
testing.assert_allclose(img, r1)
img2 = img > 128
r2 = roundtrip(img2, plugin, fmt)
testing.assert_allclose(img2, r2.astype(bool))
img3 = img_as_float(img)
r3 = roundtrip(img3, plugin, fmt)
if r3.dtype.kind == 'f':
testing.assert_allclose(img3, r3)
else:
testing.assert_allclose(r3, img_as_uint(img))
img4 = img_as_int(img)
if fmt.lower() in (('tif', 'tiff')):
img4 -= 100
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img4)
else:
r4 = roundtrip(img4, plugin, fmt)
testing.assert_allclose(r4, img_as_uint(img4))
img5 = img_as_uint(img)
r5 = roundtrip(img5, plugin, fmt)
testing.assert_allclose(r5, img5)
def setup_test():
"""Default package level setup routine for skimage tests.
Import packages known to raise warnings, and then
force warnings to raise errors.
Also set the random seed to zero.
"""
warnings.simplefilter('default')
from scipy import signal, ndimage, special, optimize, linalg
from scipy.io import loadmat
from skimage import viewer
np.random.seed(0)
warnings.simplefilter('error')
def teardown_test():
"""Default package level teardown routine for skimage tests.
Restore warnings to default behavior
"""
warnings.simplefilter('default')
def fetch(data_filename):
"""Attempt to fetch data, but if unavailable, skip the tests."""
try:
return data._fetch(data_filename)
except (ConnectionError, ModuleNotFoundError):
pytest.skip(f'Unable to download {data_filename}')
def test_parallel(num_threads=2, warnings_matching=None):
"""Decorator to run the same function multiple times in parallel.
This decorator is useful to ensure that separate threads execute
concurrently and correctly while releasing the GIL.
Parameters
----------
num_threads : int, optional
The number of times the function is run in parallel.
warnings_matching: list or None
This parameter is passed on to `expected_warnings` so as not to have
race conditions with the warnings filters. A single
`expected_warnings` context manager is used for all threads.
If None, then no warnings are checked.
"""
assert num_threads > 0
def wrapper(func):
@functools.wraps(func)
def inner(*args, **kwargs):
with expected_warnings(warnings_matching):
threads = []
for i in range(num_threads - 1):
thread = threading.Thread(target=func, args=args,
kwargs=kwargs)
threads.append(thread)
for thread in threads:
thread.start()
result = func(*args, **kwargs)
for thread in threads:
thread.join()
return result
return inner
return wrapper
if __name__ == '__main__':
color_check('pil')
mono_check('pil')
mono_check('pil', 'bmp')
mono_check('pil', 'tiff')
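
A hedged sketch of how the test_parallel decorator above is typically applied: the wrapped function runs once in each worker thread and once more in the calling thread.

    from skimage._shared.testing import test_parallel

    results = []

    @test_parallel(num_threads=4)
    def append_item():
        results.append(1)

    append_item()
    assert len(results) == 4  # three worker threads plus the main thread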


@@ -0,0 +1,9 @@
from ..testing import setup_test, teardown_test
def setup():
setup_test()
def teardown():
teardown_test()


@@ -0,0 +1,21 @@
from ..fast_exp import fast_exp
import numpy as np
def test_fast_exp():
X = np.linspace(-5, 0, 5000, endpoint=True)
# Ground truth
Y = np.exp(X)
# Approximation at double precision
_y_f64 = np.array([fast_exp['float64_t'](x) for x in X])
# Approximation at single precision
_y_f32 = np.array([fast_exp['float32_t'](x) for x in X.astype('float32')],
dtype='float32')
for _y in [_y_f64, _y_f32]:
assert np.abs(Y - _y).mean() < 3e-3


@@ -0,0 +1,75 @@
from skimage._shared._geometry import polygon_clip, polygon_area
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
hand = np.array(
[[ 1.64516129, 1.16145833 ],
[ 1.64516129, 1.59375 ],
[ 1.35080645, 1.921875 ],
[ 1.375 , 2.18229167 ],
[ 1.68548387, 1.9375 ],
[ 1.60887097, 2.55208333 ],
[ 1.68548387, 2.69791667 ],
[ 1.76209677, 2.56770833 ],
[ 1.83064516, 1.97395833 ],
[ 1.89516129, 2.75 ],
[ 1.9516129 , 2.84895833 ],
[ 2.01209677, 2.76041667 ],
[ 1.99193548, 1.99479167 ],
[ 2.11290323, 2.63020833 ],
[ 2.2016129 , 2.734375 ],
[ 2.25403226, 2.60416667 ],
[ 2.14919355, 1.953125 ],
[ 2.30645161, 2.36979167 ],
[ 2.39112903, 2.36979167 ],
[ 2.41532258, 2.1875 ],
[ 2.1733871 , 1.703125 ],
[ 2.07782258, 1.16666667 ]])
def test_polygon_area():
x = [0, 0, 1, 1]
y = [0, 1, 1, 0]
assert_almost_equal(polygon_area(y, x), 1)
x = [0, 0, 1]
y = [0, 1, 1]
assert_almost_equal(polygon_area(y, x), 0.5)
x = [0, 0, 0.5, 1, 1, 0.5]
y = [0, 1, 0.5, 1, 0, 0.5]
assert_almost_equal(polygon_area(y, x), 0.5)
def test_poly_clip():
x = [0, 1, 2, 1]
y = [0, -1, 0, 1]
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
assert_equal(polygon_area(yc, xc), 0.5)
x = [-1, 1.5, 1.5, -1]
y = [.5, 0.5, 1.5, 1.5]
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
assert_equal(polygon_area(yc, xc), 0.5)
def test_hand_clip():
(r0, c0, r1, c1) = (1.0, 1.5, 2.1, 2.5)
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
assert_equal(clip_r.size, 19)
assert_equal(clip_r[0], clip_r[-1])
assert_equal(clip_c[0], clip_c[-1])
(r0, c0, r1, c1) = (1.0, 1.5, 1.7, 2.5)
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
assert_equal(clip_r.size, 6)
(r0, c0, r1, c1) = (1.0, 1.5, 1.5, 2.5)
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
assert_equal(clip_r.size, 5)


@@ -0,0 +1,27 @@
from skimage._shared.interpolation import coord_map_py
from skimage._shared.testing import assert_array_equal
def test_coord_map():
symmetric = [coord_map_py(4, n, 'S') for n in range(-6, 6)]
expected_symmetric = [2, 3, 3, 2, 1, 0, 0, 1, 2, 3, 3, 2]
assert_array_equal(symmetric, expected_symmetric)
wrap = [coord_map_py(4, n, 'W') for n in range(-6, 6)]
expected_wrap = [2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1]
assert_array_equal(wrap, expected_wrap)
edge = [coord_map_py(4, n, 'E') for n in range(-6, 6)]
expected_edge = [0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 3, 3]
assert_array_equal(edge, expected_edge)
reflect = [coord_map_py(4, n, 'R') for n in range(-6, 6)]
expected_reflect = [0, 1, 2, 3, 2, 1, 0, 1, 2, 3, 2, 1]
assert_array_equal(reflect, expected_reflect)
reflect = [coord_map_py(1, n, 'R') for n in range(-6, 6)]
expected_reflect = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
assert_array_equal(reflect, expected_reflect)
other = [coord_map_py(4, n, 'undefined') for n in range(-6, 6)]
expected_other = list(range(-6, 6))
assert_array_equal(other, expected_other)


@@ -0,0 +1,42 @@
import numpy as np
from skimage._shared.utils import safe_as_int
from skimage._shared import testing
def test_int_cast_not_possible():
with testing.raises(ValueError):
safe_as_int(7.1)
with testing.raises(ValueError):
safe_as_int([7.1, 0.9])
with testing.raises(ValueError):
safe_as_int(np.r_[7.1, 0.9])
with testing.raises(ValueError):
safe_as_int((7.1, 0.9))
with testing.raises(ValueError):
safe_as_int(((3, 4, 1),
(2, 7.6, 289)))
with testing.raises(ValueError):
safe_as_int(7.1, 0.09)
with testing.raises(ValueError):
safe_as_int([7.1, 0.9], 0.09)
with testing.raises(ValueError):
safe_as_int(np.r_[7.1, 0.9], 0.09)
with testing.raises(ValueError):
safe_as_int((7.1, 0.9), 0.09)
with testing.raises(ValueError):
safe_as_int(((3, 4, 1),
(2, 7.6, 289)), 0.25)
def test_int_cast_possible():
testing.assert_equal(safe_as_int(7.1, atol=0.11), 7)
testing.assert_equal(safe_as_int(-7.1, atol=0.11), -7)
testing.assert_equal(safe_as_int(41.9, atol=0.11), 42)
testing.assert_array_equal(safe_as_int([2, 42, 5789234.0, 87, 4]),
np.r_[2, 42, 5789234, 87, 4])
testing.assert_array_equal(safe_as_int(np.r_[[[3, 4, 1.000000001],
[7, 2, -8.999999999],
[6, 9, -4234918347.]]]),
np.r_[[[3, 4, 1],
[7, 2, -9],
[6, 9, -4234918347]]])


@@ -0,0 +1,129 @@
""" Testing decorators module
"""
import numpy as np
from numpy.testing import assert_equal
from skimage._shared.testing import doctest_skip_parser, test_parallel
from skimage._shared import testing
import pytest
from skimage._shared._warnings import expected_warnings
from warnings import warn
def test_skipper():
def f():
pass
class c():
def __init__(self):
self.me = "I think, therefore..."
docstring = \
""" Header
>>> something # skip if not HAVE_AMODULE
>>> something + else
>>> a = 1 # skip if not HAVE_BMODULE
>>> something2 # skip if HAVE_AMODULE
"""
f.__doc__ = docstring
c.__doc__ = docstring
global HAVE_AMODULE, HAVE_BMODULE
HAVE_AMODULE = False
HAVE_BMODULE = True
f2 = doctest_skip_parser(f)
c2 = doctest_skip_parser(c)
assert f is f2
assert c is c2
expected = \
""" Header
>>> something # doctest: +SKIP
>>> something + else
>>> a = 1
>>> something2
"""
assert_equal(f2.__doc__, expected)
assert_equal(c2.__doc__, expected)
HAVE_AMODULE = True
HAVE_BMODULE = False
f.__doc__ = docstring
c.__doc__ = docstring
f2 = doctest_skip_parser(f)
c2 = doctest_skip_parser(c)
assert f is f2
expected = \
""" Header
>>> something
>>> something + else
>>> a = 1 # doctest: +SKIP
>>> something2 # doctest: +SKIP
"""
assert_equal(f2.__doc__, expected)
assert_equal(c2.__doc__, expected)
del HAVE_AMODULE
f.__doc__ = docstring
c.__doc__ = docstring
with testing.raises(NameError):
doctest_skip_parser(f)
with testing.raises(NameError):
doctest_skip_parser(c)
def test_test_parallel():
state = []
@test_parallel()
def change_state1():
state.append(None)
change_state1()
assert len(state) == 2
@test_parallel(num_threads=1)
def change_state2():
state.append(None)
change_state2()
assert len(state) == 3
@test_parallel(num_threads=3)
def change_state3():
state.append(None)
change_state3()
assert len(state) == 6
def test_parallel_warning():
@test_parallel()
def change_state_warns_fails():
warn("Test warning for test parallel", stacklevel=2)
with expected_warnings(['Test warning for test parallel']):
change_state_warns_fails()
@test_parallel(warnings_matching=['Test warning for test parallel'])
def change_state_warns_passes():
warn("Test warning for test parallel", stacklevel=2)
change_state_warns_passes()
def test_expected_warnings_noop():
    # This will ensure the line below it behaves like a no-op
with expected_warnings(['Expected warnings test']):
# This should behave as a no-op
with expected_warnings(None):
warn('Expected warnings test')
if __name__ == '__main__':
np.testing.run_module_suite()


@@ -0,0 +1,129 @@
import sys
import pytest
import numpy as np
import numpy.testing as npt
from skimage._shared.utils import (check_nD, deprecate_kwarg,
_validate_interpolation_order,
change_default_value)
from skimage._shared import testing
from skimage._shared._warnings import expected_warnings
def test_change_default_value():
@change_default_value('arg1', new_value=-1, changed_version='0.12')
def foo(arg0, arg1=0, arg2=1):
"""Expected docstring"""
return arg0, arg1, arg2
@change_default_value('arg1', new_value=-1, changed_version='0.12',
warning_msg="Custom warning message")
def bar(arg0, arg1=0, arg2=1):
"""Expected docstring"""
return arg0, arg1, arg2
# Assert warning messages
with pytest.warns(FutureWarning) as record:
assert foo(0) == (0, 0, 1)
assert bar(0) == (0, 0, 1)
expected_msg = ("The new recommended value for arg1 is -1. Until "
"version 0.12, the default arg1 value is 0. From "
"version 0.12, the arg1 default value will be -1. "
"To avoid this warning, please explicitly set arg1 value.")
assert str(record[0].message) == expected_msg
assert str(record[1].message) == "Custom warning message"
# Assert that nothing happens if arg1 is set
with pytest.warns(None) as record:
# No kwargs
assert foo(0, 2) == (0, 2, 1)
assert foo(0, arg1=0) == (0, 0, 1)
# Function name and doc is preserved
assert foo.__name__ == 'foo'
if sys.flags.optimize < 2:
# if PYTHONOPTIMIZE is set to 2, docstrings are stripped
assert foo.__doc__ == 'Expected docstring'
# Assert no warning was raised
assert not record.list
def test_deprecated_kwarg():
@deprecate_kwarg({'old_arg1': 'new_arg1'})
def foo(arg0, new_arg1=1, arg2=None):
"""Expected docstring"""
return arg0, new_arg1, arg2
@deprecate_kwarg({'old_arg1': 'new_arg1'},
warning_msg="Custom warning message")
def bar(arg0, new_arg1=1, arg2=None):
"""Expected docstring"""
return arg0, new_arg1, arg2
    # Assert that a FutureWarning is raised when the deprecated
    # argument name is used and that the result is valid
with pytest.warns(FutureWarning) as record:
assert foo(0, old_arg1=1) == (0, 1, None)
assert bar(0, old_arg1=1) == (0, 1, None)
msg = ("'old_arg1' is a deprecated argument name "
"for `foo`. Please use 'new_arg1' instead.")
assert str(record[0].message) == msg
assert str(record[1].message) == "Custom warning message"
# Assert that nothing happens when the function is called with the
# new API
with pytest.warns(None) as record:
# No kwargs
assert foo(0) == (0, 1, None)
assert foo(0, 2) == (0, 2, None)
assert foo(0, 1, 2) == (0, 1, 2)
# Kwargs without deprecated argument
assert foo(0, new_arg1=1, arg2=2) == (0, 1, 2)
assert foo(0, new_arg1=2) == (0, 2, None)
assert foo(0, arg2=2) == (0, 1, 2)
assert foo(0, 1, arg2=2) == (0, 1, 2)
# Function name and doc is preserved
assert foo.__name__ == 'foo'
if sys.flags.optimize < 2:
# if PYTHONOPTIMIZE is set to 2, docstrings are stripped
assert foo.__doc__ == 'Expected docstring'
# Assert no warning was raised
assert not record.list
def test_check_nD():
z = np.random.random(200**2).reshape((200, 200))
x = z[10:30, 30:10]
with testing.raises(ValueError):
check_nD(x, 2)
@pytest.mark.parametrize('dtype', [bool, int, np.uint8, np.uint16,
float, np.float32, np.float64])
@pytest.mark.parametrize('order', [None, -1, 0, 1, 2, 3, 4, 5, 6])
def test_validate_interpolation_order(dtype, order):
if order is None:
# Default order
assert (_validate_interpolation_order(dtype, None) == 0
if dtype == bool else 1)
elif order < 0 or order > 5:
# Order not in valid range
with testing.raises(ValueError):
_validate_interpolation_order(dtype, order)
elif dtype == bool and order != 0:
# Deprecated order for bool array
with expected_warnings(["Input image dtype is bool"]):
assert _validate_interpolation_order(bool, order) == order
else:
# Valid use case
assert _validate_interpolation_order(dtype, order) == order
if __name__ == "__main__":
npt.run_module_suite()


@@ -0,0 +1,41 @@
"""Tests for the version requirement functions.
"""
import numpy as np
from numpy.testing import assert_equal
from skimage._shared import version_requirements as version_req
from skimage._shared import testing
def test_get_module_version():
assert version_req.get_module_version('numpy')
assert version_req.get_module_version('scipy')
with testing.raises(ImportError):
version_req.get_module_version('fakenumpy')
def test_is_installed():
assert version_req.is_installed('python', '>=2.7')
assert not version_req.is_installed('numpy', '<1.0')
def test_require():
# A function that only runs on Python >2.7 and numpy > 1.5 (should pass)
@version_req.require('python', '>2.7')
@version_req.require('numpy', '>1.5')
def foo():
return 1
assert_equal(foo(), 1)
# function that requires scipy < 0.1 (should fail)
@version_req.require('scipy', '<0.1')
def bar():
return 0
with testing.raises(ImportError):
bar()
def test_get_module():
assert version_req.get_module("numpy") is np


@@ -0,0 +1,37 @@
import os
from skimage._shared._warnings import expected_warnings
import pytest
@pytest.fixture(scope='function')
def setup():
# Remove any environment variable if it exists
old_strictness = os.environ.pop('SKIMAGE_TEST_STRICT_WARNINGS', None)
yield
# Add the user's desired strictness
if old_strictness is not None:
os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = old_strictness
def test_strict_warnings_default(setup):
# By default we should fail on missing expected warnings
with pytest.raises(ValueError):
with expected_warnings(['some warnings']):
pass
@pytest.mark.parametrize('strictness', ['1', 'true', 'True', 'TRUE'])
def test_strict_warning_true(setup, strictness):
os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = strictness
with pytest.raises(ValueError):
with expected_warnings(['some warnings']):
pass
@pytest.mark.parametrize('strictness', ['0', 'false', 'False', 'FALSE'])
def test_strict_warning_false(setup, strictness):
    # If the user doesn't wish to be strict about warnings,
    # the following shouldn't raise any error
os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = strictness
with expected_warnings(['some warnings']):
pass


@@ -0,0 +1,375 @@
import inspect
import warnings
import functools
import sys
import numpy as np
import numbers
from ..util import img_as_float
from ._warnings import all_warnings, warn
__all__ = ['deprecated', 'get_bound_method_class', 'all_warnings',
'safe_as_int', 'check_nD', 'check_shape_equality', 'warn']
class skimage_deprecation(Warning):
"""Create our own deprecation class, since Python >= 2.7
silences deprecations by default.
"""
pass
class change_default_value:
"""Decorator for changing the default value of an argument.
Parameters
----------
arg_name: str
The name of the argument to be updated.
new_value: any
The argument new value.
changed_version : str
The package version in which the change will be introduced.
warning_msg: str
Optional warning message. If None, a generic warning message
is used.
"""
def __init__(self, arg_name, *, new_value, changed_version,
warning_msg=None):
self.arg_name = arg_name
self.new_value = new_value
self.warning_msg = warning_msg
self.changed_version = changed_version
def __call__(self, func):
parameters = inspect.signature(func).parameters
arg_idx = list(parameters.keys()).index(self.arg_name)
old_value = parameters[self.arg_name].default
if self.warning_msg is None:
self.warning_msg = (
f"The new recommended value for {self.arg_name} is "
f"{self.new_value}. Until version {self.changed_version}, "
f"the default {self.arg_name} value is {old_value}. "
f"From version {self.changed_version}, the {self.arg_name} "
f"default value will be {self.new_value}. To avoid "
f"this warning, please explicitly set {self.arg_name} value.")
@functools.wraps(func)
def fixed_func(*args, **kwargs):
if len(args) < arg_idx + 1 and self.arg_name not in kwargs.keys():
# warn that arg_name default value changed:
warnings.warn(self.warning_msg, FutureWarning, stacklevel=2)
return func(*args, **kwargs)
return fixed_func
class deprecate_kwarg:
"""Decorator ensuring backward compatibility when argument names are
modified in a function definition.
Parameters
----------
arg_mapping: dict
Mapping between the function's old argument names and the new
ones.
warning_msg: str
Optional warning message. If None, a generic warning message
is used.
removed_version : str
The package version in which the deprecated argument will be
removed.
"""
def __init__(self, kwarg_mapping, warning_msg=None, removed_version=None):
self.kwarg_mapping = kwarg_mapping
if warning_msg is None:
self.warning_msg = ("'{old_arg}' is a deprecated argument name "
"for `{func_name}`. ")
if removed_version is not None:
self.warning_msg += ("It will be removed in version {}. "
.format(removed_version))
self.warning_msg += "Please use '{new_arg}' instead."
else:
self.warning_msg = warning_msg
def __call__(self, func):
@functools.wraps(func)
def fixed_func(*args, **kwargs):
for old_arg, new_arg in self.kwarg_mapping.items():
if old_arg in kwargs:
# warn that the function interface has changed:
warnings.warn(self.warning_msg.format(
old_arg=old_arg, func_name=func.__name__,
new_arg=new_arg), FutureWarning, stacklevel=2)
# Substitute new_arg to old_arg
kwargs[new_arg] = kwargs.pop(old_arg)
# Call the function with the fixed arguments
return func(*args, **kwargs)
return fixed_func
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
removed_version : str
The package version in which the deprecated function will be removed.
"""
def __init__(self, alt_func=None, behavior='warn', removed_version=None):
self.alt_func = alt_func
self.behavior = behavior
self.removed_version = removed_version
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use ``%s`` instead.' % self.alt_func
rmv_msg = ''
if self.removed_version is not None:
rmv_msg = (' and will be removed in version %s' %
self.removed_version)
msg = ('Function ``%s`` is deprecated' % func.__name__ +
rmv_msg + '.' + alt_msg)
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
func_code = func.__code__
warnings.simplefilter('always', skimage_deprecation)
warnings.warn_explicit(msg,
category=skimage_deprecation,
filename=func_code.co_filename,
lineno=func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise skimage_deprecation(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n ' + wrapped.__doc__
return wrapped
def get_bound_method_class(m):
"""Return the class for a bound method.
"""
return m.im_class if sys.version < '3' else m.__self__.__class__
def safe_as_int(val, atol=1e-3):
"""
Attempt to safely cast values to integer format.
Parameters
----------
val : scalar or iterable of scalars
Number or container of numbers which are intended to be interpreted as
integers, e.g., for indexing purposes, but which may not carry integer
type.
atol : float
Absolute tolerance away from nearest integer to consider values in
``val`` functionally integers.
Returns
-------
val_int : NumPy scalar or ndarray of dtype `np.int64`
Returns the input value(s) coerced to dtype `np.int64` assuming all
were within ``atol`` of the nearest integer.
Notes
-----
This operation calculates ``val`` modulo 1, which returns the mantissa of
all values. Then all mantissas greater than 0.5 are subtracted from one.
Finally, the absolute tolerance from zero is calculated. If it is less
than ``atol`` for all value(s) in ``val``, they are rounded and returned
in an integer array. Or, if ``val`` was a scalar, a NumPy scalar type is
returned.
If any value(s) are outside the specified tolerance, an informative error
is raised.
Examples
--------
>>> safe_as_int(7.0)
7
>>> safe_as_int([9, 4, 2.9999999999])
array([9, 4, 3])
>>> safe_as_int(53.1)
Traceback (most recent call last):
...
ValueError: Integer argument required but received 53.1, check inputs.
>>> safe_as_int(53.01, atol=0.01)
53
"""
mod = np.asarray(val) % 1 # Extract mantissa
# Check for and subtract any mod values > 0.5 from 1
if mod.ndim == 0: # Scalar input, cannot be indexed
if mod > 0.5:
mod = 1 - mod
else: # Iterable input, now ndarray
mod[mod > 0.5] = 1 - mod[mod > 0.5] # Test on each side of nearest int
try:
np.testing.assert_allclose(mod, 0, atol=atol)
except AssertionError:
raise ValueError("Integer argument required but received "
"{0}, check inputs.".format(val))
return np.round(val).astype(np.int64)
def check_shape_equality(im1, im2):
"""Raise an error if the shape do not match."""
if not im1.shape == im2.shape:
raise ValueError('Input images must have the same dimensions.')
return
def check_nD(array, ndim, arg_name='image'):
"""
Verify an array meets the desired ndims and array isn't empty.
Parameters
----------
array : array-like
Input array to be validated
ndim : int or iterable of ints
Allowable ndim or ndims for the array.
arg_name : str, optional
The name of the array in the original function.
"""
array = np.asanyarray(array)
msg_incorrect_dim = "The parameter `%s` must be a %s-dimensional array"
msg_empty_array = "The parameter `%s` cannot be an empty array"
if isinstance(ndim, int):
ndim = [ndim]
if array.size == 0:
raise ValueError(msg_empty_array % (arg_name))
    if array.ndim not in ndim:
raise ValueError(msg_incorrect_dim % (arg_name, '-or-'.join([str(n) for n in ndim])))
def check_random_state(seed):
"""Turn seed into a `np.random.RandomState` instance.
Parameters
----------
seed : None, int or np.random.RandomState
If `seed` is None, return the RandomState singleton used by `np.random`.
If `seed` is an int, return a new RandomState instance seeded with `seed`.
If `seed` is already a RandomState instance, return it.
Raises
------
ValueError
If `seed` is of the wrong type.
"""
# Function originally from scikit-learn's module sklearn.utils.validation
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, (numbers.Integral, np.integer)):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
def convert_to_float(image, preserve_range):
"""Convert input image to float image with the appropriate range.
Parameters
----------
image : ndarray
Input image.
preserve_range : bool
Determines if the range of the image should be kept or transformed
using img_as_float. Also see
https://scikit-image.org/docs/dev/user_guide/data_types.html
    Notes
    -----
* Input images with `float32` data type are not upcast.
Returns
-------
image : ndarray
Transformed version of the input.
"""
if preserve_range:
# Convert image to double only if it is not single or double
# precision float
if image.dtype.char not in 'df':
image = image.astype(float)
else:
image = img_as_float(image)
return image
def _validate_interpolation_order(image_dtype, order):
"""Validate and return spline interpolation's order.
Parameters
----------
image_dtype : dtype
Image dtype.
order : int, optional
The order of the spline interpolation. The order has to be in
the range 0-5. See `skimage.transform.warp` for detail.
Returns
-------
order : int
if input order is None, returns 0 if image_dtype is bool and 1
otherwise. Otherwise, image_dtype is checked and input order
is validated accordingly (order > 0 is not supported for bool
image dtype)
"""
if order is None:
return 0 if image_dtype == bool else 1
if order < 0 or order > 5:
raise ValueError("Spline interpolation order has to be in the "
"range 0-5.")
if image_dtype == bool and order != 0:
warn("Input image dtype is bool. Interpolation is not defined "
"with bool data type. Please set order to 0 or explicitely "
"cast input image to another data type. Starting from version "
"0.19 a ValueError will be raised instead of this warning.",
FutureWarning, stacklevel=2)
return order
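
A brief sketch of the two validators above that carry no doctest (import path skimage._shared.utils assumed): check_nD rejects arrays of the wrong dimensionality and check_random_state normalises the usual seed inputs.

    import numpy as np
    from skimage._shared.utils import check_nD, check_random_state

    image = np.zeros((4, 4))
    check_nD(image, 2)                        # passes silently for a 2-D array
    try:
        check_nD(image, 3, arg_name='volume')
    except ValueError as err:
        print(err)  # The parameter `volume` must be a 3-dimensional array

    rng = check_random_state(0)               # int seed -> fresh RandomState
    assert check_random_state(rng) is rng     # an existing RandomState passes through
    print(rng.randint(0, 10))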


@@ -0,0 +1,180 @@
import sys
def ensure_python_version(min_version):
if not isinstance(min_version, tuple):
min_version = (min_version, )
if sys.version_info < min_version:
# since ensure_python_version is in the critical import path,
# we lazy import it.
from platform import python_version
raise ImportError("""
You are running scikit-image on an unsupported version of Python.
Unfortunately, scikit-image 0.15 and above no longer work with your installed
version of Python (%s). You therefore have two options: either upgrade to
Python %s, or install an older version of scikit-image.
For Python 2.7 or Python 3.4, use
$ pip install 'scikit-image<0.15'
Please also consider updating `pip` and `setuptools`:
$ pip install pip setuptools --upgrade
Newer versions of these tools avoid installing packages incompatible
with your version of Python.
""" % (python_version(), '.'.join([str(v) for v in min_version])))
def _check_version(actver, version, cmp_op):
"""
Check version string of an active module against a required version.
If dev/prerelease tags result in TypeError for string-number comparison,
it is assumed that the dependency is satisfied.
Users on dev branches are responsible for keeping their own packages up to
date.
Copyright (C) 2013 The IPython Development Team
Distributed under the terms of the BSD License.
"""
# since version_requirements.py is in the critical import path, we
# lazy import it
from distutils.version import LooseVersion
try:
if cmp_op == '>':
return LooseVersion(actver) > LooseVersion(version)
elif cmp_op == '>=':
return LooseVersion(actver) >= LooseVersion(version)
elif cmp_op == '=':
return LooseVersion(actver) == LooseVersion(version)
elif cmp_op == '<':
return LooseVersion(actver) < LooseVersion(version)
else:
return False
except TypeError:
return True
def get_module_version(module_name):
"""Return module version or None if version can't be retrieved."""
mod = __import__(module_name,
fromlist=[module_name.rpartition('.')[-1]])
return getattr(mod, '__version__', getattr(mod, 'VERSION', None))
def is_installed(name, version=None):
"""Test if *name* is installed.
Parameters
----------
name : str
Name of module or "python"
version : str, optional
Version string to test against.
If version is not None, checking version
(must have an attribute named '__version__' or 'VERSION')
Version may start with =, >=, > or < to specify the exact requirement
Returns
-------
out : bool
True if `name` is installed matching the optional version.
Notes
-----
Original Copyright (C) 2009-2011 Pierre Raybaut
Licensed under the terms of the MIT License.
"""
if name.lower() == 'python':
actver = sys.version[:6]
else:
try:
actver = get_module_version(name)
except ImportError:
return False
if version is None:
return True
else:
# since version_requirements is in the critical import path,
# we lazy import re
import re
match = re.search('[0-9]', version)
assert match is not None, "Invalid version number"
symb = version[:match.start()]
if not symb:
symb = '='
assert symb in ('>=', '>', '=', '<'),\
"Invalid version condition '%s'" % symb
version = version[match.start():]
return _check_version(actver, version, symb)
def require(name, version=None):
"""Return decorator that forces a requirement for a function or class.
Parameters
----------
name : str
Name of module or "python".
version : str, optional
Version string to test against.
If version is not None, checking version
(must have an attribute named '__version__' or 'VERSION')
Version may start with =, >=, > or < to specify the exact requirement
Returns
-------
func : function
A decorator that raises an ImportError if a function is run
in the absence of the input dependency.
"""
# since version_requirements is in the critical import path, we lazy import
# functools
import functools
def decorator(obj):
@functools.wraps(obj)
def func_wrapped(*args, **kwargs):
if is_installed(name, version):
return obj(*args, **kwargs)
else:
msg = '"%s" in "%s" requires "%s'
msg = msg % (obj, obj.__module__, name)
                if version is not None:
msg += " %s" % version
raise ImportError(msg + '"')
return func_wrapped
return decorator
def get_module(module_name, version=None):
"""Return a module object of name *module_name* if installed.
Parameters
----------
module_name : str
Name of module.
version : str, optional
Version string to test against.
If version is not None, checking version
(must have an attribute named '__version__' or 'VERSION')
Version may start with =, >=, > or < to specify the exact requirement
Returns
-------
mod : module or None
Module if *module_name* is installed matching the optional version
or None otherwise.
"""
if not is_installed(module_name, version):
return None
return __import__(module_name,
fromlist=[module_name.rpartition('.')[-1]])
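
Finally, a short usage sketch of these version helpers, mirroring the calls already made in the test file above:

    from skimage._shared import version_requirements as version_req

    print(version_req.get_module_version('numpy'))      # e.g. '1.19.5'
    print(version_req.is_installed('numpy', '>=1.0'))   # True

    @version_req.require('numpy', '>=1.0')
    def uses_numpy():
        return 1

    print(uses_numpy())  # 1; an unmet requirement would raise ImportError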