Created starter files for the project.

Batuhan Berk Başoğlu 2020-10-02 21:26:03 -04:00
commit 73f0c0db42
1992 changed files with 769897 additions and 0 deletions


@@ -0,0 +1,365 @@
import sys
import pytest
import weakref

import numpy as np
from numpy.ctypeslib import ndpointer, load_library, as_array
from numpy.distutils.misc_util import get_shared_lib_extension
from numpy.testing import assert_, assert_array_equal, assert_raises, assert_equal

try:
    import ctypes
except ImportError:
    ctypes = None
else:
    cdll = None
    test_cdll = None
    if hasattr(sys, 'gettotalrefcount'):
        try:
            cdll = load_library('_multiarray_umath_d', np.core._multiarray_umath.__file__)
        except OSError:
            pass
        try:
            test_cdll = load_library('_multiarray_tests', np.core._multiarray_tests.__file__)
        except OSError:
            pass
    if cdll is None:
        cdll = load_library('_multiarray_umath', np.core._multiarray_umath.__file__)
    if test_cdll is None:
        test_cdll = load_library('_multiarray_tests', np.core._multiarray_tests.__file__)

    c_forward_pointer = test_cdll.forward_pointer
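    # Note: forward_pointer is a small helper exported by the _multiarray_tests
    # extension; as used in the tests below it appears to simply return the
    # pointer it is given, which lets arrays be round-tripped through a real
    # C call.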

@pytest.mark.skipif(ctypes is None,
                    reason="ctypes not available in this python")
@pytest.mark.skipif(sys.platform == 'cygwin',
                    reason="Known to fail on cygwin")
class TestLoadLibrary:
    def test_basic(self):
        try:
            # Should succeed
            load_library('_multiarray_umath', np.core._multiarray_umath.__file__)
        except ImportError as e:
            msg = ("ctypes is not available on this python: skipping the test"
                   " (import error was: %s)" % str(e))
            print(msg)

    def test_basic2(self):
        # Regression for #801: load_library with a full library name
        # (including extension) does not work.
        try:
            try:
                so = get_shared_lib_extension(is_python_ext=True)
                # Should succeed
                load_library('_multiarray_umath%s' % so, np.core._multiarray_umath.__file__)
            except ImportError:
                print("No distutils available, skipping test.")
        except ImportError as e:
            msg = ("ctypes is not available on this python: skipping the test"
                   " (import error was: %s)" % str(e))
            print(msg)


class TestNdpointer:
    def test_dtype(self):
        dt = np.intc
        p = ndpointer(dtype=dt)
        assert_(p.from_param(np.array([1], dt)))
        dt = '<i4'
        p = ndpointer(dtype=dt)
        assert_(p.from_param(np.array([1], dt)))
        dt = np.dtype('>i4')
        p = ndpointer(dtype=dt)
        p.from_param(np.array([1], dt))
        assert_raises(TypeError, p.from_param,
                      np.array([1], dt.newbyteorder('swap')))
        dtnames = ['x', 'y']
        dtformats = [np.intc, np.float64]
        dtdescr = {'names': dtnames, 'formats': dtformats}
        dt = np.dtype(dtdescr)
        p = ndpointer(dtype=dt)
        assert_(p.from_param(np.zeros((10,), dt)))
        samedt = np.dtype(dtdescr)
        p = ndpointer(dtype=samedt)
        assert_(p.from_param(np.zeros((10,), dt)))
        dt2 = np.dtype(dtdescr, align=True)
        if dt.itemsize != dt2.itemsize:
            assert_raises(TypeError, p.from_param, np.zeros((10,), dt2))
        else:
            assert_(p.from_param(np.zeros((10,), dt2)))

    def test_ndim(self):
        p = ndpointer(ndim=0)
        assert_(p.from_param(np.array(1)))
        assert_raises(TypeError, p.from_param, np.array([1]))
        p = ndpointer(ndim=1)
        assert_raises(TypeError, p.from_param, np.array(1))
        assert_(p.from_param(np.array([1])))
        p = ndpointer(ndim=2)
        assert_(p.from_param(np.array([[1]])))

    def test_shape(self):
        p = ndpointer(shape=(1, 2))
        assert_(p.from_param(np.array([[1, 2]])))
        assert_raises(TypeError, p.from_param, np.array([[1], [2]]))
        p = ndpointer(shape=())
        assert_(p.from_param(np.array(1)))

    def test_flags(self):
        x = np.array([[1, 2], [3, 4]], order='F')
        p = ndpointer(flags='FORTRAN')
        assert_(p.from_param(x))
        p = ndpointer(flags='CONTIGUOUS')
        assert_raises(TypeError, p.from_param, x)
        p = ndpointer(flags=x.flags.num)
        assert_(p.from_param(x))
        assert_raises(TypeError, p.from_param, np.array([[1, 2], [3, 4]]))

    def test_cache(self):
        assert_(ndpointer(dtype=np.float64) is ndpointer(dtype=np.float64))

        # shapes are normalized
        assert_(ndpointer(shape=2) is ndpointer(shape=(2,)))

        # 1.12 <= v < 1.16 had a bug that made these fail
        assert_(ndpointer(shape=2) is not ndpointer(ndim=2))
        assert_(ndpointer(ndim=2) is not ndpointer(shape=2))
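
# A minimal usage sketch of ndpointer outside the test suite (illustrative
# only; 'libexample' and scale() are hypothetical names, not part of numpy):
#
#     lib = np.ctypeslib.load_library('libexample', '.')   # assumed library/path
#     lib.scale.argtypes = [ndpointer(dtype=np.float64, ndim=1,
#                                     flags='C_CONTIGUOUS'),
#                           ctypes.c_int]
#     lib.scale.restype = None
#     arr = np.arange(5, dtype=np.float64)
#     lib.scale(arr, arr.size)   # arr is passed as a checked data pointer
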
@pytest.mark.skipif(ctypes is None,
                    reason="ctypes not available on this python installation")
class TestNdpointerCFunc:
    def test_arguments(self):
        """ Test that arguments are coerced from arrays """
        c_forward_pointer.restype = ctypes.c_void_p
        c_forward_pointer.argtypes = (ndpointer(ndim=2),)

        c_forward_pointer(np.zeros((2, 3)))
        # too many dimensions
        assert_raises(
            ctypes.ArgumentError, c_forward_pointer, np.zeros((2, 3, 4)))

    @pytest.mark.parametrize(
        'dt', [
            float,
            np.dtype(dict(
                formats=['<i4', '<i4'],
                names=['a', 'b'],
                offsets=[0, 2],
                itemsize=6
            ))
        ], ids=[
            'float',
            'overlapping-fields'
        ]
    )
    def test_return(self, dt):
        """ Test that return values are coerced to arrays """
        arr = np.zeros((2, 3), dt)
        ptr_type = ndpointer(shape=arr.shape, dtype=arr.dtype)
        c_forward_pointer.restype = ptr_type
        c_forward_pointer.argtypes = (ptr_type,)

        # check that the arrays are equivalent views on the same data
        arr2 = c_forward_pointer(arr)
        assert_equal(arr2.dtype, arr.dtype)
        assert_equal(arr2.shape, arr.shape)
        assert_equal(
            arr2.__array_interface__['data'],
            arr.__array_interface__['data']
        )

    def test_vague_return_value(self):
        """ Test that vague ndpointer return values do not promote to arrays """
        arr = np.zeros((2, 3))
        ptr_type = ndpointer(dtype=arr.dtype)
        c_forward_pointer.restype = ptr_type
        c_forward_pointer.argtypes = (ptr_type,)

        ret = c_forward_pointer(arr)
        assert_(isinstance(ret, ptr_type))

@pytest.mark.skipif(ctypes is None,
                    reason="ctypes not available on this python installation")
class TestAsArray:
    def test_array(self):
        from ctypes import c_int

        pair_t = c_int * 2
        a = as_array(pair_t(1, 2))
        assert_equal(a.shape, (2,))
        assert_array_equal(a, np.array([1, 2]))
        a = as_array((pair_t * 3)(pair_t(1, 2), pair_t(3, 4), pair_t(5, 6)))
        assert_equal(a.shape, (3, 2))
        assert_array_equal(a, np.array([[1, 2], [3, 4], [5, 6]]))

    def test_pointer(self):
        from ctypes import c_int, cast, POINTER

        p = cast((c_int * 10)(*range(10)), POINTER(c_int))

        a = as_array(p, shape=(10,))
        assert_equal(a.shape, (10,))
        assert_array_equal(a, np.arange(10))

        a = as_array(p, shape=(2, 5))
        assert_equal(a.shape, (2, 5))
        assert_array_equal(a, np.arange(10).reshape((2, 5)))

        # shape argument is required
        assert_raises(TypeError, as_array, p)

    def test_struct_array_pointer(self):
        from ctypes import c_int16, Structure, pointer

        class Struct(Structure):
            _fields_ = [('a', c_int16)]

        Struct3 = 3 * Struct

        c_array = (2 * Struct3)(
            Struct3(Struct(a=1), Struct(a=2), Struct(a=3)),
            Struct3(Struct(a=4), Struct(a=5), Struct(a=6))
        )

        expected = np.array([
            [(1,), (2,), (3,)],
            [(4,), (5,), (6,)],
        ], dtype=[('a', np.int16)])

        def check(x):
            assert_equal(x.dtype, expected.dtype)
            assert_equal(x, expected)

        # all of these should be equivalent
        check(as_array(c_array))
        check(as_array(pointer(c_array), shape=()))
        check(as_array(pointer(c_array[0]), shape=(2,)))
        check(as_array(pointer(c_array[0][0]), shape=(2, 3)))

    def test_reference_cycles(self):
        # related to gh-6511
        import ctypes

        # create array to work with
        # don't use int/long to avoid running into bpo-10746
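        # (bpo-10746: on some builds ctypes reports an incorrect PEP 3118
        # format code for c_long/c_bool; using np.short sidesteps that here.)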
        N = 100
        a = np.arange(N, dtype=np.short)

        # get pointer to array
        pnt = np.ctypeslib.as_ctypes(a)

        with np.testing.assert_no_gc_cycles():
            # decay the array above to a pointer to its first element
            newpnt = ctypes.cast(pnt, ctypes.POINTER(ctypes.c_short))
            # and construct an array using this data
            b = np.ctypeslib.as_array(newpnt, (N,))
            # now delete both, which should cleanup both objects
            del newpnt, b

    def test_segmentation_fault(self):
        arr = np.zeros((224, 224, 3))
        c_arr = np.ctypeslib.as_ctypes(arr)
        arr_ref = weakref.ref(arr)
        del arr

        # check the reference wasn't cleaned up
        assert_(arr_ref() is not None)

        # check we avoid the segfault
        c_arr[0][0][0]

@pytest.mark.skipif(ctypes is None,
                    reason="ctypes not available on this python installation")
class TestAsCtypesType:
    """ Test conversion from dtypes to ctypes types """
    def test_scalar(self):
        dt = np.dtype('<u2')
        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_equal(ct, ctypes.c_uint16.__ctype_le__)

        dt = np.dtype('>u2')
        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_equal(ct, ctypes.c_uint16.__ctype_be__)

        dt = np.dtype('u2')
        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_equal(ct, ctypes.c_uint16)

    def test_subarray(self):
        dt = np.dtype((np.int32, (2, 3)))
        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_equal(ct, 2 * (3 * ctypes.c_int32))

    def test_structure(self):
        dt = np.dtype([
            ('a', np.uint16),
            ('b', np.uint32),
        ])

        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_(issubclass(ct, ctypes.Structure))
        assert_equal(ctypes.sizeof(ct), dt.itemsize)
        assert_equal(ct._fields_, [
            ('a', ctypes.c_uint16),
            ('b', ctypes.c_uint32),
        ])

    def test_structure_aligned(self):
        dt = np.dtype([
            ('a', np.uint16),
            ('b', np.uint32),
        ], align=True)

        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_(issubclass(ct, ctypes.Structure))
        assert_equal(ctypes.sizeof(ct), dt.itemsize)
        assert_equal(ct._fields_, [
            ('a', ctypes.c_uint16),
            ('', ctypes.c_char * 2),  # padding
            ('b', ctypes.c_uint32),
        ])

    def test_union(self):
        dt = np.dtype(dict(
            names=['a', 'b'],
            offsets=[0, 0],
            formats=[np.uint16, np.uint32]
        ))

        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_(issubclass(ct, ctypes.Union))
        assert_equal(ctypes.sizeof(ct), dt.itemsize)
        assert_equal(ct._fields_, [
            ('a', ctypes.c_uint16),
            ('b', ctypes.c_uint32),
        ])

    def test_padded_union(self):
        dt = np.dtype(dict(
            names=['a', 'b'],
            offsets=[0, 0],
            formats=[np.uint16, np.uint32],
            itemsize=5,
        ))

        ct = np.ctypeslib.as_ctypes_type(dt)
        assert_(issubclass(ct, ctypes.Union))
        assert_equal(ctypes.sizeof(ct), dt.itemsize)
        assert_equal(ct._fields_, [
            ('a', ctypes.c_uint16),
            ('b', ctypes.c_uint32),
            ('', ctypes.c_char * 5),  # padding
        ])

    def test_overlapping(self):
        dt = np.dtype(dict(
            names=['a', 'b'],
            offsets=[0, 2],
            formats=[np.uint32, np.uint32]
        ))
        assert_raises(NotImplementedError, np.ctypeslib.as_ctypes_type, dt)


@@ -0,0 +1,58 @@
import numpy as np
import numpy.matlib
from numpy.testing import assert_array_equal, assert_


def test_empty():
    x = numpy.matlib.empty((2,))
    assert_(isinstance(x, np.matrix))
    assert_(x.shape == (1, 2))

def test_ones():
    assert_array_equal(numpy.matlib.ones((2, 3)),
                       np.matrix([[ 1., 1., 1.],
                                  [ 1., 1., 1.]]))
    assert_array_equal(numpy.matlib.ones(2), np.matrix([[ 1., 1.]]))


def test_zeros():
    assert_array_equal(numpy.matlib.zeros((2, 3)),
                       np.matrix([[ 0., 0., 0.],
                                  [ 0., 0., 0.]]))
    assert_array_equal(numpy.matlib.zeros(2), np.matrix([[ 0., 0.]]))


def test_identity():
    x = numpy.matlib.identity(2, dtype=int)
    assert_array_equal(x, np.matrix([[1, 0], [0, 1]]))


def test_eye():
    xc = numpy.matlib.eye(3, k=1, dtype=int)
    assert_array_equal(xc, np.matrix([[ 0, 1, 0],
                                      [ 0, 0, 1],
                                      [ 0, 0, 0]]))
    assert xc.flags.c_contiguous
    assert not xc.flags.f_contiguous

    xf = numpy.matlib.eye(3, 4, dtype=int, order='F')
    assert_array_equal(xf, np.matrix([[ 1, 0, 0, 0],
                                      [ 0, 1, 0, 0],
                                      [ 0, 0, 1, 0]]))
    assert not xf.flags.c_contiguous
    assert xf.flags.f_contiguous


def test_rand():
    x = numpy.matlib.rand(3)
    # check matrix type, array would have shape (3,)
    assert_(x.ndim == 2)


def test_randn():
    x = np.matlib.randn(3)
    # check matrix type, array would have shape (3,)
    assert_(x.ndim == 2)


def test_repmat():
    a1 = np.arange(4)
    x = numpy.matlib.repmat(a1, 2, 2)
    y = np.array([[0, 1, 2, 3, 0, 1, 2, 3],
                  [0, 1, 2, 3, 0, 1, 2, 3]])
    assert_array_equal(x, y)


@@ -0,0 +1,17 @@
import re

import numpy as np
from numpy.testing import assert_


def test_valid_numpy_version():
    # Verify that the numpy version is a valid one (no .post suffix or other
    # nonsense). See gh-6431 for an issue caused by an invalid version.
    version_pattern = r"^[0-9]+\.[0-9]+\.[0-9]+(|a[0-9]|b[0-9]|rc[0-9])"
    dev_suffix = r"(\.dev0\+([0-9a-f]{7}|Unknown))"
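    # Illustrative strings (not part of the test itself): '1.19.2' and
    # '1.20.0rc1' satisfy version_pattern, while a development build would
    # look like '1.20.0.dev0+1a2b3c4' (7 hex digits) or '1.20.0.dev0+Unknown'.
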
    if np.version.release:
        res = re.match(version_pattern, np.__version__)
    else:
        res = re.match(version_pattern + dev_suffix, np.__version__)

    assert_(res is not None, np.__version__)


@@ -0,0 +1,490 @@
import sys
import subprocess
import pkgutil
import types
import importlib
import warnings

import numpy as np
import numpy
import pytest

try:
    import ctypes
except ImportError:
    ctypes = None


def check_dir(module, module_name=None):
    """Returns a mapping of all objects with the wrong __module__ attribute."""
    if module_name is None:
        module_name = module.__name__
    results = {}
    for name in dir(module):
        item = getattr(module, name)
        if (hasattr(item, '__module__') and hasattr(item, '__name__')
                and item.__module__ != module_name):
            results[name] = item.__module__ + '.' + item.__name__
    return results
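
# For illustration (hypothetical values): a clean submodule returns an empty
# dict, e.g. check_dir(np.fft) == {}, while a leaked private name would show
# up as something like {'helper': 'numpy.fft.helper.helper'}.
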
def test_numpy_namespace():
    # None of these objects are publicly documented to be part of the main
    # NumPy namespace (some are useful though, others need to be cleaned up)
    undocumented = {
        'Tester': 'numpy.testing._private.nosetester.NoseTester',
        '_add_newdoc_ufunc': 'numpy.core._multiarray_umath._add_newdoc_ufunc',
        'add_docstring': 'numpy.core._multiarray_umath.add_docstring',
        'add_newdoc': 'numpy.core.function_base.add_newdoc',
        'add_newdoc_ufunc': 'numpy.core._multiarray_umath._add_newdoc_ufunc',
        'byte_bounds': 'numpy.lib.utils.byte_bounds',
        'compare_chararrays': 'numpy.core._multiarray_umath.compare_chararrays',
        'deprecate': 'numpy.lib.utils.deprecate',
        'deprecate_with_doc': 'numpy.lib.utils.<lambda>',
        'disp': 'numpy.lib.function_base.disp',
        'fastCopyAndTranspose': 'numpy.core._multiarray_umath._fastCopyAndTranspose',
        'get_array_wrap': 'numpy.lib.shape_base.get_array_wrap',
        'get_include': 'numpy.lib.utils.get_include',
        'mafromtxt': 'numpy.lib.npyio.mafromtxt',
        'ndfromtxt': 'numpy.lib.npyio.ndfromtxt',
        'recfromcsv': 'numpy.lib.npyio.recfromcsv',
        'recfromtxt': 'numpy.lib.npyio.recfromtxt',
        'safe_eval': 'numpy.lib.utils.safe_eval',
        'set_string_function': 'numpy.core.arrayprint.set_string_function',
        'show_config': 'numpy.__config__.show',
        'who': 'numpy.lib.utils.who',
    }
    # These built-in types are re-exported by numpy.
    builtins = {
        'bool': 'builtins.bool',
        'complex': 'builtins.complex',
        'float': 'builtins.float',
        'int': 'builtins.int',
        'long': 'builtins.int',
        'object': 'builtins.object',
        'str': 'builtins.str',
        'unicode': 'builtins.str',
    }
    whitelist = dict(undocumented, **builtins)
    bad_results = check_dir(np)
    # pytest gives better error messages with the builtin assert than with
    # assert_equal
    assert bad_results == whitelist

@pytest.mark.parametrize('name', ['testing', 'Tester'])
def test_import_lazy_import(name):
    """Make sure we can actually use the modules we lazy load.

    While not exported as part of the public API, these names were always
    accessible. With the use of __getattr__ and __dir__ that is no longer
    automatic, and a badly implemented __getattr__ can even recurse
    infinitely.

    Accessing the attribute in a subprocess is the only way found to force
    the failure to appear for badly implemented code.

    We also test for the presence of the lazily imported modules in dir().
    """
    exe = (sys.executable, '-c', "import numpy; numpy." + name)
    result = subprocess.check_output(exe)
    assert not result

    # Make sure they are still in the __dir__
    assert name in dir(np)

def test_dir_testing():
    """Assert that dir(np) contains no duplicates, in particular that the
    "testing"/"Tester" attributes each appear only once."""
    assert len(dir(np)) == len(set(dir(np)))

def test_numpy_linalg():
    bad_results = check_dir(np.linalg)
    assert bad_results == {}


def test_numpy_fft():
    bad_results = check_dir(np.fft)
    assert bad_results == {}


@pytest.mark.skipif(ctypes is None,
                    reason="ctypes not available in this python")
def test_NPY_NO_EXPORT():
    cdll = ctypes.CDLL(np.core._multiarray_tests.__file__)
    # Make sure an arbitrary NPY_NO_EXPORT function is actually hidden
    f = getattr(cdll, 'test_not_exported', None)
    assert f is None, ("'test_not_exported' is mistakenly exported, "
                       "NPY_NO_EXPORT does not work")


# Historically NumPy has not used leading underscores for private submodules
# much. This has resulted in lots of things that look like public modules
# (i.e. things that can be imported as `import numpy.somesubmodule.somefile`),
# but were never intended to be public. The PUBLIC_MODULES list contains
# modules that are either public because they were meant to be, or because they
# contain public functions/objects that aren't present in any other namespace
# for whatever reason and therefore should be treated as public.
#
# The PRIVATE_BUT_PRESENT_MODULES list contains modules that look public (lack
# of underscores) but should not be used. For many of those modules the
# current status is fine. For others it may make sense to work on making them
# private, to clean up our public API and avoid confusion.
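#
# For example (illustrative): `numpy.fft` below is deliberately public, while
# `numpy.fft.helper` only *looks* public and so lives in
# PRIVATE_BUT_PRESENT_MODULES; truly private names such as
# `numpy.core._methods` never reach these lists because is_unexpected()
# filters out anything containing '._'.
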
PUBLIC_MODULES = ['numpy.' + s for s in [
    "ctypeslib",
    "distutils",
    "distutils.cpuinfo",
    "distutils.exec_command",
    "distutils.misc_util",
    "distutils.log",
    "distutils.system_info",
    "doc",
    "doc.basics",
    "doc.broadcasting",
    "doc.byteswapping",
    "doc.constants",
    "doc.creation",
    "doc.dispatch",
    "doc.glossary",
    "doc.indexing",
    "doc.internals",
    "doc.misc",
    "doc.structured_arrays",
    "doc.subclassing",
    "doc.ufuncs",
    "dual",
    "f2py",
    "fft",
    "lib",
    "lib.format",  # was this meant to be public?
    "lib.mixins",
    "lib.recfunctions",
    "lib.scimath",
    "linalg",
    "ma",
    "ma.extras",
    "ma.mrecords",
    "matlib",
    "polynomial",
    "polynomial.chebyshev",
    "polynomial.hermite",
    "polynomial.hermite_e",
    "polynomial.laguerre",
    "polynomial.legendre",
    "polynomial.polynomial",
    "polynomial.polyutils",
    "random",
    "testing",
    "version",
]]

PUBLIC_ALIASED_MODULES = [
    "numpy.char",
    "numpy.emath",
    "numpy.rec",
]

PRIVATE_BUT_PRESENT_MODULES = ['numpy.' + s for s in [
    "compat",
    "compat.py3k",
    "conftest",
    "core",
    "core.arrayprint",
    "core.defchararray",
    "core.einsumfunc",
    "core.fromnumeric",
    "core.function_base",
    "core.getlimits",
    "core.machar",
    "core.memmap",
    "core.multiarray",
    "core.numeric",
    "core.numerictypes",
    "core.overrides",
    "core.records",
    "core.shape_base",
    "core.umath",
    "core.umath_tests",
    "distutils.ccompiler",
    "distutils.command",
    "distutils.command.autodist",
    "distutils.command.bdist_rpm",
    "distutils.command.build",
    "distutils.command.build_clib",
    "distutils.command.build_ext",
    "distutils.command.build_py",
    "distutils.command.build_scripts",
    "distutils.command.build_src",
    "distutils.command.config",
    "distutils.command.config_compiler",
    "distutils.command.develop",
    "distutils.command.egg_info",
    "distutils.command.install",
    "distutils.command.install_clib",
    "distutils.command.install_data",
    "distutils.command.install_headers",
    "distutils.command.sdist",
    "distutils.conv_template",
    "distutils.core",
    "distutils.extension",
    "distutils.fcompiler",
    "distutils.fcompiler.absoft",
    "distutils.fcompiler.compaq",
    "distutils.fcompiler.environment",
    "distutils.fcompiler.g95",
    "distutils.fcompiler.gnu",
    "distutils.fcompiler.hpux",
    "distutils.fcompiler.ibm",
    "distutils.fcompiler.intel",
    "distutils.fcompiler.lahey",
    "distutils.fcompiler.mips",
    "distutils.fcompiler.nag",
    "distutils.fcompiler.none",
    "distutils.fcompiler.pathf95",
    "distutils.fcompiler.pg",
    "distutils.fcompiler.sun",
    "distutils.fcompiler.vast",
    "distutils.from_template",
    "distutils.intelccompiler",
    "distutils.lib2def",
    "distutils.line_endings",
    "distutils.mingw32ccompiler",
    "distutils.msvccompiler",
    "distutils.npy_pkg_config",
    "distutils.numpy_distribution",
    "distutils.pathccompiler",
    "distutils.unixccompiler",
    "f2py.auxfuncs",
    "f2py.capi_maps",
    "f2py.cb_rules",
    "f2py.cfuncs",
    "f2py.common_rules",
    "f2py.crackfortran",
    "f2py.diagnose",
    "f2py.f2py2e",
    "f2py.f2py_testing",
    "f2py.f90mod_rules",
    "f2py.func2subr",
    "f2py.rules",
    "f2py.use_rules",
    "fft.helper",
    "lib.arraypad",
    "lib.arraysetops",
    "lib.arrayterator",
    "lib.financial",
    "lib.function_base",
    "lib.histograms",
    "lib.index_tricks",
    "lib.nanfunctions",
    "lib.npyio",
    "lib.polynomial",
    "lib.shape_base",
    "lib.stride_tricks",
    "lib.twodim_base",
    "lib.type_check",
    "lib.ufunclike",
    "lib.user_array",  # note: not in np.lib, but probably should just be deleted
    "lib.utils",
    "linalg.lapack_lite",
    "linalg.linalg",
    "ma.bench",
    "ma.core",
    "ma.testutils",
    "ma.timer_comparison",
    "matrixlib",
    "matrixlib.defmatrix",
    "random.mtrand",
    "random.bit_generator",
    "testing.print_coercion_tables",
    "testing.utils",
]]

def is_unexpected(name):
    """Check if this needs to be considered."""
    if '._' in name or '.tests' in name or '.setup' in name:
        return False

    if name in PUBLIC_MODULES:
        return False

    if name in PUBLIC_ALIASED_MODULES:
        return False

    if name in PRIVATE_BUT_PRESENT_MODULES:
        return False

    return True
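
# Illustrative calls (hypothetical inputs, not executed by the test run):
#   is_unexpected('numpy.core._methods')   -> False  (contains '._')
#   is_unexpected('numpy.fft')             -> False  (listed in PUBLIC_MODULES)
#   is_unexpected('numpy.brand_new_thing') -> True   (would need triaging)
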
# These are present in a directory with an __init__.py but cannot be imported
# code_generators/ isn't installed, but present for an inplace build
SKIP_LIST = [
    "numpy.core.code_generators",
    "numpy.core.code_generators.genapi",
    "numpy.core.code_generators.generate_umath",
    "numpy.core.code_generators.ufunc_docstrings",
    "numpy.core.code_generators.generate_numpy_api",
    "numpy.core.code_generators.generate_ufunc_api",
    "numpy.core.code_generators.numpy_api",
    "numpy.core.cversions",
    "numpy.core.generate_numpy_api",
    "numpy.distutils.msvc9compiler",
]

def test_all_modules_are_expected():
    """
    Test that we don't add anything that looks like a new public module by
    accident. Check is based on filenames.
    """
    modnames = []
    for _, modname, ispkg in pkgutil.walk_packages(path=np.__path__,
                                                   prefix=np.__name__ + '.',
                                                   onerror=None):
        if is_unexpected(modname) and modname not in SKIP_LIST:
            # We have a name that is new. If that's on purpose, add it to
            # PUBLIC_MODULES. We don't expect to have to add anything to
            # PRIVATE_BUT_PRESENT_MODULES. Use an underscore in the name!
            modnames.append(modname)

    if modnames:
        raise AssertionError("Found unexpected modules: {}".format(modnames))

# Stuff that clearly shouldn't be in the API and is detected by the next test
# below
SKIP_LIST_2 = [
    'numpy.math',
    'numpy.distutils.log.sys',
    'numpy.distutils.system_info.copy',
    'numpy.distutils.system_info.distutils',
    'numpy.distutils.system_info.log',
    'numpy.distutils.system_info.os',
    'numpy.distutils.system_info.platform',
    'numpy.distutils.system_info.re',
    'numpy.distutils.system_info.shutil',
    'numpy.distutils.system_info.subprocess',
    'numpy.distutils.system_info.sys',
    'numpy.distutils.system_info.tempfile',
    'numpy.distutils.system_info.textwrap',
    'numpy.distutils.system_info.warnings',
    'numpy.doc.constants.re',
    'numpy.doc.constants.textwrap',
    'numpy.lib.emath',
    'numpy.lib.math',
    'numpy.matlib.char',
    'numpy.matlib.rec',
    'numpy.matlib.emath',
    'numpy.matlib.math',
    'numpy.matlib.linalg',
    'numpy.matlib.fft',
    'numpy.matlib.random',
    'numpy.matlib.ctypeslib',
    'numpy.matlib.ma',
]

def test_all_modules_are_expected_2():
    """
    Method checking all objects. The pkgutil-based method in
    `test_all_modules_are_expected` does not catch imports into a namespace,
    only filenames. So this test is more thorough, and checks things like:

        import .lib.scimath as emath

    To check if something in a module is (effectively) public, one can check if
    there's anything in that namespace that's a public function/object but is
    not exposed in a higher-level namespace. For example for a `numpy.lib`
    submodule::

        mod = np.lib.mixins
        for obj in mod.__all__:
            if obj in np.__all__:
                continue
            elif obj in np.lib.__all__:
                continue
            else:
                print(obj)

    """
    def find_unexpected_members(mod_name):
        members = []
        module = importlib.import_module(mod_name)
        if hasattr(module, '__all__'):
            objnames = module.__all__
        else:
            objnames = dir(module)

        for objname in objnames:
            if not objname.startswith('_'):
                fullobjname = mod_name + '.' + objname
                if isinstance(getattr(module, objname), types.ModuleType):
                    if is_unexpected(fullobjname):
                        if fullobjname not in SKIP_LIST_2:
                            members.append(fullobjname)

        return members

    unexpected_members = find_unexpected_members("numpy")
    for modname in PUBLIC_MODULES:
        unexpected_members.extend(find_unexpected_members(modname))

    if unexpected_members:
        raise AssertionError("Found unexpected object(s) that look like "
                             "modules: {}".format(unexpected_members))

def test_api_importable():
    """
    Check that all submodules listed higher up in this file can be imported

    Note that if a PRIVATE_BUT_PRESENT_MODULES entry goes missing, it may
    simply need to be removed from the list (deprecation may or may not be
    needed - apply common sense).
    """
    def check_importable(module_name):
        try:
            importlib.import_module(module_name)
        except (ImportError, AttributeError):
            return False

        return True

    module_names = []
    for module_name in PUBLIC_MODULES:
        if not check_importable(module_name):
            module_names.append(module_name)

    if module_names:
        raise AssertionError("Modules in the public API that cannot be "
                             "imported: {}".format(module_names))

    for module_name in PUBLIC_ALIASED_MODULES:
        try:
            eval(module_name)
        except AttributeError:
            module_names.append(module_name)

    if module_names:
        raise AssertionError("Modules in the public API that were not "
                             "found: {}".format(module_names))

    with warnings.catch_warnings(record=True) as w:
        warnings.filterwarnings('always', category=DeprecationWarning)
        warnings.filterwarnings('always', category=ImportWarning)
        for module_name in PRIVATE_BUT_PRESENT_MODULES:
            if not check_importable(module_name):
                module_names.append(module_name)

    if module_names:
        raise AssertionError("Modules that are not really public but looked "
                             "public and can not be imported: "
                             "{}".format(module_names))


@@ -0,0 +1,57 @@
from numpy.testing import assert_raises, assert_, assert_equal
from numpy.compat import pickle

import sys
import subprocess
import textwrap
from importlib import reload


def test_numpy_reloading():
    # gh-7844. Also check that relevant globals retain their identity.
    import numpy as np
    import numpy._globals

    _NoValue = np._NoValue
    VisibleDeprecationWarning = np.VisibleDeprecationWarning
    ModuleDeprecationWarning = np.ModuleDeprecationWarning

    reload(np)
    assert_(_NoValue is np._NoValue)
    assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
    assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)

    assert_raises(RuntimeError, reload, numpy._globals)
    reload(np)
    assert_(_NoValue is np._NoValue)
    assert_(ModuleDeprecationWarning is np.ModuleDeprecationWarning)
    assert_(VisibleDeprecationWarning is np.VisibleDeprecationWarning)


def test_novalue():
    import numpy as np
    for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
        assert_equal(repr(np._NoValue), '<no value>')
        assert_(pickle.loads(pickle.dumps(np._NoValue,
                                          protocol=proto)) is np._NoValue)

def test_full_reimport():
    """At the time of writing, a full re-import of numpy is *not* truly
    supported, but enough users apparently rely on it that breaking it would
    be an annoying change, as it was when it started failing previously.
    """
    # Test within a new process, to ensure that we do not mess with the
    # global state during the test run (could lead to cryptic test failures).
    # This is generally unsafe, especially since we also reload the C-modules.
    code = textwrap.dedent(r"""
        import sys
        import numpy as np

        for k in list(sys.modules.keys()):
            if "numpy" in k:
                del sys.modules[k]

        import numpy as np
        """)
    p = subprocess.run([sys.executable, '-c', code])
    assert p.returncode == 0


@@ -0,0 +1,46 @@
""" Test scripts
Test that we can run executable scripts that have been installed with numpy.
"""
import sys
import os
import pytest
from os.path import join as pathjoin, isfile, dirname
import subprocess
import numpy as np
from numpy.testing import assert_equal
is_inplace = isfile(pathjoin(dirname(np.__file__), '..', 'setup.py'))
def find_f2py_commands():
if sys.platform == 'win32':
exe_dir = dirname(sys.executable)
if exe_dir.endswith('Scripts'): # virtualenv
return [os.path.join(exe_dir, 'f2py')]
else:
return [os.path.join(exe_dir, "Scripts", 'f2py')]
else:
# Three scripts are installed in Unix-like systems:
# 'f2py', 'f2py{major}', and 'f2py{major.minor}'. For example,
# if installed with python3.7 the scripts would be named
# 'f2py', 'f2py3', and 'f2py3.7'.
version = sys.version_info
major = str(version.major)
minor = str(version.minor)
return ['f2py', 'f2py' + major, 'f2py' + major + '.' + minor]
@pytest.mark.skipif(is_inplace, reason="Cannot test f2py command inplace")
@pytest.mark.xfail(reason="Test is unreliable")
@pytest.mark.parametrize('f2py_cmd', find_f2py_commands())
def test_f2py(f2py_cmd):
# test that we can run f2py script
stdout = subprocess.check_output([f2py_cmd, '-v'])
assert_equal(stdout.strip(), b'2')
def test_pep338():
stdout = subprocess.check_output([sys.executable, '-mnumpy.f2py', '-v'])
assert_equal(stdout.strip(), b'2')


@@ -0,0 +1,74 @@
"""
Tests which scan for certain occurrences in the code, they may not find
all of these occurrences but should catch almost all.
"""
import pytest
from pathlib import Path
import ast
import tokenize
import numpy
class ParseCall(ast.NodeVisitor):
def __init__(self):
self.ls = []
def visit_Attribute(self, node):
ast.NodeVisitor.generic_visit(self, node)
self.ls.append(node.attr)
def visit_Name(self, node):
self.ls.append(node.id)
class FindFuncs(ast.NodeVisitor):
def __init__(self, filename):
super().__init__()
self.__filename = filename
def visit_Call(self, node):
p = ParseCall()
p.visit(node.func)
ast.NodeVisitor.generic_visit(self, node)
if p.ls[-1] == 'simplefilter' or p.ls[-1] == 'filterwarnings':
if node.args[0].s == "ignore":
raise AssertionError(
"warnings should have an appropriate stacklevel; found in "
"{} on line {}".format(self.__filename, node.lineno))
if p.ls[-1] == 'warn' and (
len(p.ls) == 1 or p.ls[-2] == 'warnings'):
if "testing/tests/test_warnings.py" == self.__filename:
# This file
return
# See if stacklevel exists:
if len(node.args) == 3:
return
args = {kw.arg for kw in node.keywords}
if "stacklevel" in args:
return
raise AssertionError(
"warnings should have an appropriate stacklevel; found in "
"{} on line {}".format(self.__filename, node.lineno))
@pytest.mark.slow
def test_warning_calls():
    # combined "ignore" and stacklevel error
    base = Path(numpy.__file__).parent

    for path in base.rglob("*.py"):
        if base / "testing" in path.parents:
            continue
        if path == base / "__init__.py":
            continue
        if path == base / "random" / "__init__.py":
            continue
        # use tokenize to auto-detect encoding on systems where no
        # default encoding is defined (e.g. LANG='C')
        with tokenize.open(str(path)) as file:
            tree = ast.parse(file.read())
            FindFuncs(path).visit(tree)