import os
import sys
import pickle
import copy
import warnings
import platform
import textwrap
from os.path import join

from numpy.distutils import log
from distutils.dep_util import newer
from distutils.sysconfig import get_config_var
from numpy._build_utils.apple_accelerate import (
    uses_accelerate_framework, get_sgemv_fix
    )
from numpy.compat import npy_load_module
from setup_common import *  # noqa: F403

# Set to True to enable relaxed strides checking. This (mostly) means
# that `strides[dim]` is ignored if `shape[dim] == 1` when setting flags.
NPY_RELAXED_STRIDES_CHECKING = (os.environ.get('NPY_RELAXED_STRIDES_CHECKING', "1") != "0")

# Put NPY_RELAXED_STRIDES_DEBUG=1 in the environment if you want numpy to use a
# bogus value for affected strides in order to help smoke out bad stride usage
# when relaxed stride checking is enabled.
NPY_RELAXED_STRIDES_DEBUG = (os.environ.get('NPY_RELAXED_STRIDES_DEBUG', "0") != "0")
NPY_RELAXED_STRIDES_DEBUG = NPY_RELAXED_STRIDES_DEBUG and NPY_RELAXED_STRIDES_CHECKING
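
# Illustrative example (not part of the build logic itself): both flags are
# read from the environment at build time, e.g.
#
#     NPY_RELAXED_STRIDES_CHECKING=1 NPY_RELAXED_STRIDES_DEBUG=1 python setup.py build
#
# Note that, per the assignment above, the DEBUG flag only takes effect when
# relaxed strides checking is itself enabled.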

# XXX: ugly, we use a class to avoid calling some expensive functions twice for
# config.h/numpyconfig.h. I don't see a better way because distutils forces
# config.h generation inside an Extension class, and as such sharing
# configuration information between extensions is not easy.
# Using a pickle-based memoize does not work because config_cmd is an instance
# method, which cPickle does not like.
#
# Use pickle in all cases, as cPickle is gone in python3 and the difference
# in time only affects the build. -- Charles Harris, 2013-03-30
class CallOnceOnly:
    def __init__(self):
        self._check_types = None
        self._check_ieee_macros = None
        self._check_complex = None

    def check_types(self, *a, **kw):
        if self._check_types is None:
            out = check_types(*a, **kw)
            self._check_types = pickle.dumps(out)
        else:
            out = copy.deepcopy(pickle.loads(self._check_types))
        return out

    def check_ieee_macros(self, *a, **kw):
        if self._check_ieee_macros is None:
            out = check_ieee_macros(*a, **kw)
            self._check_ieee_macros = pickle.dumps(out)
        else:
            out = copy.deepcopy(pickle.loads(self._check_ieee_macros))
        return out

    def check_complex(self, *a, **kw):
        if self._check_complex is None:
            out = check_complex(*a, **kw)
            self._check_complex = pickle.dumps(out)
        else:
            out = copy.deepcopy(pickle.loads(self._check_complex))
        return out
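
# Usage sketch (for orientation only; the real calls appear in configuration()
# below): a single shared instance caches the expensive compiler probes, e.g.
#
#     cocache = CallOnceOnly()
#     private_defs, public_defs = cocache.check_types(config_cmd, ext, build_dir)
#
# Repeated calls return a deep copy of the pickled first result, so each header
# generator can consume the defines without re-running the checks.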


def pythonlib_dir():
    """return path where libpython* is."""
    if sys.platform == 'win32':
        return os.path.join(sys.prefix, "libs")
    else:
        return get_config_var('LIBDIR')


def is_npy_no_signal():
    """Return True if the NPY_NO_SIGNAL symbol must be defined in configuration
    header."""
    return sys.platform == 'win32'


def is_npy_no_smp():
    """Return True if the NPY_NO_SMP symbol must be defined in public
    header (when SMP support cannot be reliably enabled)."""
    # Perhaps a fancier check is in order here, so that threads are only
    # enabled if there are actually multiple CPUs -- but threaded code can
    # be nice even on a single CPU, so that long-running calculations don't
    # block everything else.
    return 'NPY_NOSMP' in os.environ
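
# Illustrative example (not exercised by the build machinery itself): defining
# the variable in the environment, e.g.
#
#     NPY_NOSMP=1 python setup.py build
#
# makes is_npy_no_smp() return True and NPY_NO_SMP be set to 1 in
# _numpyconfig.h (see generate_numpyconfig_h below).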


def win32_checks(deflist):
    from numpy.distutils.misc_util import get_build_architecture
    a = get_build_architecture()

    # Distutils hack on AMD64 on windows
    print('BUILD_ARCHITECTURE: %r, os.name=%r, sys.platform=%r' %
          (a, os.name, sys.platform))
    if a == 'AMD64':
        deflist.append('DISTUTILS_USE_SDK')

    # On win32, force long double format string to be 'g', not
    # 'Lg', since the MS runtime does not support long double whose
    # size is > sizeof(double)
    if a == "Intel" or a == "AMD64":
        deflist.append('FORCE_NO_LONG_DOUBLE_FORMATTING')


def check_math_capabilities(config, moredefs, mathlibs):
    def check_func(func_name):
        return config.check_func(func_name, libraries=mathlibs,
                                 decl=True, call=True)

    def check_funcs_once(funcs_name):
        decl = dict([(f, True) for f in funcs_name])
        st = config.check_funcs_once(funcs_name, libraries=mathlibs,
                                     decl=decl, call=decl)
        if st:
            moredefs.extend([(fname2def(f), 1) for f in funcs_name])
        return st

    def check_funcs(funcs_name):
        # Use check_funcs_once first, and if it does not work, test func per
        # func. Return success only if all the functions are available.
        if not check_funcs_once(funcs_name):
            # Global check failed, check func per func
            for f in funcs_name:
                if check_func(f):
                    moredefs.append((fname2def(f), 1))
            return 0
        else:
            return 1

    #use_msvc = config.check_decl("_MSC_VER")

    if not check_funcs_once(MANDATORY_FUNCS):
        raise SystemError("One of the required functions to build numpy is not"
                          " available (the list is %s)." % str(MANDATORY_FUNCS))

    # Standard functions which may not be available and for which we have a
    # replacement implementation. Note that some of these are C99 functions.

    # XXX: hack to circumvent cpp pollution from python: python puts its
    # config.h in the public namespace, so we have a clash for the common
    # functions we test. We remove every function tested by python's
    # autoconf, hoping their own tests are correct
    for f in OPTIONAL_STDFUNCS_MAYBE:
        if config.check_decl(fname2def(f),
                             headers=["Python.h", "math.h"]):
            OPTIONAL_STDFUNCS.remove(f)

    check_funcs(OPTIONAL_STDFUNCS)

    for h in OPTIONAL_HEADERS:
        if config.check_func("", decl=False, call=False, headers=[h]):
            h = h.replace(".", "_").replace(os.path.sep, "_")
            moredefs.append((fname2def(h), 1))

    for tup in OPTIONAL_INTRINSICS:
        headers = None
        if len(tup) == 2:
            f, args, m = tup[0], tup[1], fname2def(tup[0])
        elif len(tup) == 3:
            f, args, headers, m = tup[0], tup[1], [tup[2]], fname2def(tup[0])
        else:
            f, args, headers, m = tup[0], tup[1], [tup[2]], fname2def(tup[3])
        if config.check_func(f, decl=False, call=True, call_args=args,
                             headers=headers):
            moredefs.append((m, 1))

    for dec, fn in OPTIONAL_FUNCTION_ATTRIBUTES:
        if config.check_gcc_function_attribute(dec, fn):
            moredefs.append((fname2def(fn), 1))

    for dec, fn, code, header in OPTIONAL_FUNCTION_ATTRIBUTES_WITH_INTRINSICS:
        if config.check_gcc_function_attribute_with_intrinsics(dec, fn, code,
                                                               header):
            moredefs.append((fname2def(fn), 1))

    for fn in OPTIONAL_VARIABLE_ATTRIBUTES:
        if config.check_gcc_variable_attribute(fn):
            m = fn.replace("(", "_").replace(")", "_")
            moredefs.append((fname2def(m), 1))

    # C99 functions: float and long double versions
    check_funcs(C99_FUNCS_SINGLE)
    check_funcs(C99_FUNCS_EXTENDED)


def check_complex(config, mathlibs):
    priv = []
    pub = []

    try:
        if os.uname()[0] == "Interix":
            warnings.warn("Disabling broken complex support. See #1365", stacklevel=2)
            return priv, pub
    except Exception:
        # os.uname is not available on all platforms; a blanket except is
        # ugly but safe here.
        pass

    # Check for complex support
    st = config.check_header('complex.h')
    if st:
        priv.append(('HAVE_COMPLEX_H', 1))
        pub.append(('NPY_USE_C99_COMPLEX', 1))

    for t in C99_COMPLEX_TYPES:
        st = config.check_type(t, headers=["complex.h"])
        if st:
            pub.append(('NPY_HAVE_%s' % type2def(t), 1))

    def check_prec(prec):
        flist = [f + prec for f in C99_COMPLEX_FUNCS]
        decl = dict([(f, True) for f in flist])
        if not config.check_funcs_once(flist, call=decl, decl=decl,
                                       libraries=mathlibs):
            for f in flist:
                if config.check_func(f, call=True, decl=True,
                                     libraries=mathlibs):
                    priv.append((fname2def(f), 1))
        else:
            priv.extend([(fname2def(f), 1) for f in flist])

    check_prec('')
    check_prec('f')
    check_prec('l')

    return priv, pub


def check_ieee_macros(config):
    priv = []
    pub = []

    macros = []

    def _add_decl(f):
        priv.append(fname2def("decl_%s" % f))
        pub.append('NPY_%s' % fname2def("decl_%s" % f))

    # XXX: hack to circumvent cpp pollution from python: python puts its
    # config.h in the public namespace, so we have a clash for the common
    # functions we test. We remove every function tested by python's
    # autoconf, hoping their own tests are correct
    _macros = ["isnan", "isinf", "signbit", "isfinite"]
    for f in _macros:
        py_symbol = fname2def("decl_%s" % f)
        already_declared = config.check_decl(py_symbol,
                                             headers=["Python.h", "math.h"])
        if already_declared:
            if config.check_macro_true(py_symbol,
                                       headers=["Python.h", "math.h"]):
                pub.append('NPY_%s' % fname2def("decl_%s" % f))
        else:
            macros.append(f)
    # Normally, isnan and isinf are macros (C99), but some platforms only have
    # the function, or both the function and the macro version. Check for the
    # macro only, and define replacement ones if not found.
    # Note: including Python.h is necessary because it modifies some math.h
    # definitions
    for f in macros:
        st = config.check_decl(f, headers=["Python.h", "math.h"])
        if st:
            _add_decl(f)

    return priv, pub


def check_types(config_cmd, ext, build_dir):
    private_defines = []
    public_defines = []

    # Expected size (in number of bytes) for each type. This is an
    # optimization: those are only hints, and an exhaustive search for the size
    # is done if the hints are wrong.
    expected = {'short': [2], 'int': [4], 'long': [8, 4],
                'float': [4], 'double': [8], 'long double': [16, 12, 8],
                'Py_intptr_t': [8, 4], 'PY_LONG_LONG': [8], 'long long': [8],
                'off_t': [8, 4]}

    # Check we have the python header (-dev* packages on Linux)
    result = config_cmd.check_header('Python.h')
    if not result:
        python = 'python'
        if '__pypy__' in sys.builtin_module_names:
            python = 'pypy'
        raise SystemError(
                "Cannot compile 'Python.h'. Perhaps you need to "
                "install {0}-dev|{0}-devel.".format(python))
    res = config_cmd.check_header("endian.h")
    if res:
        private_defines.append(('HAVE_ENDIAN_H', 1))
        public_defines.append(('NPY_HAVE_ENDIAN_H', 1))
    res = config_cmd.check_header("sys/endian.h")
    if res:
        private_defines.append(('HAVE_SYS_ENDIAN_H', 1))
        public_defines.append(('NPY_HAVE_SYS_ENDIAN_H', 1))

    # Check basic types sizes
    for type in ('short', 'int', 'long'):
        res = config_cmd.check_decl("SIZEOF_%s" % sym2def(type), headers=["Python.h"])
        if res:
            public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), "SIZEOF_%s" % sym2def(type)))
        else:
            res = config_cmd.check_type_size(type, expected=expected[type])
            if res >= 0:
                public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
            else:
                raise SystemError("Checking sizeof (%s) failed !" % type)

    for type in ('float', 'double', 'long double'):
        already_declared = config_cmd.check_decl("SIZEOF_%s" % sym2def(type),
                                                 headers=["Python.h"])
        res = config_cmd.check_type_size(type, expected=expected[type])
        if res >= 0:
            public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
            if not already_declared and not type == 'long double':
                private_defines.append(('SIZEOF_%s' % sym2def(type), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % type)

        # Compute size of corresponding complex type: used to check that our
        # definition is binary compatible with C99 complex type (check done at
        # build time in npy_common.h)
        complex_def = "struct {%s __x; %s __y;}" % (type, type)
        res = config_cmd.check_type_size(complex_def,
                                         expected=[2 * x for x in expected[type]])
        if res >= 0:
            public_defines.append(('NPY_SIZEOF_COMPLEX_%s' % sym2def(type), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % complex_def)

    for type in ('Py_intptr_t', 'off_t'):
        res = config_cmd.check_type_size(type, headers=["Python.h"],
                                         library_dirs=[pythonlib_dir()],
                                         expected=expected[type])

        if res >= 0:
            private_defines.append(('SIZEOF_%s' % sym2def(type), '%d' % res))
            public_defines.append(('NPY_SIZEOF_%s' % sym2def(type), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % type)

    # We check declaration AND type because that's how distutils does it.
    if config_cmd.check_decl('PY_LONG_LONG', headers=['Python.h']):
        res = config_cmd.check_type_size('PY_LONG_LONG', headers=['Python.h'],
                                         library_dirs=[pythonlib_dir()],
                                         expected=expected['PY_LONG_LONG'])
        if res >= 0:
            private_defines.append(('SIZEOF_%s' % sym2def('PY_LONG_LONG'), '%d' % res))
            public_defines.append(('NPY_SIZEOF_%s' % sym2def('PY_LONG_LONG'), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % 'PY_LONG_LONG')

        res = config_cmd.check_type_size('long long',
                                         expected=expected['long long'])
        if res >= 0:
            #private_defines.append(('SIZEOF_%s' % sym2def('long long'), '%d' % res))
            public_defines.append(('NPY_SIZEOF_%s' % sym2def('long long'), '%d' % res))
        else:
            raise SystemError("Checking sizeof (%s) failed !" % 'long long')

    if not config_cmd.check_decl('CHAR_BIT', headers=['Python.h']):
        raise RuntimeError(
            "Config wo CHAR_BIT is not supported"
            ", please contact the maintainers")

    return private_defines, public_defines


def check_mathlib(config_cmd):
    # Testing the C math library
    mathlibs = []
    mathlibs_choices = [[], ['m'], ['cpml']]
    mathlib = os.environ.get('MATHLIB')
    if mathlib:
        mathlibs_choices.insert(0, mathlib.split(','))
    for libs in mathlibs_choices:
        if config_cmd.check_func("exp", libraries=libs, decl=True, call=True):
            mathlibs = libs
            break
    else:
        raise EnvironmentError("math library missing; rerun "
                               "setup.py after setting the "
                               "MATHLIB env variable")
    return mathlibs
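
# Illustrative example (assumed usage, not exercised by this file directly):
# MATHLIB takes a comma-separated list of library names without the "lib"
# prefix, e.g.
#
#     MATHLIB=m python setup.py build
#
# The chosen names end up in the MATHLIB define written to config.h and, for
# the npymath library, in the posix/msvc link flags built in get_mathlib_info().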


def visibility_define(config):
    """Return the define value to use for NPY_VISIBILITY_HIDDEN (may be empty
    string)."""
    hide = '__attribute__((visibility("hidden")))'
    if config.check_gcc_function_attribute(hide, 'hideme'):
        return hide
    else:
        return ''
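
# Note (descriptive only): the string returned above becomes the value of the
# NPY_VISIBILITY_HIDDEN macro written into _numpyconfig.h by
# generate_numpyconfig_h() below, so it is empty whenever the compiler does not
# support the GCC visibility attribute.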


def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration, dot_join
    from numpy.distutils.system_info import get_info

    config = Configuration('core', parent_package, top_path)
    local_dir = config.local_path
    codegen_dir = join(local_dir, 'code_generators')

    if is_released(config):
        warnings.simplefilter('error', MismatchCAPIWarning)

    # Check whether we have a mismatch between the set C API VERSION and the
    # actual C API VERSION
    check_api_version(C_API_VERSION, codegen_dir)

    generate_umath_py = join(codegen_dir, 'generate_umath.py')
    n = dot_join(config.name, 'generate_umath')
    generate_umath = npy_load_module('_'.join(n.split('.')),
                                     generate_umath_py, ('.py', 'U', 1))

    header_dir = 'include/numpy'  # this is relative to config.path_in_package

    cocache = CallOnceOnly()

    def generate_config_h(ext, build_dir):
        target = join(build_dir, header_dir, 'config.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)

        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            moredefs, ignored = cocache.check_types(config_cmd, ext, build_dir)

            # Check math library and C99 math funcs availability
            mathlibs = check_mathlib(config_cmd)
            moredefs.append(('MATHLIB', ','.join(mathlibs)))

            check_math_capabilities(config_cmd, moredefs, mathlibs)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[0])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[0])

            # Signal check
            if is_npy_no_signal():
                moredefs.append('__NPY_PRIVATE_NO_SIGNAL')

            # Windows checks
            if sys.platform == 'win32' or os.name == 'nt':
                win32_checks(moredefs)

            # C99 restrict keyword
            moredefs.append(('NPY_RESTRICT', config_cmd.check_restrict()))

            # Inline check
            inline = config_cmd.check_inline()

            # Use relaxed stride checking
            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            # Use bogus stride debug aid when relaxed strides are enabled
            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Get long double representation
            rep = check_long_double_representation(config_cmd)
            moredefs.append(('HAVE_LDOUBLE_%s' % rep, 1))

            if check_for_right_shift_internal_compiler_error(config_cmd):
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_LONGLONG_right_shift')
                moredefs.append('NPY_DO_NOT_OPTIMIZE_ULONGLONG_right_shift')

            # Generate the config.h file from moredefs
            with open(target, 'w') as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # define inline to our keyword, or nothing
                target_f.write('#ifndef __cplusplus\n')
                if inline == 'inline':
                    target_f.write('/* #undef inline */\n')
                else:
                    target_f.write('#define inline %s\n' % inline)
                target_f.write('#endif\n')

                # add the guard to make sure config.h is never included directly,
                # but always through npy_config.h
                target_f.write(textwrap.dedent("""
                    #ifndef _NPY_NPY_CONFIG_H_
                    #error config.h should never be included directly, include npy_config.h instead
                    #endif
                """))

            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        else:
            mathlibs = []
            with open(target) as target_f:
                for line in target_f:
                    s = '#define MATHLIB'
                    if line.startswith(s):
                        value = line[len(s):].strip()
                        if value:
                            mathlibs.extend(value.split(','))

        # Ugly: this can be called within a library and not an extension,
        # in which case there is no libraries attribute (and none is
        # needed).
        if hasattr(ext, 'libraries'):
            ext.libraries.extend(mathlibs)

        incl_dir = os.path.dirname(target)
        if incl_dir not in config.numpy_include_dirs:
            config.numpy_include_dirs.append(incl_dir)

        return target

    def generate_numpyconfig_h(ext, build_dir):
        """Depends on config.h: generate_config_h has to be called before !"""
        # put common include directory in build_dir on search path
        # allows using code generation in headers
        config.add_include_dirs(join(build_dir, "src", "common"))
        config.add_include_dirs(join(build_dir, "src", "npymath"))

        target = join(build_dir, header_dir, '_numpyconfig.h')
        d = os.path.dirname(target)
        if not os.path.exists(d):
            os.makedirs(d)
        if newer(__file__, target):
            config_cmd = config.get_config_cmd()
            log.info('Generating %s', target)

            # Check sizeof
            ignored, moredefs = cocache.check_types(config_cmd, ext, build_dir)

            if is_npy_no_signal():
                moredefs.append(('NPY_NO_SIGNAL', 1))

            if is_npy_no_smp():
                moredefs.append(('NPY_NO_SMP', 1))
            else:
                moredefs.append(('NPY_NO_SMP', 0))

            mathlibs = check_mathlib(config_cmd)
            moredefs.extend(cocache.check_ieee_macros(config_cmd)[1])
            moredefs.extend(cocache.check_complex(config_cmd, mathlibs)[1])

            if NPY_RELAXED_STRIDES_CHECKING:
                moredefs.append(('NPY_RELAXED_STRIDES_CHECKING', 1))

            if NPY_RELAXED_STRIDES_DEBUG:
                moredefs.append(('NPY_RELAXED_STRIDES_DEBUG', 1))

            # Check whether we can use inttypes (C99) formats
            if config_cmd.check_decl('PRIdPTR', headers=['inttypes.h']):
                moredefs.append(('NPY_USE_C99_FORMATS', 1))

            # visibility check
            hidden_visibility = visibility_define(config_cmd)
            moredefs.append(('NPY_VISIBILITY_HIDDEN', hidden_visibility))

            # Add the C API/ABI versions
            moredefs.append(('NPY_ABI_VERSION', '0x%.8X' % C_ABI_VERSION))
            moredefs.append(('NPY_API_VERSION', '0x%.8X' % C_API_VERSION))

            # Add moredefs to header
            with open(target, 'w') as target_f:
                for d in moredefs:
                    if isinstance(d, str):
                        target_f.write('#define %s\n' % (d))
                    else:
                        target_f.write('#define %s %s\n' % (d[0], d[1]))

                # Define __STDC_FORMAT_MACROS
                target_f.write(textwrap.dedent("""
                    #ifndef __STDC_FORMAT_MACROS
                    #define __STDC_FORMAT_MACROS 1
                    #endif
                """))

            # Dump the numpyconfig.h header to stdout
            log.info('File: %s' % target)
            with open(target) as target_f:
                log.info(target_f.read())
            log.info('EOF')
        config.add_data_files((header_dir, target))
        return target

    def generate_api_func(module_name):
        def generate_api(ext, build_dir):
            script = join(codegen_dir, module_name + '.py')
            sys.path.insert(0, codegen_dir)
            try:
                m = __import__(module_name)
                log.info('executing %s', script)
                h_file, c_file, doc_file = m.generate_api(os.path.join(build_dir, header_dir))
            finally:
                del sys.path[0]
            config.add_data_files((header_dir, h_file),
                                  (header_dir, doc_file))
            return (h_file,)
        return generate_api

    generate_numpy_api = generate_api_func('generate_numpy_api')
    generate_ufunc_api = generate_api_func('generate_ufunc_api')

    config.add_include_dirs(join(local_dir, "src", "common"))
    config.add_include_dirs(join(local_dir, "src"))
    config.add_include_dirs(join(local_dir))

    config.add_data_dir('include/numpy')
    config.add_include_dirs(join('src', 'npymath'))
    config.add_include_dirs(join('src', 'multiarray'))
    config.add_include_dirs(join('src', 'umath'))
    config.add_include_dirs(join('src', 'npysort'))

    config.add_define_macros([("NPY_INTERNAL_BUILD", "1")])  # this macro indicates that the NumPy build is in progress
    config.add_define_macros([("HAVE_NPY_CONFIG_H", "1")])
    if sys.platform[:3] == "aix":
        config.add_define_macros([("_LARGE_FILES", None)])
    else:
        config.add_define_macros([("_FILE_OFFSET_BITS", "64")])
        config.add_define_macros([('_LARGEFILE_SOURCE', '1')])
        config.add_define_macros([('_LARGEFILE64_SOURCE', '1')])

    config.numpy_include_dirs.extend(config.paths('include'))

    deps = [join('src', 'npymath', '_signbit.c'),
            join('include', 'numpy', '*object.h'),
            join(codegen_dir, 'genapi.py'),
            ]

    #######################################################################
    #                          npymath library                            #
    #######################################################################

    subst_dict = dict([("sep", os.path.sep), ("pkgname", "numpy.core")])

    def get_mathlib_info(*args):
        # Another ugly hack: the mathlib info is known once build_src is run,
        # but we cannot use add_installed_pkg_config here either, so we only
        # update the substitution dictionary during npymath build
        config_cmd = config.get_config_cmd()

        # Check that the toolchain works, to fail early if it doesn't
        # (avoid late errors with MATHLIB which are confusing if the
        # compiler does not work).
        st = config_cmd.try_link('int main(void) { return 0;}')
        if not st:
            # rerun the failing command in verbose mode
            config_cmd.compiler.verbose = True
            config_cmd.try_link('int main(void) { return 0;}')
            raise RuntimeError("Broken toolchain: cannot link a simple C program")
        mlibs = check_mathlib(config_cmd)

        posix_mlib = ' '.join(['-l%s' % l for l in mlibs])
        msvc_mlib = ' '.join(['%s.lib' % l for l in mlibs])
        subst_dict["posix_mathlib"] = posix_mlib
        subst_dict["msvc_mathlib"] = msvc_mlib

    npymath_sources = [join('src', 'npymath', 'npy_math_internal.h.src'),
                       join('src', 'npymath', 'npy_math.c'),
                       join('src', 'npymath', 'ieee754.c.src'),
                       join('src', 'npymath', 'npy_math_complex.c.src'),
                       join('src', 'npymath', 'halffloat.c')
                       ]

    # Must be true for CRT compilers but not MinGW/cygwin. See gh-9977.
    # Intel and Clang also don't seem happy with /GL
    is_msvc = (platform.platform().startswith('Windows') and
               platform.python_compiler().startswith('MS'))
    config.add_installed_library('npymath',
            sources=npymath_sources + [get_mathlib_info],
            install_dir='lib',
            build_info={
                'include_dirs' : [],  # empty list required for creating npy_math_internal.h
                'extra_compiler_args' : (['/GL-'] if is_msvc else []),
            })
    config.add_npy_pkg_config("npymath.ini.in", "lib/npy-pkg-config",
                              subst_dict)
    config.add_npy_pkg_config("mlib.ini.in", "lib/npy-pkg-config",
                              subst_dict)

    #######################################################################
    #                          npysort library                            #
    #######################################################################

    # This library is created for the build but it is not installed
    npysort_sources = [join('src', 'common', 'npy_sort.h.src'),
                       join('src', 'npysort', 'quicksort.c.src'),
                       join('src', 'npysort', 'mergesort.c.src'),
                       join('src', 'npysort', 'timsort.c.src'),
                       join('src', 'npysort', 'heapsort.c.src'),
                       join('src', 'npysort', 'radixsort.c.src'),
                       join('src', 'common', 'npy_partition.h.src'),
                       join('src', 'npysort', 'selection.c.src'),
                       join('src', 'common', 'npy_binsearch.h.src'),
                       join('src', 'npysort', 'binsearch.c.src'),
                       ]
    config.add_library('npysort',
                       sources=npysort_sources,
                       include_dirs=[])

    #######################################################################
    #                       multiarray_tests module                       #
    #######################################################################

    config.add_extension('_multiarray_tests',
                    sources=[join('src', 'multiarray', '_multiarray_tests.c.src'),
                             join('src', 'common', 'mem_overlap.c')],
                    depends=[join('src', 'common', 'mem_overlap.h'),
                             join('src', 'common', 'npy_extint128.h')],
                    libraries=['npymath'])

    #######################################################################
    #               _multiarray_umath module - common part                #
    #######################################################################

    common_deps = [
            join('src', 'common', 'array_assign.h'),
            join('src', 'common', 'binop_override.h'),
            join('src', 'common', 'cblasfuncs.h'),
            join('src', 'common', 'lowlevel_strided_loops.h'),
            join('src', 'common', 'mem_overlap.h'),
            join('src', 'common', 'npy_cblas.h'),
            join('src', 'common', 'npy_config.h'),
            join('src', 'common', 'npy_ctypes.h'),
            join('src', 'common', 'npy_extint128.h'),
            join('src', 'common', 'npy_import.h'),
            join('src', 'common', 'npy_longdouble.h'),
            join('src', 'common', 'templ_common.h.src'),
            join('src', 'common', 'ucsnarrow.h'),
            join('src', 'common', 'ufunc_override.h'),
            join('src', 'common', 'umathmodule.h'),
            join('src', 'common', 'numpyos.h'),
            ]

    common_src = [
            join('src', 'common', 'array_assign.c'),
            join('src', 'common', 'mem_overlap.c'),
            join('src', 'common', 'npy_longdouble.c'),
            join('src', 'common', 'templ_common.h.src'),
            join('src', 'common', 'ucsnarrow.c'),
            join('src', 'common', 'ufunc_override.c'),
            join('src', 'common', 'numpyos.c'),
            join('src', 'common', 'npy_cpu_features.c.src'),
            ]

    if os.environ.get('NPY_USE_BLAS_ILP64', "0") != "0":
        blas_info = get_info('blas_ilp64_opt', 2)
    else:
        blas_info = get_info('blas_opt', 0)

    have_blas = blas_info and ('HAVE_CBLAS', None) in blas_info.get('define_macros', [])
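
    # Note (assumed numpy.distutils semantics): 'blas_ilp64_opt' looks for a
    # 64-bit-integer (ILP64) BLAS and the second argument to get_info turns a
    # missing library into a hard error, while the default 'blas_opt' lookup is
    # optional and the build falls back to the unoptimized paths when
    # have_blas ends up False.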

    if have_blas:
        extra_info = blas_info
        # These files are also in MANIFEST.in so that they are always in
        # the source distribution independently of HAVE_CBLAS.
        common_src.extend([join('src', 'common', 'cblasfuncs.c'),
                           join('src', 'common', 'python_xerbla.c'),
                           ])
        if uses_accelerate_framework(blas_info):
            common_src.extend(get_sgemv_fix())
    else:
        extra_info = {}

    #######################################################################
    #             _multiarray_umath module - multiarray part              #
    #######################################################################

    multiarray_deps = [
            join('src', 'multiarray', 'arrayobject.h'),
            join('src', 'multiarray', 'arraytypes.h'),
            join('src', 'multiarray', 'arrayfunction_override.h'),
            join('src', 'multiarray', 'npy_buffer.h'),
            join('src', 'multiarray', 'calculation.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'convert_datatype.h'),
            join('src', 'multiarray', 'convert.h'),
            join('src', 'multiarray', 'conversion_utils.h'),
            join('src', 'multiarray', 'ctors.h'),
            join('src', 'multiarray', 'descriptor.h'),
            join('src', 'multiarray', 'dragon4.h'),
            join('src', 'multiarray', 'getset.h'),
            join('src', 'multiarray', 'hashdescr.h'),
            join('src', 'multiarray', 'iterators.h'),
            join('src', 'multiarray', 'mapping.h'),
            join('src', 'multiarray', 'methods.h'),
            join('src', 'multiarray', 'multiarraymodule.h'),
            join('src', 'multiarray', 'nditer_impl.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'multiarray', 'refcount.h'),
            join('src', 'multiarray', 'scalartypes.h'),
            join('src', 'multiarray', 'sequence.h'),
            join('src', 'multiarray', 'shape.h'),
            join('src', 'multiarray', 'strfuncs.h'),
            join('src', 'multiarray', 'typeinfo.h'),
            join('src', 'multiarray', 'usertypes.h'),
            join('src', 'multiarray', 'vdot.h'),
            join('include', 'numpy', 'arrayobject.h'),
            join('include', 'numpy', '_neighborhood_iterator_imp.h'),
            join('include', 'numpy', 'npy_endian.h'),
            join('include', 'numpy', 'arrayscalars.h'),
            join('include', 'numpy', 'noprefix.h'),
            join('include', 'numpy', 'npy_interrupt.h'),
            join('include', 'numpy', 'npy_3kcompat.h'),
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('include', 'numpy', 'npy_common.h'),
            join('include', 'numpy', 'npy_os.h'),
            join('include', 'numpy', 'utils.h'),
            join('include', 'numpy', 'ndarrayobject.h'),
            join('include', 'numpy', 'npy_cpu.h'),
            join('include', 'numpy', 'numpyconfig.h'),
            join('include', 'numpy', 'ndarraytypes.h'),
            join('include', 'numpy', 'npy_1_7_deprecated_api.h'),
            # add library sources as distutils does not consider library
            # dependencies
            ] + npysort_sources + npymath_sources

    multiarray_src = [
            join('src', 'multiarray', 'alloc.c'),
            join('src', 'multiarray', 'arrayobject.c'),
            join('src', 'multiarray', 'arraytypes.c.src'),
            join('src', 'multiarray', 'array_assign_scalar.c'),
            join('src', 'multiarray', 'array_assign_array.c'),
            join('src', 'multiarray', 'arrayfunction_override.c'),
            join('src', 'multiarray', 'buffer.c'),
            join('src', 'multiarray', 'calculation.c'),
            join('src', 'multiarray', 'compiled_base.c'),
            join('src', 'multiarray', 'common.c'),
            join('src', 'multiarray', 'convert.c'),
            join('src', 'multiarray', 'convert_datatype.c'),
            join('src', 'multiarray', 'conversion_utils.c'),
            join('src', 'multiarray', 'ctors.c'),
            join('src', 'multiarray', 'datetime.c'),
            join('src', 'multiarray', 'datetime_strings.c'),
            join('src', 'multiarray', 'datetime_busday.c'),
            join('src', 'multiarray', 'datetime_busdaycal.c'),
            join('src', 'multiarray', 'descriptor.c'),
            join('src', 'multiarray', 'dragon4.c'),
            join('src', 'multiarray', 'dtype_transfer.c'),
            join('src', 'multiarray', 'einsum.c.src'),
            join('src', 'multiarray', 'flagsobject.c'),
            join('src', 'multiarray', 'getset.c'),
            join('src', 'multiarray', 'hashdescr.c'),
            join('src', 'multiarray', 'item_selection.c'),
            join('src', 'multiarray', 'iterators.c'),
            join('src', 'multiarray', 'lowlevel_strided_loops.c.src'),
            join('src', 'multiarray', 'mapping.c'),
            join('src', 'multiarray', 'methods.c'),
            join('src', 'multiarray', 'multiarraymodule.c'),
            join('src', 'multiarray', 'nditer_templ.c.src'),
            join('src', 'multiarray', 'nditer_api.c'),
            join('src', 'multiarray', 'nditer_constr.c'),
            join('src', 'multiarray', 'nditer_pywrap.c'),
            join('src', 'multiarray', 'number.c'),
            join('src', 'multiarray', 'refcount.c'),
            join('src', 'multiarray', 'sequence.c'),
            join('src', 'multiarray', 'shape.c'),
            join('src', 'multiarray', 'scalarapi.c'),
            join('src', 'multiarray', 'scalartypes.c.src'),
            join('src', 'multiarray', 'strfuncs.c'),
            join('src', 'multiarray', 'temp_elide.c'),
            join('src', 'multiarray', 'typeinfo.c'),
            join('src', 'multiarray', 'usertypes.c'),
            join('src', 'multiarray', 'vdot.c'),
            ]

    #######################################################################
    #                _multiarray_umath module - umath part                #
    #######################################################################

    def generate_umath_c(ext, build_dir):
        target = join(build_dir, header_dir, '__umath_generated.c')
        dir = os.path.dirname(target)
        if not os.path.exists(dir):
            os.makedirs(dir)
        script = generate_umath_py
        if newer(script, target):
            with open(target, 'w') as f:
                f.write(generate_umath.make_code(generate_umath.defdict,
                                                 generate_umath.__file__))
        return []

    umath_src = [
            join('src', 'umath', 'umathmodule.c'),
            join('src', 'umath', 'reduction.c'),
            join('src', 'umath', 'funcs.inc.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'loops.h.src'),
            join('src', 'umath', 'loops.c.src'),
            join('src', 'umath', 'matmul.h.src'),
            join('src', 'umath', 'matmul.c.src'),
            join('src', 'umath', 'clip.h.src'),
            join('src', 'umath', 'clip.c.src'),
            join('src', 'umath', 'ufunc_object.c'),
            join('src', 'umath', 'extobj.c'),
            join('src', 'umath', 'scalarmath.c.src'),
            join('src', 'umath', 'ufunc_type_resolution.c'),
            join('src', 'umath', 'override.c'),
            ]

    umath_deps = [
            generate_umath_py,
            join('include', 'numpy', 'npy_math.h'),
            join('include', 'numpy', 'halffloat.h'),
            join('src', 'multiarray', 'common.h'),
            join('src', 'multiarray', 'number.h'),
            join('src', 'common', 'templ_common.h.src'),
            join('src', 'umath', 'simd.inc.src'),
            join('src', 'umath', 'override.h'),
            join(codegen_dir, 'generate_ufunc_api.py'),
            ]

    config.add_extension('_multiarray_umath',
                         sources=multiarray_src + umath_src +
                                 npymath_sources + common_src +
                                 [generate_config_h,
                                  generate_numpyconfig_h,
                                  generate_numpy_api,
                                  join(codegen_dir, 'generate_numpy_api.py'),
                                  join('*.py'),
                                  generate_umath_c,
                                  generate_ufunc_api,
                                  ],
                         depends=deps + multiarray_deps + umath_deps +
                                 common_deps,
                         libraries=['npymath', 'npysort'],
                         extra_info=extra_info)

    #######################################################################
    #                         umath_tests module                          #
    #######################################################################

    config.add_extension('_umath_tests',
                         sources=[join('src', 'umath', '_umath_tests.c.src')])

    #######################################################################
    #                    custom rational dtype module                     #
    #######################################################################

    config.add_extension('_rational_tests',
                         sources=[join('src', 'umath', '_rational_tests.c.src')])

    #######################################################################
    #                      struct_ufunc_test module                       #
    #######################################################################

    config.add_extension('_struct_ufunc_tests',
                         sources=[join('src', 'umath', '_struct_ufunc_tests.c.src')])

    #######################################################################
    #                     operand_flag_tests module                       #
    #######################################################################

    config.add_extension('_operand_flag_tests',
                         sources=[join('src', 'umath', '_operand_flag_tests.c.src')])

    config.add_subpackage('tests')
    config.add_data_dir('tests/data')

    config.make_svn_version_py()

    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)