Use a for loop to install Conda packages
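The loop referred to in the commit title is not part of this diff; the diff only adds vendored files under .CondaPkg/env/. A minimal sketch of what such a loop could look like, assuming the plain conda CLI and a purely hypothetical package list, is:

import subprocess

# Hypothetical list; the actual packages are not shown in this commit.
packages = ["numpy", "scipy", "pandas"]

for pkg in packages:
    # Install each package one at a time into the active environment.
    # Note that `conda install -y` accepts several packages in one call,
    # which is usually faster than looping, since the solver runs once.
    subprocess.run(["conda", "install", "-y", pkg], check=True)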
14
.CondaPkg/env/Lib/site-packages/scipy/_lib/__init__.py
vendored
Normal file
@@ -0,0 +1,14 @@
"""
Module containing private utility functions
===========================================

The ``scipy._lib`` namespace is empty (for now). Tests for all
utilities in submodules of ``_lib`` can be run with::

    from scipy import _lib
    _lib.test()

"""
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_bunch.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_ccallback.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_disjoint_set.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_docscrape.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_finite_differences.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_gcutils.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_pep440.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_testutils.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_threadsafety.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_tmpdirs.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/_util.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/decorator.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/deprecation.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/doccer.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/__pycache__/uarray.cpython-311.pyc
vendored
Normal file
Binary file not shown.
226
.CondaPkg/env/Lib/site-packages/scipy/_lib/_bunch.py
vendored
Normal file
@@ -0,0 +1,226 @@
|
||||
|
||||
import sys as _sys
|
||||
from keyword import iskeyword as _iskeyword
|
||||
|
||||
|
||||
def _validate_names(typename, field_names, extra_field_names):
|
||||
"""
|
||||
Ensure that all the given names are valid Python identifiers that
|
||||
do not start with '_'. Also check that there are no duplicates
|
||||
among field_names + extra_field_names.
|
||||
"""
|
||||
for name in [typename] + field_names + extra_field_names:
|
||||
if type(name) is not str:
|
||||
raise TypeError('typename and all field names must be strings')
|
||||
if not name.isidentifier():
|
||||
raise ValueError('typename and all field names must be valid '
|
||||
f'identifiers: {name!r}')
|
||||
if _iskeyword(name):
|
||||
raise ValueError('typename and all field names cannot be a '
|
||||
f'keyword: {name!r}')
|
||||
|
||||
seen = set()
|
||||
for name in field_names + extra_field_names:
|
||||
if name.startswith('_'):
|
||||
raise ValueError('Field names cannot start with an underscore: '
|
||||
f'{name!r}')
|
||||
if name in seen:
|
||||
raise ValueError(f'Duplicate field name: {name!r}')
|
||||
seen.add(name)
|
||||
|
||||
|
||||
# Note: This code is adapted from CPython:Lib/collections/__init__.py
|
||||
def _make_tuple_bunch(typename, field_names, extra_field_names=None,
|
||||
module=None):
|
||||
"""
|
||||
Create a namedtuple-like class with additional attributes.
|
||||
|
||||
This function creates a subclass of tuple that acts like a namedtuple
|
||||
and that has additional attributes.
|
||||
|
||||
The additional attributes are listed in `extra_field_names`. The
|
||||
values assigned to these attributes are not part of the tuple.
|
||||
|
||||
The reason this function exists is to allow functions in SciPy
|
||||
that currently return a tuple or a namedtuple to returned objects
|
||||
that have additional attributes, while maintaining backwards
|
||||
compatibility.
|
||||
|
||||
This should only be used to enhance *existing* functions in SciPy.
|
||||
New functions are free to create objects as return values without
|
||||
having to maintain backwards compatibility with an old tuple or
|
||||
namedtuple return value.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
typename : str
|
||||
The name of the type.
|
||||
field_names : list of str
|
||||
List of names of the values to be stored in the tuple. These names
|
||||
will also be attributes of instances, so the values in the tuple
|
||||
can be accessed by indexing or as attributes. At least one name
|
||||
is required. See the Notes for additional restrictions.
|
||||
extra_field_names : list of str, optional
|
||||
List of names of values that will be stored as attributes of the
|
||||
object. See the notes for additional restrictions.
|
||||
|
||||
Returns
|
||||
-------
|
||||
cls : type
|
||||
The new class.
|
||||
|
||||
Notes
|
||||
-----
|
||||
There are restrictions on the names that may be used in `field_names`
|
||||
and `extra_field_names`:
|
||||
|
||||
* The names must be unique--no duplicates allowed.
|
||||
* The names must be valid Python identifiers, and must not begin with
|
||||
an underscore.
|
||||
* The names must not be Python keywords (e.g. 'def', 'and', etc., are
|
||||
not allowed).
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from scipy._lib._bunch import _make_tuple_bunch
|
||||
|
||||
Create a class that acts like a namedtuple with length 2 (with field
|
||||
names `x` and `y`) that will also have the attributes `w` and `beta`:
|
||||
|
||||
>>> Result = _make_tuple_bunch('Result', ['x', 'y'], ['w', 'beta'])
|
||||
|
||||
`Result` is the new class. We call it with keyword arguments to create
|
||||
a new instance with given values.
|
||||
|
||||
>>> result1 = Result(x=1, y=2, w=99, beta=0.5)
|
||||
>>> result1
|
||||
Result(x=1, y=2, w=99, beta=0.5)
|
||||
|
||||
`result1` acts like a tuple of length 2:
|
||||
|
||||
>>> len(result1)
|
||||
2
|
||||
>>> result1[:]
|
||||
(1, 2)
|
||||
|
||||
The values assigned when the instance was created are available as
|
||||
attributes:
|
||||
|
||||
>>> result1.y
|
||||
2
|
||||
>>> result1.beta
|
||||
0.5
|
||||
"""
|
||||
if len(field_names) == 0:
|
||||
raise ValueError('field_names must contain at least one name')
|
||||
|
||||
if extra_field_names is None:
|
||||
extra_field_names = []
|
||||
_validate_names(typename, field_names, extra_field_names)
|
||||
|
||||
typename = _sys.intern(str(typename))
|
||||
field_names = tuple(map(_sys.intern, field_names))
|
||||
extra_field_names = tuple(map(_sys.intern, extra_field_names))
|
||||
|
||||
all_names = field_names + extra_field_names
|
||||
arg_list = ', '.join(field_names)
|
||||
full_list = ', '.join(all_names)
|
||||
repr_fmt = ''.join(('(',
|
||||
', '.join(f'{name}=%({name})r' for name in all_names),
|
||||
')'))
|
||||
tuple_new = tuple.__new__
|
||||
_dict, _tuple, _zip = dict, tuple, zip
|
||||
|
||||
# Create all the named tuple methods to be added to the class namespace
|
||||
|
||||
s = f"""\
|
||||
def __new__(_cls, {arg_list}, **extra_fields):
|
||||
return _tuple_new(_cls, ({arg_list},))
|
||||
|
||||
def __init__(self, {arg_list}, **extra_fields):
|
||||
for key in self._extra_fields:
|
||||
if key not in extra_fields:
|
||||
raise TypeError("missing keyword argument '%s'" % (key,))
|
||||
for key, val in extra_fields.items():
|
||||
if key not in self._extra_fields:
|
||||
raise TypeError("unexpected keyword argument '%s'" % (key,))
|
||||
self.__dict__[key] = val
|
||||
|
||||
def __setattr__(self, key, val):
|
||||
if key in {repr(field_names)}:
|
||||
raise AttributeError("can't set attribute %r of class %r"
|
||||
% (key, self.__class__.__name__))
|
||||
else:
|
||||
self.__dict__[key] = val
|
||||
"""
|
||||
del arg_list
|
||||
namespace = {'_tuple_new': tuple_new,
|
||||
'__builtins__': dict(TypeError=TypeError,
|
||||
AttributeError=AttributeError),
|
||||
'__name__': f'namedtuple_{typename}'}
|
||||
exec(s, namespace)
|
||||
__new__ = namespace['__new__']
|
||||
__new__.__doc__ = f'Create new instance of {typename}({full_list})'
|
||||
__init__ = namespace['__init__']
|
||||
__init__.__doc__ = f'Instantiate instance of {typename}({full_list})'
|
||||
__setattr__ = namespace['__setattr__']
|
||||
|
||||
def __repr__(self):
|
||||
'Return a nicely formatted representation string'
|
||||
return self.__class__.__name__ + repr_fmt % self._asdict()
|
||||
|
||||
def _asdict(self):
|
||||
'Return a new dict which maps field names to their values.'
|
||||
out = _dict(_zip(self._fields, self))
|
||||
out.update(self.__dict__)
|
||||
return out
|
||||
|
||||
def __getnewargs_ex__(self):
|
||||
'Return self as a plain tuple. Used by copy and pickle.'
|
||||
return _tuple(self), self.__dict__
|
||||
|
||||
# Modify function metadata to help with introspection and debugging
|
||||
for method in (__new__, __repr__, _asdict, __getnewargs_ex__):
|
||||
method.__qualname__ = f'{typename}.{method.__name__}'
|
||||
|
||||
# Build-up the class namespace dictionary
|
||||
# and use type() to build the result class
|
||||
class_namespace = {
|
||||
'__doc__': f'{typename}({full_list})',
|
||||
'_fields': field_names,
|
||||
'__new__': __new__,
|
||||
'__init__': __init__,
|
||||
'__repr__': __repr__,
|
||||
'__setattr__': __setattr__,
|
||||
'_asdict': _asdict,
|
||||
'_extra_fields': extra_field_names,
|
||||
'__getnewargs_ex__': __getnewargs_ex__,
|
||||
}
|
||||
for index, name in enumerate(field_names):
|
||||
|
||||
def _get(self, index=index):
|
||||
return self[index]
|
||||
class_namespace[name] = property(_get)
|
||||
for name in extra_field_names:
|
||||
|
||||
def _get(self, name=name):
|
||||
return self.__dict__[name]
|
||||
class_namespace[name] = property(_get)
|
||||
|
||||
result = type(typename, (tuple,), class_namespace)
|
||||
|
||||
# For pickling to work, the __module__ variable needs to be set to the
|
||||
# frame where the named tuple is created. Bypass this step in environments
|
||||
# where sys._getframe is not defined (Jython for example) or sys._getframe
|
||||
# is not defined for arguments greater than 0 (IronPython), or where the
|
||||
# user has specified a particular module.
|
||||
if module is None:
|
||||
try:
|
||||
module = _sys._getframe(1).f_globals.get('__name__', '__main__')
|
||||
except (AttributeError, ValueError):
|
||||
pass
|
||||
if module is not None:
|
||||
result.__module__ = module
|
||||
__new__.__module__ = module
|
||||
|
||||
return result
|
||||
227
.CondaPkg/env/Lib/site-packages/scipy/_lib/_ccallback.py
vendored
Normal file
@@ -0,0 +1,227 @@
|
||||
from . import _ccallback_c
|
||||
|
||||
import ctypes
|
||||
|
||||
PyCFuncPtr = ctypes.CFUNCTYPE(ctypes.c_void_p).__bases__[0]
|
||||
|
||||
ffi = None
|
||||
|
||||
class CData:
|
||||
pass
|
||||
|
||||
def _import_cffi():
|
||||
global ffi, CData
|
||||
|
||||
if ffi is not None:
|
||||
return
|
||||
|
||||
try:
|
||||
import cffi
|
||||
ffi = cffi.FFI()
|
||||
CData = ffi.CData
|
||||
except ImportError:
|
||||
ffi = False
|
||||
|
||||
|
||||
class LowLevelCallable(tuple):
|
||||
"""
|
||||
Low-level callback function.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
function : {PyCapsule, ctypes function pointer, cffi function pointer}
|
||||
Low-level callback function.
|
||||
user_data : {PyCapsule, ctypes void pointer, cffi void pointer}
|
||||
User data to pass on to the callback function.
|
||||
signature : str, optional
|
||||
Signature of the function. If omitted, determined from *function*,
|
||||
if possible.
|
||||
|
||||
Attributes
|
||||
----------
|
||||
function
|
||||
Callback function given.
|
||||
user_data
|
||||
User data given.
|
||||
signature
|
||||
Signature of the function.
|
||||
|
||||
Methods
|
||||
-------
|
||||
from_cython
|
||||
Class method for constructing callables from Cython C-exported
|
||||
functions.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The argument ``function`` can be one of:
|
||||
|
||||
- PyCapsule, whose name contains the C function signature
|
||||
- ctypes function pointer
|
||||
- cffi function pointer
|
||||
|
||||
The signature of the low-level callback must match one of those expected
|
||||
by the routine it is passed to.
|
||||
|
||||
If constructing low-level functions from a PyCapsule, the name of the
|
||||
capsule must be the corresponding signature, in the format::
|
||||
|
||||
return_type (arg1_type, arg2_type, ...)
|
||||
|
||||
For example::
|
||||
|
||||
"void (double)"
|
||||
"double (double, int *, void *)"
|
||||
|
||||
The context of a PyCapsule passed in as ``function`` is used as ``user_data``,
|
||||
if an explicit value for ``user_data`` was not given.
|
||||
|
||||
"""
|
||||
|
||||
# Make the class immutable
|
||||
__slots__ = ()
|
||||
|
||||
def __new__(cls, function, user_data=None, signature=None):
|
||||
# We need to hold a reference to the function & user data,
|
||||
# to prevent them going out of scope
|
||||
item = cls._parse_callback(function, user_data, signature)
|
||||
return tuple.__new__(cls, (item, function, user_data))
|
||||
|
||||
def __repr__(self):
|
||||
return "LowLevelCallable({!r}, {!r})".format(self.function, self.user_data)
|
||||
|
||||
@property
|
||||
def function(self):
|
||||
return tuple.__getitem__(self, 1)
|
||||
|
||||
@property
|
||||
def user_data(self):
|
||||
return tuple.__getitem__(self, 2)
|
||||
|
||||
@property
|
||||
def signature(self):
|
||||
return _ccallback_c.get_capsule_signature(tuple.__getitem__(self, 0))
|
||||
|
||||
def __getitem__(self, idx):
|
||||
raise ValueError()
|
||||
|
||||
@classmethod
|
||||
def from_cython(cls, module, name, user_data=None, signature=None):
|
||||
"""
|
||||
Create a low-level callback function from an exported Cython function.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
module : module
|
||||
Cython module where the exported function resides
|
||||
name : str
|
||||
Name of the exported function
|
||||
user_data : {PyCapsule, ctypes void pointer, cffi void pointer}, optional
|
||||
User data to pass on to the callback function.
|
||||
signature : str, optional
|
||||
Signature of the function. If omitted, determined from *function*.
|
||||
|
||||
"""
|
||||
try:
|
||||
function = module.__pyx_capi__[name]
|
||||
except AttributeError as e:
|
||||
raise ValueError("Given module is not a Cython module with __pyx_capi__ attribute") from e
|
||||
except KeyError as e:
|
||||
raise ValueError("No function {!r} found in __pyx_capi__ of the module".format(name)) from e
|
||||
return cls(function, user_data, signature)
|
||||
|
||||
@classmethod
|
||||
def _parse_callback(cls, obj, user_data=None, signature=None):
|
||||
_import_cffi()
|
||||
|
||||
if isinstance(obj, LowLevelCallable):
|
||||
func = tuple.__getitem__(obj, 0)
|
||||
elif isinstance(obj, PyCFuncPtr):
|
||||
func, signature = _get_ctypes_func(obj, signature)
|
||||
elif isinstance(obj, CData):
|
||||
func, signature = _get_cffi_func(obj, signature)
|
||||
elif _ccallback_c.check_capsule(obj):
|
||||
func = obj
|
||||
else:
|
||||
raise ValueError("Given input is not a callable or a low-level callable (pycapsule/ctypes/cffi)")
|
||||
|
||||
if isinstance(user_data, ctypes.c_void_p):
|
||||
context = _get_ctypes_data(user_data)
|
||||
elif isinstance(user_data, CData):
|
||||
context = _get_cffi_data(user_data)
|
||||
elif user_data is None:
|
||||
context = 0
|
||||
elif _ccallback_c.check_capsule(user_data):
|
||||
context = user_data
|
||||
else:
|
||||
raise ValueError("Given user data is not a valid low-level void* pointer (pycapsule/ctypes/cffi)")
|
||||
|
||||
return _ccallback_c.get_raw_capsule(func, signature, context)
|
||||
|
||||
|
||||
#
|
||||
# ctypes helpers
|
||||
#
|
||||
|
||||
def _get_ctypes_func(func, signature=None):
|
||||
# Get function pointer
|
||||
func_ptr = ctypes.cast(func, ctypes.c_void_p).value
|
||||
|
||||
# Construct function signature
|
||||
if signature is None:
|
||||
signature = _typename_from_ctypes(func.restype) + " ("
|
||||
for j, arg in enumerate(func.argtypes):
|
||||
if j == 0:
|
||||
signature += _typename_from_ctypes(arg)
|
||||
else:
|
||||
signature += ", " + _typename_from_ctypes(arg)
|
||||
signature += ")"
|
||||
|
||||
return func_ptr, signature
|
||||
|
||||
|
||||
def _typename_from_ctypes(item):
|
||||
if item is None:
|
||||
return "void"
|
||||
elif item is ctypes.c_void_p:
|
||||
return "void *"
|
||||
|
||||
name = item.__name__
|
||||
|
||||
pointer_level = 0
|
||||
while name.startswith("LP_"):
|
||||
pointer_level += 1
|
||||
name = name[3:]
|
||||
|
||||
if name.startswith('c_'):
|
||||
name = name[2:]
|
||||
|
||||
if pointer_level > 0:
|
||||
name += " " + "*"*pointer_level
|
||||
|
||||
return name
|
||||
|
||||
|
||||
def _get_ctypes_data(data):
|
||||
# Get voidp pointer
|
||||
return ctypes.cast(data, ctypes.c_void_p).value
|
||||
|
||||
|
||||
#
|
||||
# CFFI helpers
|
||||
#
|
||||
|
||||
def _get_cffi_func(func, signature=None):
|
||||
# Get function pointer
|
||||
func_ptr = ffi.cast('uintptr_t', func)
|
||||
|
||||
# Get signature
|
||||
if signature is None:
|
||||
signature = ffi.getctype(ffi.typeof(func)).replace('(*)', ' ')
|
||||
|
||||
return func_ptr, signature
|
||||
|
||||
|
||||
def _get_cffi_data(data):
|
||||
# Get pointer
|
||||
return ffi.cast('uintptr_t', data)
|
||||
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_ccallback_c.cp311-win_amd64.dll.a
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_ccallback_c.cp311-win_amd64.pyd
vendored
Normal file
Binary file not shown.
228
.CondaPkg/env/Lib/site-packages/scipy/_lib/_disjoint_set.py
vendored
Normal file
@@ -0,0 +1,228 @@
|
||||
"""
|
||||
Disjoint set data structure
|
||||
"""
|
||||
|
||||
|
||||
class DisjointSet:
|
||||
""" Disjoint set data structure for incremental connectivity queries.
|
||||
|
||||
.. versionadded:: 1.6.0
|
||||
|
||||
Attributes
|
||||
----------
|
||||
n_subsets : int
|
||||
The number of subsets.
|
||||
|
||||
Methods
|
||||
-------
|
||||
add
|
||||
merge
|
||||
connected
|
||||
subset
|
||||
subsets
|
||||
__getitem__
|
||||
|
||||
Notes
|
||||
-----
|
||||
This class implements the disjoint set [1]_, also known as the *union-find*
|
||||
or *merge-find* data structure. The *find* operation (implemented in
|
||||
`__getitem__`) implements the *path halving* variant. The *merge* method
|
||||
implements the *merge by size* variant.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] https://en.wikipedia.org/wiki/Disjoint-set_data_structure
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from scipy.cluster.hierarchy import DisjointSet
|
||||
|
||||
Initialize a disjoint set:
|
||||
|
||||
>>> disjoint_set = DisjointSet([1, 2, 3, 'a', 'b'])
|
||||
|
||||
Merge some subsets:
|
||||
|
||||
>>> disjoint_set.merge(1, 2)
|
||||
True
|
||||
>>> disjoint_set.merge(3, 'a')
|
||||
True
|
||||
>>> disjoint_set.merge('a', 'b')
|
||||
True
|
||||
>>> disjoint_set.merge('b', 'b')
|
||||
False
|
||||
|
||||
Find root elements:
|
||||
|
||||
>>> disjoint_set[2]
|
||||
1
|
||||
>>> disjoint_set['b']
|
||||
3
|
||||
|
||||
Test connectivity:
|
||||
|
||||
>>> disjoint_set.connected(1, 2)
|
||||
True
|
||||
>>> disjoint_set.connected(1, 'b')
|
||||
False
|
||||
|
||||
List elements in disjoint set:
|
||||
|
||||
>>> list(disjoint_set)
|
||||
[1, 2, 3, 'a', 'b']
|
||||
|
||||
Get the subset containing 'a':
|
||||
|
||||
>>> disjoint_set.subset('a')
|
||||
{'a', 3, 'b'}
|
||||
|
||||
Get all subsets in the disjoint set:
|
||||
|
||||
>>> disjoint_set.subsets()
|
||||
[{1, 2}, {'a', 3, 'b'}]
|
||||
"""
|
||||
def __init__(self, elements=None):
|
||||
self.n_subsets = 0
|
||||
self._sizes = {}
|
||||
self._parents = {}
|
||||
# _nbrs is a circular linked list which links connected elements.
|
||||
self._nbrs = {}
|
||||
# _indices tracks the element insertion order in `__iter__`.
|
||||
self._indices = {}
|
||||
if elements is not None:
|
||||
for x in elements:
|
||||
self.add(x)
|
||||
|
||||
def __iter__(self):
|
||||
"""Returns an iterator of the elements in the disjoint set.
|
||||
|
||||
Elements are ordered by insertion order.
|
||||
"""
|
||||
return iter(self._indices)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._indices)
|
||||
|
||||
def __contains__(self, x):
|
||||
return x in self._indices
|
||||
|
||||
def __getitem__(self, x):
|
||||
"""Find the root element of `x`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x : hashable object
|
||||
Input element.
|
||||
|
||||
Returns
|
||||
-------
|
||||
root : hashable object
|
||||
Root element of `x`.
|
||||
"""
|
||||
if x not in self._indices:
|
||||
raise KeyError(x)
|
||||
|
||||
# find by "path halving"
|
||||
parents = self._parents
|
||||
while self._indices[x] != self._indices[parents[x]]:
|
||||
parents[x] = parents[parents[x]]
|
||||
x = parents[x]
|
||||
return x
|
||||
|
||||
def add(self, x):
|
||||
"""Add element `x` to disjoint set
|
||||
"""
|
||||
if x in self._indices:
|
||||
return
|
||||
|
||||
self._sizes[x] = 1
|
||||
self._parents[x] = x
|
||||
self._nbrs[x] = x
|
||||
self._indices[x] = len(self._indices)
|
||||
self.n_subsets += 1
|
||||
|
||||
def merge(self, x, y):
|
||||
"""Merge the subsets of `x` and `y`.
|
||||
|
||||
The smaller subset (the child) is merged into the larger subset (the
|
||||
parent). If the subsets are of equal size, the root element which was
|
||||
first inserted into the disjoint set is selected as the parent.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x, y : hashable object
|
||||
Elements to merge.
|
||||
|
||||
Returns
|
||||
-------
|
||||
merged : bool
|
||||
True if `x` and `y` were in disjoint sets, False otherwise.
|
||||
"""
|
||||
xr = self[x]
|
||||
yr = self[y]
|
||||
if self._indices[xr] == self._indices[yr]:
|
||||
return False
|
||||
|
||||
sizes = self._sizes
|
||||
if (sizes[xr], self._indices[yr]) < (sizes[yr], self._indices[xr]):
|
||||
xr, yr = yr, xr
|
||||
self._parents[yr] = xr
|
||||
self._sizes[xr] += self._sizes[yr]
|
||||
self._nbrs[xr], self._nbrs[yr] = self._nbrs[yr], self._nbrs[xr]
|
||||
self.n_subsets -= 1
|
||||
return True
|
||||
|
||||
def connected(self, x, y):
|
||||
"""Test whether `x` and `y` are in the same subset.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x, y : hashable object
|
||||
Elements to test.
|
||||
|
||||
Returns
|
||||
-------
|
||||
result : bool
|
||||
True if `x` and `y` are in the same set, False otherwise.
|
||||
"""
|
||||
return self._indices[self[x]] == self._indices[self[y]]
|
||||
|
||||
def subset(self, x):
|
||||
"""Get the subset containing `x`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
x : hashable object
|
||||
Input element.
|
||||
|
||||
Returns
|
||||
-------
|
||||
result : set
|
||||
Subset containing `x`.
|
||||
"""
|
||||
if x not in self._indices:
|
||||
raise KeyError(x)
|
||||
|
||||
result = [x]
|
||||
nxt = self._nbrs[x]
|
||||
while self._indices[nxt] != self._indices[x]:
|
||||
result.append(nxt)
|
||||
nxt = self._nbrs[nxt]
|
||||
return set(result)
|
||||
|
||||
def subsets(self):
|
||||
"""Get all the subsets in the disjoint set.
|
||||
|
||||
Returns
|
||||
-------
|
||||
result : list
|
||||
Subsets in the disjoint set.
|
||||
"""
|
||||
result = []
|
||||
visited = set()
|
||||
for x in self:
|
||||
if x not in visited:
|
||||
xset = self.subset(x)
|
||||
visited.update(xset)
|
||||
result.append(xset)
|
||||
return result
|
||||
680
.CondaPkg/env/Lib/site-packages/scipy/_lib/_docscrape.py
vendored
Normal file
@@ -0,0 +1,680 @@
|
||||
"""Extract reference documentation from the NumPy source tree.
|
||||
|
||||
"""
|
||||
# copied from numpydoc/docscrape.py
|
||||
import inspect
|
||||
import textwrap
|
||||
import re
|
||||
import pydoc
|
||||
from warnings import warn
|
||||
from collections import namedtuple
|
||||
from collections.abc import Callable, Mapping
|
||||
import copy
|
||||
import sys
|
||||
|
||||
|
||||
def strip_blank_lines(l): # noqa
|
||||
"Remove leading and trailing blank lines from a list of lines"
|
||||
while l and not l[0].strip():
|
||||
del l[0]
|
||||
while l and not l[-1].strip():
|
||||
del l[-1]
|
||||
return l
|
||||
|
||||
|
||||
class Reader(object):
|
||||
"""A line-based string reader.
|
||||
|
||||
"""
|
||||
def __init__(self, data):
|
||||
"""
|
||||
Parameters
|
||||
----------
|
||||
data : str
|
||||
String with lines separated by '\\n'.
|
||||
|
||||
"""
|
||||
if isinstance(data, list):
|
||||
self._str = data
|
||||
else:
|
||||
self._str = data.split('\n') # store string as list of lines
|
||||
|
||||
self.reset()
|
||||
|
||||
def __getitem__(self, n):
|
||||
return self._str[n]
|
||||
|
||||
def reset(self):
|
||||
self._l = 0 # current line nr
|
||||
|
||||
def read(self):
|
||||
if not self.eof():
|
||||
out = self[self._l]
|
||||
self._l += 1
|
||||
return out
|
||||
else:
|
||||
return ''
|
||||
|
||||
def seek_next_non_empty_line(self):
|
||||
for l in self[self._l:]: # noqa
|
||||
if l.strip():
|
||||
break
|
||||
else:
|
||||
self._l += 1
|
||||
|
||||
def eof(self):
|
||||
return self._l >= len(self._str)
|
||||
|
||||
def read_to_condition(self, condition_func):
|
||||
start = self._l
|
||||
for line in self[start:]:
|
||||
if condition_func(line):
|
||||
return self[start:self._l]
|
||||
self._l += 1
|
||||
if self.eof():
|
||||
return self[start:self._l+1]
|
||||
return []
|
||||
|
||||
def read_to_next_empty_line(self):
|
||||
self.seek_next_non_empty_line()
|
||||
|
||||
def is_empty(line):
|
||||
return not line.strip()
|
||||
|
||||
return self.read_to_condition(is_empty)
|
||||
|
||||
def read_to_next_unindented_line(self):
|
||||
def is_unindented(line):
|
||||
return (line.strip() and (len(line.lstrip()) == len(line)))
|
||||
return self.read_to_condition(is_unindented)
|
||||
|
||||
def peek(self, n=0):
|
||||
if self._l + n < len(self._str):
|
||||
return self[self._l + n]
|
||||
else:
|
||||
return ''
|
||||
|
||||
def is_empty(self):
|
||||
return not ''.join(self._str).strip()
|
||||
|
||||
|
||||
class ParseError(Exception):
|
||||
def __str__(self):
|
||||
message = self.args[0]
|
||||
if hasattr(self, 'docstring'):
|
||||
message = "%s in %r" % (message, self.docstring)
|
||||
return message
|
||||
|
||||
|
||||
Parameter = namedtuple('Parameter', ['name', 'type', 'desc'])
|
||||
|
||||
|
||||
class NumpyDocString(Mapping):
|
||||
"""Parses a numpydoc string to an abstract representation
|
||||
|
||||
Instances define a mapping from section title to structured data.
|
||||
|
||||
"""
|
||||
|
||||
sections = {
|
||||
'Signature': '',
|
||||
'Summary': [''],
|
||||
'Extended Summary': [],
|
||||
'Parameters': [],
|
||||
'Returns': [],
|
||||
'Yields': [],
|
||||
'Receives': [],
|
||||
'Raises': [],
|
||||
'Warns': [],
|
||||
'Other Parameters': [],
|
||||
'Attributes': [],
|
||||
'Methods': [],
|
||||
'See Also': [],
|
||||
'Notes': [],
|
||||
'Warnings': [],
|
||||
'References': '',
|
||||
'Examples': '',
|
||||
'index': {}
|
||||
}
|
||||
|
||||
def __init__(self, docstring, config={}):
|
||||
orig_docstring = docstring
|
||||
docstring = textwrap.dedent(docstring).split('\n')
|
||||
|
||||
self._doc = Reader(docstring)
|
||||
self._parsed_data = copy.deepcopy(self.sections)
|
||||
|
||||
try:
|
||||
self._parse()
|
||||
except ParseError as e:
|
||||
e.docstring = orig_docstring
|
||||
raise
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self._parsed_data[key]
|
||||
|
||||
def __setitem__(self, key, val):
|
||||
if key not in self._parsed_data:
|
||||
self._error_location("Unknown section %s" % key, error=False)
|
||||
else:
|
||||
self._parsed_data[key] = val
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._parsed_data)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._parsed_data)
|
||||
|
||||
def _is_at_section(self):
|
||||
self._doc.seek_next_non_empty_line()
|
||||
|
||||
if self._doc.eof():
|
||||
return False
|
||||
|
||||
l1 = self._doc.peek().strip() # e.g. Parameters
|
||||
|
||||
if l1.startswith('.. index::'):
|
||||
return True
|
||||
|
||||
l2 = self._doc.peek(1).strip() # ---------- or ==========
|
||||
return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))
|
||||
|
||||
def _strip(self, doc):
|
||||
i = 0
|
||||
j = 0
|
||||
for i, line in enumerate(doc):
|
||||
if line.strip():
|
||||
break
|
||||
|
||||
for j, line in enumerate(doc[::-1]):
|
||||
if line.strip():
|
||||
break
|
||||
|
||||
return doc[i:len(doc)-j]
|
||||
|
||||
def _read_to_next_section(self):
|
||||
section = self._doc.read_to_next_empty_line()
|
||||
|
||||
while not self._is_at_section() and not self._doc.eof():
|
||||
if not self._doc.peek(-1).strip(): # previous line was empty
|
||||
section += ['']
|
||||
|
||||
section += self._doc.read_to_next_empty_line()
|
||||
|
||||
return section
|
||||
|
||||
def _read_sections(self):
|
||||
while not self._doc.eof():
|
||||
data = self._read_to_next_section()
|
||||
name = data[0].strip()
|
||||
|
||||
if name.startswith('..'): # index section
|
||||
yield name, data[1:]
|
||||
elif len(data) < 2:
|
||||
yield StopIteration
|
||||
else:
|
||||
yield name, self._strip(data[2:])
|
||||
|
||||
def _parse_param_list(self, content, single_element_is_type=False):
|
||||
r = Reader(content)
|
||||
params = []
|
||||
while not r.eof():
|
||||
header = r.read().strip()
|
||||
if ' : ' in header:
|
||||
arg_name, arg_type = header.split(' : ')[:2]
|
||||
else:
|
||||
if single_element_is_type:
|
||||
arg_name, arg_type = '', header
|
||||
else:
|
||||
arg_name, arg_type = header, ''
|
||||
|
||||
desc = r.read_to_next_unindented_line()
|
||||
desc = dedent_lines(desc)
|
||||
desc = strip_blank_lines(desc)
|
||||
|
||||
params.append(Parameter(arg_name, arg_type, desc))
|
||||
|
||||
return params
|
||||
|
||||
# See also supports the following formats.
|
||||
#
|
||||
# <FUNCNAME>
|
||||
# <FUNCNAME> SPACE* COLON SPACE+ <DESC> SPACE*
|
||||
# <FUNCNAME> ( COMMA SPACE+ <FUNCNAME>)+ (COMMA | PERIOD)? SPACE*
|
||||
# <FUNCNAME> ( COMMA SPACE+ <FUNCNAME>)* SPACE* COLON SPACE+ <DESC> SPACE*
|
||||
|
||||
# <FUNCNAME> is one of
|
||||
# <PLAIN_FUNCNAME>
|
||||
# COLON <ROLE> COLON BACKTICK <PLAIN_FUNCNAME> BACKTICK
|
||||
# where
|
||||
# <PLAIN_FUNCNAME> is a legal function name, and
|
||||
# <ROLE> is any nonempty sequence of word characters.
|
||||
# Examples: func_f1 :meth:`func_h1` :obj:`~baz.obj_r` :class:`class_j`
|
||||
# <DESC> is a string describing the function.
|
||||
|
||||
_role = r":(?P<role>\w+):"
|
||||
_funcbacktick = r"`(?P<name>(?:~\w+\.)?[a-zA-Z0-9_\.-]+)`"
|
||||
_funcplain = r"(?P<name2>[a-zA-Z0-9_\.-]+)"
|
||||
_funcname = r"(" + _role + _funcbacktick + r"|" + _funcplain + r")"
|
||||
_funcnamenext = _funcname.replace('role', 'rolenext')
|
||||
_funcnamenext = _funcnamenext.replace('name', 'namenext')
|
||||
_description = r"(?P<description>\s*:(\s+(?P<desc>\S+.*))?)?\s*$"
|
||||
_func_rgx = re.compile(r"^\s*" + _funcname + r"\s*")
|
||||
_line_rgx = re.compile(
|
||||
r"^\s*" +
|
||||
r"(?P<allfuncs>" + # group for all function names
|
||||
_funcname +
|
||||
r"(?P<morefuncs>([,]\s+" + _funcnamenext + r")*)" +
|
||||
r")" + # end of "allfuncs"
|
||||
# Some function lists have a trailing comma (or period) '\s*'
|
||||
r"(?P<trailing>[,\.])?" +
|
||||
_description)
|
||||
|
||||
# Empty <DESC> elements are replaced with '..'
|
||||
empty_description = '..'
|
||||
|
||||
def _parse_see_also(self, content):
|
||||
"""
|
||||
func_name : Descriptive text
|
||||
continued text
|
||||
another_func_name : Descriptive text
|
||||
func_name1, func_name2, :meth:`func_name`, func_name3
|
||||
|
||||
"""
|
||||
|
||||
items = []
|
||||
|
||||
def parse_item_name(text):
|
||||
"""Match ':role:`name`' or 'name'."""
|
||||
m = self._func_rgx.match(text)
|
||||
if not m:
|
||||
raise ParseError("%s is not a item name" % text)
|
||||
role = m.group('role')
|
||||
name = m.group('name') if role else m.group('name2')
|
||||
return name, role, m.end()
|
||||
|
||||
rest = []
|
||||
for line in content:
|
||||
if not line.strip():
|
||||
continue
|
||||
|
||||
line_match = self._line_rgx.match(line)
|
||||
description = None
|
||||
if line_match:
|
||||
description = line_match.group('desc')
|
||||
if line_match.group('trailing') and description:
|
||||
self._error_location(
|
||||
'Unexpected comma or period after function list at '
|
||||
'index %d of line "%s"' % (line_match.end('trailing'),
|
||||
line),
|
||||
error=False)
|
||||
if not description and line.startswith(' '):
|
||||
rest.append(line.strip())
|
||||
elif line_match:
|
||||
funcs = []
|
||||
text = line_match.group('allfuncs')
|
||||
while True:
|
||||
if not text.strip():
|
||||
break
|
||||
name, role, match_end = parse_item_name(text)
|
||||
funcs.append((name, role))
|
||||
text = text[match_end:].strip()
|
||||
if text and text[0] == ',':
|
||||
text = text[1:].strip()
|
||||
rest = list(filter(None, [description]))
|
||||
items.append((funcs, rest))
|
||||
else:
|
||||
raise ParseError("%s is not a item name" % line)
|
||||
return items
|
||||
|
||||
def _parse_index(self, section, content):
|
||||
"""
|
||||
.. index: default
|
||||
:refguide: something, else, and more
|
||||
|
||||
"""
|
||||
def strip_each_in(lst):
|
||||
return [s.strip() for s in lst]
|
||||
|
||||
out = {}
|
||||
section = section.split('::')
|
||||
if len(section) > 1:
|
||||
out['default'] = strip_each_in(section[1].split(','))[0]
|
||||
for line in content:
|
||||
line = line.split(':')
|
||||
if len(line) > 2:
|
||||
out[line[1]] = strip_each_in(line[2].split(','))
|
||||
return out
|
||||
|
||||
def _parse_summary(self):
|
||||
"""Grab signature (if given) and summary"""
|
||||
if self._is_at_section():
|
||||
return
|
||||
|
||||
# If several signatures present, take the last one
|
||||
while True:
|
||||
summary = self._doc.read_to_next_empty_line()
|
||||
summary_str = " ".join([s.strip() for s in summary]).strip()
|
||||
compiled = re.compile(r'^([\w., ]+=)?\s*[\w\.]+\(.*\)$')
|
||||
if compiled.match(summary_str):
|
||||
self['Signature'] = summary_str
|
||||
if not self._is_at_section():
|
||||
continue
|
||||
break
|
||||
|
||||
if summary is not None:
|
||||
self['Summary'] = summary
|
||||
|
||||
if not self._is_at_section():
|
||||
self['Extended Summary'] = self._read_to_next_section()
|
||||
|
||||
def _parse(self):
|
||||
self._doc.reset()
|
||||
self._parse_summary()
|
||||
|
||||
sections = list(self._read_sections())
|
||||
section_names = set([section for section, content in sections])
|
||||
|
||||
has_returns = 'Returns' in section_names
|
||||
has_yields = 'Yields' in section_names
|
||||
# We could do more tests, but we are not. Arbitrarily.
|
||||
if has_returns and has_yields:
|
||||
msg = 'Docstring contains both a Returns and Yields section.'
|
||||
raise ValueError(msg)
|
||||
if not has_yields and 'Receives' in section_names:
|
||||
msg = 'Docstring contains a Receives section but not Yields.'
|
||||
raise ValueError(msg)
|
||||
|
||||
for (section, content) in sections:
|
||||
if not section.startswith('..'):
|
||||
section = (s.capitalize() for s in section.split(' '))
|
||||
section = ' '.join(section)
|
||||
if self.get(section):
|
||||
self._error_location("The section %s appears twice"
|
||||
% section)
|
||||
|
||||
if section in ('Parameters', 'Other Parameters', 'Attributes',
|
||||
'Methods'):
|
||||
self[section] = self._parse_param_list(content)
|
||||
elif section in ('Returns', 'Yields', 'Raises', 'Warns',
|
||||
'Receives'):
|
||||
self[section] = self._parse_param_list(
|
||||
content, single_element_is_type=True)
|
||||
elif section.startswith('.. index::'):
|
||||
self['index'] = self._parse_index(section, content)
|
||||
elif section == 'See Also':
|
||||
self['See Also'] = self._parse_see_also(content)
|
||||
else:
|
||||
self[section] = content
|
||||
|
||||
def _error_location(self, msg, error=True):
|
||||
if hasattr(self, '_obj'):
|
||||
# we know where the docs came from:
|
||||
try:
|
||||
filename = inspect.getsourcefile(self._obj)
|
||||
except TypeError:
|
||||
filename = None
|
||||
msg = msg + (" in the docstring of %s in %s."
|
||||
% (self._obj, filename))
|
||||
if error:
|
||||
raise ValueError(msg)
|
||||
else:
|
||||
warn(msg)
|
||||
|
||||
# string conversion routines
|
||||
|
||||
def _str_header(self, name, symbol='-'):
|
||||
return [name, len(name)*symbol]
|
||||
|
||||
def _str_indent(self, doc, indent=4):
|
||||
out = []
|
||||
for line in doc:
|
||||
out += [' '*indent + line]
|
||||
return out
|
||||
|
||||
def _str_signature(self):
|
||||
if self['Signature']:
|
||||
return [self['Signature'].replace('*', r'\*')] + ['']
|
||||
else:
|
||||
return ['']
|
||||
|
||||
def _str_summary(self):
|
||||
if self['Summary']:
|
||||
return self['Summary'] + ['']
|
||||
else:
|
||||
return []
|
||||
|
||||
def _str_extended_summary(self):
|
||||
if self['Extended Summary']:
|
||||
return self['Extended Summary'] + ['']
|
||||
else:
|
||||
return []
|
||||
|
||||
def _str_param_list(self, name):
|
||||
out = []
|
||||
if self[name]:
|
||||
out += self._str_header(name)
|
||||
for param in self[name]:
|
||||
parts = []
|
||||
if param.name:
|
||||
parts.append(param.name)
|
||||
if param.type:
|
||||
parts.append(param.type)
|
||||
out += [' : '.join(parts)]
|
||||
if param.desc and ''.join(param.desc).strip():
|
||||
out += self._str_indent(param.desc)
|
||||
out += ['']
|
||||
return out
|
||||
|
||||
def _str_section(self, name):
|
||||
out = []
|
||||
if self[name]:
|
||||
out += self._str_header(name)
|
||||
out += self[name]
|
||||
out += ['']
|
||||
return out
|
||||
|
||||
def _str_see_also(self, func_role):
|
||||
if not self['See Also']:
|
||||
return []
|
||||
out = []
|
||||
out += self._str_header("See Also")
|
||||
out += ['']
|
||||
last_had_desc = True
|
||||
for funcs, desc in self['See Also']:
|
||||
assert isinstance(funcs, list)
|
||||
links = []
|
||||
for func, role in funcs:
|
||||
if role:
|
||||
link = ':%s:`%s`' % (role, func)
|
||||
elif func_role:
|
||||
link = ':%s:`%s`' % (func_role, func)
|
||||
else:
|
||||
link = "`%s`_" % func
|
||||
links.append(link)
|
||||
link = ', '.join(links)
|
||||
out += [link]
|
||||
if desc:
|
||||
out += self._str_indent([' '.join(desc)])
|
||||
last_had_desc = True
|
||||
else:
|
||||
last_had_desc = False
|
||||
out += self._str_indent([self.empty_description])
|
||||
|
||||
if last_had_desc:
|
||||
out += ['']
|
||||
out += ['']
|
||||
return out
|
||||
|
||||
def _str_index(self):
|
||||
idx = self['index']
|
||||
out = []
|
||||
output_index = False
|
||||
default_index = idx.get('default', '')
|
||||
if default_index:
|
||||
output_index = True
|
||||
out += ['.. index:: %s' % default_index]
|
||||
for section, references in idx.items():
|
||||
if section == 'default':
|
||||
continue
|
||||
output_index = True
|
||||
out += [' :%s: %s' % (section, ', '.join(references))]
|
||||
if output_index:
|
||||
return out
|
||||
else:
|
||||
return ''
|
||||
|
||||
def __str__(self, func_role=''):
|
||||
out = []
|
||||
out += self._str_signature()
|
||||
out += self._str_summary()
|
||||
out += self._str_extended_summary()
|
||||
for param_list in ('Parameters', 'Returns', 'Yields', 'Receives',
|
||||
'Other Parameters', 'Raises', 'Warns'):
|
||||
out += self._str_param_list(param_list)
|
||||
out += self._str_section('Warnings')
|
||||
out += self._str_see_also(func_role)
|
||||
for s in ('Notes', 'References', 'Examples'):
|
||||
out += self._str_section(s)
|
||||
for param_list in ('Attributes', 'Methods'):
|
||||
out += self._str_param_list(param_list)
|
||||
out += self._str_index()
|
||||
return '\n'.join(out)
|
||||
|
||||
|
||||
def indent(str, indent=4): # noqa
|
||||
indent_str = ' '*indent
|
||||
if str is None:
|
||||
return indent_str
|
||||
lines = str.split('\n')
|
||||
return '\n'.join(indent_str + l for l in lines) # noqa
|
||||
|
||||
|
||||
def dedent_lines(lines):
|
||||
"""Deindent a list of lines maximally"""
|
||||
return textwrap.dedent("\n".join(lines)).split("\n")
|
||||
|
||||
|
||||
def header(text, style='-'):
|
||||
return text + '\n' + style*len(text) + '\n'
|
||||
|
||||
|
||||
class FunctionDoc(NumpyDocString):
|
||||
def __init__(self, func, role='func', doc=None, config={}):
|
||||
self._f = func
|
||||
self._role = role # e.g. "func" or "meth"
|
||||
|
||||
if doc is None:
|
||||
if func is None:
|
||||
raise ValueError("No function or docstring given")
|
||||
doc = inspect.getdoc(func) or ''
|
||||
NumpyDocString.__init__(self, doc, config)
|
||||
|
||||
def get_func(self):
|
||||
func_name = getattr(self._f, '__name__', self.__class__.__name__)
|
||||
if inspect.isclass(self._f):
|
||||
func = getattr(self._f, '__call__', self._f.__init__)
|
||||
else:
|
||||
func = self._f
|
||||
return func, func_name
|
||||
|
||||
def __str__(self):
|
||||
out = ''
|
||||
|
||||
func, func_name = self.get_func()
|
||||
|
||||
roles = {'func': 'function',
|
||||
'meth': 'method'}
|
||||
|
||||
if self._role:
|
||||
if self._role not in roles:
|
||||
print("Warning: invalid role %s" % self._role)
|
||||
out += '.. %s:: %s\n \n\n' % (roles.get(self._role, ''),
|
||||
func_name)
|
||||
|
||||
out += super(FunctionDoc, self).__str__(func_role=self._role)
|
||||
return out
|
||||
|
||||
|
||||
class ClassDoc(NumpyDocString):
|
||||
|
||||
extra_public_methods = ['__call__']
|
||||
|
||||
def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc,
|
||||
config={}):
|
||||
if not inspect.isclass(cls) and cls is not None:
|
||||
raise ValueError("Expected a class or None, but got %r" % cls)
|
||||
self._cls = cls
|
||||
|
||||
if 'sphinx' in sys.modules:
|
||||
from sphinx.ext.autodoc import ALL
|
||||
else:
|
||||
ALL = object()
|
||||
|
||||
self.show_inherited_members = config.get(
|
||||
'show_inherited_class_members', True)
|
||||
|
||||
if modulename and not modulename.endswith('.'):
|
||||
modulename += '.'
|
||||
self._mod = modulename
|
||||
|
||||
if doc is None:
|
||||
if cls is None:
|
||||
raise ValueError("No class or documentation string given")
|
||||
doc = pydoc.getdoc(cls)
|
||||
|
||||
NumpyDocString.__init__(self, doc)
|
||||
|
||||
_members = config.get('members', [])
|
||||
if _members is ALL:
|
||||
_members = None
|
||||
_exclude = config.get('exclude-members', [])
|
||||
|
||||
if config.get('show_class_members', True) and _exclude is not ALL:
|
||||
def splitlines_x(s):
|
||||
if not s:
|
||||
return []
|
||||
else:
|
||||
return s.splitlines()
|
||||
for field, items in [('Methods', self.methods),
|
||||
('Attributes', self.properties)]:
|
||||
if not self[field]:
|
||||
doc_list = []
|
||||
for name in sorted(items):
|
||||
if (name in _exclude or
|
||||
(_members and name not in _members)):
|
||||
continue
|
||||
try:
|
||||
doc_item = pydoc.getdoc(getattr(self._cls, name))
|
||||
doc_list.append(
|
||||
Parameter(name, '', splitlines_x(doc_item)))
|
||||
except AttributeError:
|
||||
pass # method doesn't exist
|
||||
self[field] = doc_list
|
||||
|
||||
@property
|
||||
def methods(self):
|
||||
if self._cls is None:
|
||||
return []
|
||||
return [name for name, func in inspect.getmembers(self._cls)
|
||||
if ((not name.startswith('_')
|
||||
or name in self.extra_public_methods)
|
||||
and isinstance(func, Callable)
|
||||
and self._is_show_member(name))]
|
||||
|
||||
@property
|
||||
def properties(self):
|
||||
if self._cls is None:
|
||||
return []
|
||||
return [name for name, func in inspect.getmembers(self._cls)
|
||||
if (not name.startswith('_') and
|
||||
(func is None or isinstance(func, property) or
|
||||
inspect.isdatadescriptor(func))
|
||||
and self._is_show_member(name))]
|
||||
|
||||
def _is_show_member(self, name):
|
||||
if self.show_inherited_members:
|
||||
return True # show all class members
|
||||
if name not in self._cls.__dict__:
|
||||
return False # class member is inherited, we do not show it
|
||||
return True
|
||||
145
.CondaPkg/env/Lib/site-packages/scipy/_lib/_finite_differences.py
vendored
Normal file
@@ -0,0 +1,145 @@
from numpy import arange, newaxis, hstack, prod, array


def _central_diff_weights(Np, ndiv=1):
    """
    Return weights for an Np-point central derivative.

    Assumes equally-spaced function points.

    If weights are in the vector w, then
    derivative is w[0] * f(x-ho*dx) + ... + w[-1] * f(x+h0*dx)

    Parameters
    ----------
    Np : int
        Number of points for the central derivative.
    ndiv : int, optional
        Number of divisions. Default is 1.

    Returns
    -------
    w : ndarray
        Weights for an Np-point central derivative. Its size is `Np`.

    Notes
    -----
    Can be inaccurate for a large number of points.

    Examples
    --------
    We can calculate a derivative value of a function.

    >>> def f(x):
    ...     return 2 * x**2 + 3
    >>> x = 3.0 # derivative point
    >>> h = 0.1 # differential step
    >>> Np = 3 # point number for central derivative
    >>> weights = _central_diff_weights(Np) # weights for first derivative
    >>> vals = [f(x + (i - Np/2) * h) for i in range(Np)]
    >>> sum(w * v for (w, v) in zip(weights, vals))/h
    11.79999999999998

    This value is close to the analytical solution:
    f'(x) = 4x, so f'(3) = 12

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Finite_difference

    """
    if Np < ndiv + 1:
        raise ValueError(
            "Number of points must be at least the derivative order + 1."
        )
    if Np % 2 == 0:
        raise ValueError("The number of points must be odd.")
    from scipy import linalg

    ho = Np >> 1
    x = arange(-ho, ho + 1.0)
    x = x[:, newaxis]
    X = x**0.0
    for k in range(1, Np):
        X = hstack([X, x**k])
    w = prod(arange(1, ndiv + 1), axis=0) * linalg.inv(X)[ndiv]
    return w


def _derivative(func, x0, dx=1.0, n=1, args=(), order=3):
    """
    Find the nth derivative of a function at a point.

    Given a function, use a central difference formula with spacing `dx` to
    compute the nth derivative at `x0`.

    Parameters
    ----------
    func : function
        Input function.
    x0 : float
        The point at which the nth derivative is found.
    dx : float, optional
        Spacing.
    n : int, optional
        Order of the derivative. Default is 1.
    args : tuple, optional
        Arguments
    order : int, optional
        Number of points to use, must be odd.

    Notes
    -----
    Decreasing the step size too small can result in round-off error.

    Examples
    --------
    >>> def f(x):
    ...     return x**3 + x**2
    >>> _derivative(f, 1.0, dx=1e-6)
    4.9999999999217337

    """
    if order < n + 1:
        raise ValueError(
            "'order' (the number of points used to compute the derivative), "
            "must be at least the derivative order 'n' + 1."
        )
    if order % 2 == 0:
        raise ValueError(
            "'order' (the number of points used to compute the derivative) "
            "must be odd."
        )
    # pre-computed for n=1 and 2 and low-order for speed.
    if n == 1:
        if order == 3:
            weights = array([-1, 0, 1]) / 2.0
        elif order == 5:
            weights = array([1, -8, 0, 8, -1]) / 12.0
        elif order == 7:
            weights = array([-1, 9, -45, 0, 45, -9, 1]) / 60.0
        elif order == 9:
            weights = array([3, -32, 168, -672, 0, 672, -168, 32, -3]) / 840.0
        else:
            weights = _central_diff_weights(order, 1)
    elif n == 2:
        if order == 3:
            weights = array([1, -2.0, 1])
        elif order == 5:
            weights = array([-1, 16, -30, 16, -1]) / 12.0
        elif order == 7:
            weights = array([2, -27, 270, -490, 270, -27, 2]) / 180.0
        elif order == 9:
            weights = (
                array([-9, 128, -1008, 8064, -14350, 8064, -1008, 128, -9])
                / 5040.0
            )
        else:
            weights = _central_diff_weights(order, 2)
    else:
        weights = _central_diff_weights(order, n)
    val = 0.0
    ho = order >> 1
    for k in range(order):
        val += weights[k] * func(x0 + (k - ho) * dx, *args)
    return val / prod((dx,) * n, axis=0)
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_fpumode.cp311-win_amd64.dll.a
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_fpumode.cp311-win_amd64.pyd
vendored
Normal file
Binary file not shown.
105
.CondaPkg/env/Lib/site-packages/scipy/_lib/_gcutils.py
vendored
Normal file
@@ -0,0 +1,105 @@
"""
Module for testing automatic garbage collection of objects

.. autosummary::
   :toctree: generated/

   set_gc_state - enable or disable garbage collection
   gc_state - context manager for given state of garbage collector
   assert_deallocated - context manager to check for circular references on object

"""
import weakref
import gc

from contextlib import contextmanager
from platform import python_implementation

__all__ = ['set_gc_state', 'gc_state', 'assert_deallocated']


IS_PYPY = python_implementation() == 'PyPy'


class ReferenceError(AssertionError):
    pass


def set_gc_state(state):
    """ Set status of garbage collector """
    if gc.isenabled() == state:
        return
    if state:
        gc.enable()
    else:
        gc.disable()


@contextmanager
def gc_state(state):
    """ Context manager to set state of garbage collector to `state`

    Parameters
    ----------
    state : bool
        True for gc enabled, False for disabled

    Examples
    --------
    >>> with gc_state(False):
    ...     assert not gc.isenabled()
    >>> with gc_state(True):
    ...     assert gc.isenabled()
    """
    orig_state = gc.isenabled()
    set_gc_state(state)
    yield
    set_gc_state(orig_state)


@contextmanager
def assert_deallocated(func, *args, **kwargs):
    """Context manager to check that object is deallocated

    This is useful for checking that an object can be freed directly by
    reference counting, without requiring gc to break reference cycles.
    GC is disabled inside the context manager.

    This check is not available on PyPy.

    Parameters
    ----------
    func : callable
        Callable to create object to check
    \\*args : sequence
        positional arguments to `func` in order to create object to check
    \\*\\*kwargs : dict
        keyword arguments to `func` in order to create object to check

    Examples
    --------
    >>> class C: pass
    >>> with assert_deallocated(C) as c:
    ...     # do something
    ...     del c

    >>> class C:
    ...     def __init__(self):
    ...         self._circular = self # Make circular reference
    >>> with assert_deallocated(C) as c: #doctest: +IGNORE_EXCEPTION_DETAIL
    ...     # do something
    ...     del c
    Traceback (most recent call last):
        ...
    ReferenceError: Remaining reference(s) to object
    """
    if IS_PYPY:
        raise RuntimeError("assert_deallocated is unavailable on PyPy")

    with gc_state(False):
        obj = func(*args, **kwargs)
        ref = weakref.ref(obj)
        yield obj
        del obj
        if ref() is not None:
            raise ReferenceError("Remaining reference(s) to object")
487
.CondaPkg/env/Lib/site-packages/scipy/_lib/_pep440.py
vendored
Normal file
@@ -0,0 +1,487 @@
|
||||
"""Utility to compare pep440 compatible version strings.
|
||||
|
||||
The LooseVersion and StrictVersion classes that distutils provides don't
|
||||
work; they don't recognize anything like alpha/beta/rc/dev versions.
|
||||
"""
|
||||
|
||||
# Copyright (c) Donald Stufft and individual contributors.
|
||||
# All rights reserved.
|
||||
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
|
||||
# 1. Redistributions of source code must retain the above copyright notice,
|
||||
# this list of conditions and the following disclaimer.
|
||||
|
||||
# 2. Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution.
|
||||
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||
# POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import collections
|
||||
import itertools
|
||||
import re
|
||||
|
||||
|
||||
__all__ = [
|
||||
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN",
|
||||
]
|
||||
|
||||
|
||||
# BEGIN packaging/_structures.py
|
||||
|
||||
|
||||
class Infinity:
|
||||
def __repr__(self):
|
||||
return "Infinity"
|
||||
|
||||
def __hash__(self):
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other):
|
||||
return False
|
||||
|
||||
def __le__(self, other):
|
||||
return False
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other):
|
||||
return True
|
||||
|
||||
def __ge__(self, other):
|
||||
return True
|
||||
|
||||
def __neg__(self):
|
||||
return NegativeInfinity
|
||||
|
||||
|
||||
Infinity = Infinity()
|
||||
|
||||
|
||||
class NegativeInfinity:
|
||||
def __repr__(self):
|
||||
return "-Infinity"
|
||||
|
||||
def __hash__(self):
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other):
|
||||
return True
|
||||
|
||||
def __le__(self, other):
|
||||
return True
|
||||
|
||||
def __eq__(self, other):
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other):
|
||||
return False
|
||||
|
||||
def __ge__(self, other):
|
||||
return False
|
||||
|
||||
def __neg__(self):
|
||||
return Infinity
|
||||
|
||||
|
||||
# BEGIN packaging/version.py
|
||||
|
||||
|
||||
NegativeInfinity = NegativeInfinity()
|
||||
|
||||
_Version = collections.namedtuple(
|
||||
"_Version",
|
||||
["epoch", "release", "dev", "pre", "post", "local"],
|
||||
)
|
||||
|
||||
|
||||
def parse(version):
|
||||
"""
|
||||
Parse the given version string and return either a :class:`Version` object
|
||||
or a :class:`LegacyVersion` object depending on if the given version is
|
||||
a valid PEP 440 version or a legacy version.
|
||||
"""
|
||||
try:
|
||||
return Version(version)
|
||||
except InvalidVersion:
|
||||
return LegacyVersion(version)
|
||||
|
||||
|
||||
class InvalidVersion(ValueError):
|
||||
"""
|
||||
An invalid version was found, users should refer to PEP 440.
|
||||
"""
|
||||
|
||||
|
||||
class _BaseVersion:
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._key)
|
||||
|
||||
def __lt__(self, other):
|
||||
return self._compare(other, lambda s, o: s < o)
|
||||
|
||||
def __le__(self, other):
|
||||
return self._compare(other, lambda s, o: s <= o)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self._compare(other, lambda s, o: s == o)
|
||||
|
||||
def __ge__(self, other):
|
||||
return self._compare(other, lambda s, o: s >= o)
|
||||
|
||||
def __gt__(self, other):
|
||||
return self._compare(other, lambda s, o: s > o)
|
||||
|
||||
def __ne__(self, other):
|
||||
return self._compare(other, lambda s, o: s != o)
|
||||
|
||||
def _compare(self, other, method):
|
||||
if not isinstance(other, _BaseVersion):
|
||||
return NotImplemented
|
||||
|
||||
return method(self._key, other._key)
|
||||
|
||||
|
||||
class LegacyVersion(_BaseVersion):
|
||||
|
||||
def __init__(self, version):
|
||||
self._version = str(version)
|
||||
self._key = _legacy_cmpkey(self._version)
|
||||
|
||||
def __str__(self):
|
||||
return self._version
|
||||
|
||||
def __repr__(self):
|
||||
return "<LegacyVersion({0})>".format(repr(str(self)))
|
||||
|
||||
@property
|
||||
def public(self):
|
||||
return self._version
|
||||
|
||||
@property
|
||||
def base_version(self):
|
||||
return self._version
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
return None
|
||||
|
||||
@property
|
||||
def is_prerelease(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_postrelease(self):
|
||||
return False
|
||||
|
||||
|
||||
_legacy_version_component_re = re.compile(
|
||||
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
|
||||
)
|
||||
|
||||
_legacy_version_replacement_map = {
|
||||
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
|
||||
}
|
||||
|
||||
|
||||
def _parse_version_parts(s):
|
||||
for part in _legacy_version_component_re.split(s):
|
||||
part = _legacy_version_replacement_map.get(part, part)
|
||||
|
||||
if not part or part == ".":
|
||||
continue
|
||||
|
||||
if part[:1] in "0123456789":
|
||||
# pad for numeric comparison
|
||||
yield part.zfill(8)
|
||||
else:
|
||||
yield "*" + part
|
||||
|
||||
# ensure that alpha/beta/candidate are before final
|
||||
yield "*final"
|
||||
|
||||
|
||||
def _legacy_cmpkey(version):
|
||||
# We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
|
||||
# greater than or equal to 0. This will effectively put the LegacyVersion,
|
||||
# which uses the de facto standard originally implemented by setuptools,
|
||||
# before all PEP 440 versions.
|
||||
epoch = -1
|
||||
|
||||
# This scheme is taken from setuptools' pkg_resources.parse_version, prior to
|
||||
# its adoption of the packaging library.
|
||||
parts = []
|
||||
for part in _parse_version_parts(version.lower()):
|
||||
if part.startswith("*"):
|
||||
# remove "-" before a prerelease tag
|
||||
if part < "*final":
|
||||
while parts and parts[-1] == "*final-":
|
||||
parts.pop()
|
||||
|
||||
# remove trailing zeros from each series of numeric parts
|
||||
while parts and parts[-1] == "00000000":
|
||||
parts.pop()
|
||||
|
||||
parts.append(part)
|
||||
parts = tuple(parts)
|
||||
|
||||
return epoch, parts
|
||||
|
||||
|
||||
# Deliberately not anchored to the start and end of the string, to make it
|
||||
# easier for 3rd party code to reuse
|
||||
VERSION_PATTERN = r"""
|
||||
v?
|
||||
(?:
|
||||
(?:(?P<epoch>[0-9]+)!)? # epoch
|
||||
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
|
||||
(?P<pre> # pre-release
|
||||
[-_\.]?
|
||||
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
|
||||
[-_\.]?
|
||||
(?P<pre_n>[0-9]+)?
|
||||
)?
|
||||
(?P<post> # post release
|
||||
(?:-(?P<post_n1>[0-9]+))
|
||||
|
|
||||
(?:
|
||||
[-_\.]?
|
||||
(?P<post_l>post|rev|r)
|
||||
[-_\.]?
|
||||
(?P<post_n2>[0-9]+)?
|
||||
)
|
||||
)?
|
||||
(?P<dev> # dev release
|
||||
[-_\.]?
|
||||
(?P<dev_l>dev)
|
||||
[-_\.]?
|
||||
(?P<dev_n>[0-9]+)?
|
||||
)?
|
||||
)
|
||||
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
|
||||
"""
|
||||
|
||||
|
||||
class Version(_BaseVersion):
|
||||
|
||||
_regex = re.compile(
|
||||
r"^\s*" + VERSION_PATTERN + r"\s*$",
|
||||
re.VERBOSE | re.IGNORECASE,
|
||||
)
|
||||
|
||||
def __init__(self, version):
|
||||
# Validate the version and parse it into pieces
|
||||
match = self._regex.search(version)
|
||||
if not match:
|
||||
raise InvalidVersion("Invalid version: '{0}'".format(version))
|
||||
|
||||
# Store the parsed out pieces of the version
|
||||
self._version = _Version(
|
||||
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
|
||||
release=tuple(int(i) for i in match.group("release").split(".")),
|
||||
pre=_parse_letter_version(
|
||||
match.group("pre_l"),
|
||||
match.group("pre_n"),
|
||||
),
|
||||
post=_parse_letter_version(
|
||||
match.group("post_l"),
|
||||
match.group("post_n1") or match.group("post_n2"),
|
||||
),
|
||||
dev=_parse_letter_version(
|
||||
match.group("dev_l"),
|
||||
match.group("dev_n"),
|
||||
),
|
||||
local=_parse_local_version(match.group("local")),
|
||||
)
|
||||
|
||||
# Generate a key which will be used for sorting
|
||||
self._key = _cmpkey(
|
||||
self._version.epoch,
|
||||
self._version.release,
|
||||
self._version.pre,
|
||||
self._version.post,
|
||||
self._version.dev,
|
||||
self._version.local,
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return "<Version({0})>".format(repr(str(self)))
|
||||
|
||||
def __str__(self):
|
||||
parts = []
|
||||
|
||||
# Epoch
|
||||
if self._version.epoch != 0:
|
||||
parts.append("{0}!".format(self._version.epoch))
|
||||
|
||||
# Release segment
|
||||
parts.append(".".join(str(x) for x in self._version.release))
|
||||
|
||||
# Pre-release
|
||||
if self._version.pre is not None:
|
||||
parts.append("".join(str(x) for x in self._version.pre))
|
||||
|
||||
# Post-release
|
||||
if self._version.post is not None:
|
||||
parts.append(".post{0}".format(self._version.post[1]))
|
||||
|
||||
# Development release
|
||||
if self._version.dev is not None:
|
||||
parts.append(".dev{0}".format(self._version.dev[1]))
|
||||
|
||||
# Local version segment
|
||||
if self._version.local is not None:
|
||||
parts.append(
|
||||
"+{0}".format(".".join(str(x) for x in self._version.local))
|
||||
)
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
@property
|
||||
def public(self):
|
||||
return str(self).split("+", 1)[0]
|
||||
|
||||
@property
|
||||
def base_version(self):
|
||||
parts = []
|
||||
|
||||
# Epoch
|
||||
if self._version.epoch != 0:
|
||||
parts.append("{0}!".format(self._version.epoch))
|
||||
|
||||
# Release segment
|
||||
parts.append(".".join(str(x) for x in self._version.release))
|
||||
|
||||
return "".join(parts)
|
||||
|
||||
@property
|
||||
def local(self):
|
||||
version_string = str(self)
|
||||
if "+" in version_string:
|
||||
return version_string.split("+", 1)[1]
|
||||
|
||||
@property
|
||||
def is_prerelease(self):
|
||||
return bool(self._version.dev or self._version.pre)
|
||||
|
||||
@property
|
||||
def is_postrelease(self):
|
||||
return bool(self._version.post)
|
||||
|
||||
|
||||
def _parse_letter_version(letter, number):
|
||||
if letter:
|
||||
# We assume there is an implicit 0 in a pre-release if there is
|
||||
# no numeral associated with it.
|
||||
if number is None:
|
||||
number = 0
|
||||
|
||||
# We normalize any letters to their lower-case form
|
||||
letter = letter.lower()
|
||||
|
||||
# We consider some words to be alternate spellings of other words and
|
||||
# in those cases we want to normalize the spellings to our preferred
|
||||
# spelling.
|
||||
if letter == "alpha":
|
||||
letter = "a"
|
||||
elif letter == "beta":
|
||||
letter = "b"
|
||||
elif letter in ["c", "pre", "preview"]:
|
||||
letter = "rc"
|
||||
elif letter in ["rev", "r"]:
|
||||
letter = "post"
|
||||
|
||||
return letter, int(number)
|
||||
if not letter and number:
|
||||
# We assume that if we are given a number but not given a letter,
|
||||
# then this is using the implicit post release syntax (e.g., 1.0-1)
|
||||
letter = "post"
|
||||
|
||||
return letter, int(number)
|
||||
|
||||
|
||||
_local_version_seperators = re.compile(r"[\._-]")
|
||||
|
||||
|
||||
def _parse_local_version(local):
|
||||
"""
|
||||
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
|
||||
"""
|
||||
if local is not None:
|
||||
return tuple(
|
||||
part.lower() if not part.isdigit() else int(part)
|
||||
for part in _local_version_seperators.split(local)
|
||||
)
|
||||
|
||||
|
||||
def _cmpkey(epoch, release, pre, post, dev, local):
|
||||
# When we compare a release version, we want to compare it with all of the
|
||||
# trailing zeros removed. So we'll reverse the list, drop all the now
|
||||
# leading zeros until we come to something non-zero, then take the rest,
|
||||
# re-reverse it back into the correct order, and make it a tuple and use
|
||||
# that for our sorting key.
|
||||
release = tuple(
|
||||
reversed(list(
|
||||
itertools.dropwhile(
|
||||
lambda x: x == 0,
|
||||
reversed(release),
|
||||
)
|
||||
))
|
||||
)
|
||||
|
||||
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
|
||||
# We'll do this by abusing the pre-segment, but we _only_ want to do this
|
||||
# if there is no pre- or a post-segment. If we have one of those, then
|
||||
# the normal sorting rules will handle this case correctly.
|
||||
if pre is None and post is None and dev is not None:
|
||||
pre = -Infinity
|
||||
# Versions without a pre-release (except as noted above) should sort after
|
||||
# those with one.
|
||||
elif pre is None:
|
||||
pre = Infinity
|
||||
|
||||
# Versions without a post-segment should sort before those with one.
|
||||
if post is None:
|
||||
post = -Infinity
|
||||
|
||||
# Versions without a development segment should sort after those with one.
|
||||
if dev is None:
|
||||
dev = Infinity
|
||||
|
||||
if local is None:
|
||||
# Versions without a local segment should sort before those with one.
|
||||
local = -Infinity
|
||||
else:
|
||||
# Versions with a local segment need that segment parsed to implement
|
||||
# the sorting rules in PEP440.
|
||||
# - Alphanumeric segments sort before numeric segments
|
||||
# - Alphanumeric segments sort lexicographically
|
||||
# - Numeric segments sort numerically
|
||||
# - Shorter versions sort before longer versions when the prefixes
|
||||
# match exactly
|
||||
local = tuple(
|
||||
(i, "") if isinstance(i, int) else (-Infinity, i)
|
||||
for i in local
|
||||
)
|
||||
|
||||
return epoch, release, pre, post, dev, local
|
||||
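A brief usage sketch for the vendored module above (illustrative only; the import path simply mirrors the file added in this diff). PEP 440 strings can be parsed and compared directly:

>>> from scipy._lib import _pep440
>>> _pep440.parse("1.9.0rc1") < _pep440.parse("1.9.0")
True
>>> _pep440.Version("1.10.0.dev0").is_prerelease
True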
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_test_ccallback.cp311-win_amd64.dll.a
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_test_ccallback.cp311-win_amd64.pyd
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_test_deprecation_call.cp311-win_amd64.dll.a
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_test_deprecation_call.cp311-win_amd64.pyd
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_test_deprecation_def.cp311-win_amd64.dll.a
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_test_deprecation_def.cp311-win_amd64.pyd
vendored
Normal file
Binary file not shown.
217
.CondaPkg/env/Lib/site-packages/scipy/_lib/_testutils.py
vendored
Normal file
@@ -0,0 +1,217 @@
|
||||
"""
|
||||
Generic test utilities.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import numpy as np
|
||||
import inspect
|
||||
|
||||
|
||||
__all__ = ['PytestTester', 'check_free_memory', '_TestPythranFunc']
|
||||
|
||||
|
||||
class FPUModeChangeWarning(RuntimeWarning):
|
||||
"""Warning about FPU mode change"""
|
||||
pass
|
||||
|
||||
|
||||
class PytestTester:
|
||||
"""
|
||||
Pytest test runner entry point.
|
||||
"""
|
||||
|
||||
def __init__(self, module_name):
|
||||
self.module_name = module_name
|
||||
|
||||
def __call__(self, label="fast", verbose=1, extra_argv=None, doctests=False,
|
||||
coverage=False, tests=None, parallel=None):
|
||||
import pytest
|
||||
|
||||
module = sys.modules[self.module_name]
|
||||
module_path = os.path.abspath(module.__path__[0])
|
||||
|
||||
pytest_args = ['--showlocals', '--tb=short']
|
||||
|
||||
if doctests:
|
||||
raise ValueError("Doctests not supported")
|
||||
|
||||
if extra_argv:
|
||||
pytest_args += list(extra_argv)
|
||||
|
||||
if verbose and int(verbose) > 1:
|
||||
pytest_args += ["-" + "v"*(int(verbose)-1)]
|
||||
|
||||
if coverage:
|
||||
pytest_args += ["--cov=" + module_path]
|
||||
|
||||
if label == "fast":
|
||||
pytest_args += ["-m", "not slow"]
|
||||
elif label != "full":
|
||||
pytest_args += ["-m", label]
|
||||
|
||||
if tests is None:
|
||||
tests = [self.module_name]
|
||||
|
||||
if parallel is not None and parallel > 1:
|
||||
if _pytest_has_xdist():
|
||||
pytest_args += ['-n', str(parallel)]
|
||||
else:
|
||||
import warnings
|
||||
warnings.warn('Could not run tests in parallel because '
|
||||
'pytest-xdist plugin is not available.')
|
||||
|
||||
pytest_args += ['--pyargs'] + list(tests)
|
||||
|
||||
try:
|
||||
code = pytest.main(pytest_args)
|
||||
except SystemExit as exc:
|
||||
code = exc.code
|
||||
|
||||
return (code == 0)
|
||||
|
||||
|
||||
class _TestPythranFunc:
|
||||
'''
|
||||
These are situations that can be tested in our pythran tests:
|
||||
- A function with multiple array arguments and then
|
||||
other positional and keyword arguments.
|
||||
- A function with array-like keywords (e.g. `def somefunc(x0, x1=None)`).
|
||||
Note: list/tuple input is not yet tested!
|
||||
|
||||
`self.arguments`: A dictionary whose key is the index of the argument and
|
||||
whose value is a tuple of (array value, all supported dtypes)
|
||||
`self.partialfunc`: A function used to freeze some non-array argument
|
||||
that is of no interest in the original function
|
||||
'''
|
||||
ALL_INTEGER = [np.int8, np.int16, np.int32, np.int64, np.intc, np.intp]
|
||||
ALL_FLOAT = [np.float32, np.float64]
|
||||
ALL_COMPLEX = [np.complex64, np.complex128]
|
||||
|
||||
def setup_method(self):
|
||||
self.arguments = {}
|
||||
self.partialfunc = None
|
||||
self.expected = None
|
||||
|
||||
def get_optional_args(self, func):
|
||||
# get optional arguments with its default value,
|
||||
# used for testing keywords
|
||||
signature = inspect.signature(func)
|
||||
optional_args = {}
|
||||
for k, v in signature.parameters.items():
|
||||
if v.default is not inspect.Parameter.empty:
|
||||
optional_args[k] = v.default
|
||||
return optional_args
|
||||
|
||||
def get_max_dtype_list_length(self):
|
||||
# get the max supported dtypes list length in all arguments
|
||||
max_len = 0
|
||||
for arg_idx in self.arguments:
|
||||
cur_len = len(self.arguments[arg_idx][1])
|
||||
if cur_len > max_len:
|
||||
max_len = cur_len
|
||||
return max_len
|
||||
|
||||
def get_dtype(self, dtype_list, dtype_idx):
|
||||
# get the dtype from dtype_list via index
|
||||
# if the index is out of range, then return the last dtype
|
||||
if dtype_idx > len(dtype_list)-1:
|
||||
return dtype_list[-1]
|
||||
else:
|
||||
return dtype_list[dtype_idx]
|
||||
|
||||
def test_all_dtypes(self):
|
||||
for type_idx in range(self.get_max_dtype_list_length()):
|
||||
args_array = []
|
||||
for arg_idx in self.arguments:
|
||||
new_dtype = self.get_dtype(self.arguments[arg_idx][1],
|
||||
type_idx)
|
||||
args_array.append(self.arguments[arg_idx][0].astype(new_dtype))
|
||||
self.pythranfunc(*args_array)
|
||||
|
||||
def test_views(self):
|
||||
args_array = []
|
||||
for arg_idx in self.arguments:
|
||||
args_array.append(self.arguments[arg_idx][0][::-1][::-1])
|
||||
self.pythranfunc(*args_array)
|
||||
|
||||
def test_strided(self):
|
||||
args_array = []
|
||||
for arg_idx in self.arguments:
|
||||
args_array.append(np.repeat(self.arguments[arg_idx][0],
|
||||
2, axis=0)[::2])
|
||||
self.pythranfunc(*args_array)
|
||||
|
||||
|
||||
def _pytest_has_xdist():
|
||||
"""
|
||||
Check if the pytest-xdist plugin is installed, providing parallel tests
|
||||
"""
|
||||
# Check xdist exists without importing it, otherwise pytest emits warnings
|
||||
from importlib.util import find_spec
|
||||
return find_spec('xdist') is not None
|
||||
|
||||
|
||||
def check_free_memory(free_mb):
|
||||
"""
|
||||
Check that *free_mb* MB of memory is available, otherwise do pytest.skip
|
||||
"""
|
||||
import pytest
|
||||
|
||||
try:
|
||||
mem_free = _parse_size(os.environ['SCIPY_AVAILABLE_MEM'])
|
||||
msg = '{0} MB memory required, but environment SCIPY_AVAILABLE_MEM={1}'.format(
|
||||
free_mb, os.environ['SCIPY_AVAILABLE_MEM'])
|
||||
except KeyError:
|
||||
mem_free = _get_mem_available()
|
||||
if mem_free is None:
|
||||
pytest.skip("Could not determine available memory; set SCIPY_AVAILABLE_MEM "
|
||||
"variable to free memory in MB to run the test.")
|
||||
msg = '{0} MB memory required, but {1} MB available'.format(
|
||||
free_mb, mem_free/1e6)
|
||||
|
||||
if mem_free < free_mb * 1e6:
|
||||
pytest.skip(msg)
|
||||
|
||||
|
||||
def _parse_size(size_str):
|
||||
suffixes = {'': 1e6,
|
||||
'b': 1.0,
|
||||
'k': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12,
|
||||
'kb': 1e3, 'Mb': 1e6, 'Gb': 1e9, 'Tb': 1e12,
|
||||
'kib': 1024.0, 'Mib': 1024.0**2, 'Gib': 1024.0**3, 'Tib': 1024.0**4}
|
||||
m = re.match(r'^\s*(\d+)\s*({0})\s*$'.format('|'.join(suffixes.keys())),
|
||||
size_str,
|
||||
re.I)
|
||||
if not m or m.group(2) not in suffixes:
|
||||
raise ValueError("Invalid size string")
|
||||
|
||||
return float(m.group(1)) * suffixes[m.group(2)]
|
||||
|
||||
|
||||
def _get_mem_available():
|
||||
"""
|
||||
Get information about memory available, not counting swap.
|
||||
"""
|
||||
try:
|
||||
import psutil
|
||||
return psutil.virtual_memory().available
|
||||
except (ImportError, AttributeError):
|
||||
pass
|
||||
|
||||
if sys.platform.startswith('linux'):
|
||||
info = {}
|
||||
with open('/proc/meminfo', 'r') as f:
|
||||
for line in f:
|
||||
p = line.split()
|
||||
info[p[0].strip(':').lower()] = float(p[1]) * 1e3
|
||||
|
||||
if 'memavailable' in info:
|
||||
# Linux >= 3.14
|
||||
return info['memavailable']
|
||||
else:
|
||||
return info['memfree'] + info['cached']
|
||||
|
||||
return None
|
||||
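A small, hedged sketch of the size parsing used by ``check_free_memory`` above (``_parse_size`` is a private helper of this vendored file, shown only to illustrate the suffix table):

>>> from scipy._lib._testutils import _parse_size
>>> _parse_size('100 Mb')   # decimal megabytes -> bytes
100000000.0
>>> _parse_size('1 Gib') == 1024.0**3
True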
58
.CondaPkg/env/Lib/site-packages/scipy/_lib/_threadsafety.py
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
import threading
|
||||
|
||||
import scipy._lib.decorator
|
||||
|
||||
|
||||
__all__ = ['ReentrancyError', 'ReentrancyLock', 'non_reentrant']
|
||||
|
||||
|
||||
class ReentrancyError(RuntimeError):
|
||||
pass
|
||||
|
||||
|
||||
class ReentrancyLock:
|
||||
"""
|
||||
Threading lock that raises an exception for reentrant calls.
|
||||
|
||||
Calls from different threads are serialized, and nested calls from the
|
||||
same thread result in an error.
|
||||
|
||||
The object can be used as a context manager or to decorate functions
|
||||
via the decorate() method.
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, err_msg):
|
||||
self._rlock = threading.RLock()
|
||||
self._entered = False
|
||||
self._err_msg = err_msg
|
||||
|
||||
def __enter__(self):
|
||||
self._rlock.acquire()
|
||||
if self._entered:
|
||||
self._rlock.release()
|
||||
raise ReentrancyError(self._err_msg)
|
||||
self._entered = True
|
||||
|
||||
def __exit__(self, type, value, traceback):
|
||||
self._entered = False
|
||||
self._rlock.release()
|
||||
|
||||
def decorate(self, func):
|
||||
def caller(func, *a, **kw):
|
||||
with self:
|
||||
return func(*a, **kw)
|
||||
return scipy._lib.decorator.decorate(func, caller)
|
||||
|
||||
|
||||
def non_reentrant(err_msg=None):
|
||||
"""
|
||||
Decorate a function with a threading lock and prevent reentrant calls.
|
||||
"""
|
||||
def decorator(func):
|
||||
msg = err_msg
|
||||
if msg is None:
|
||||
msg = "%s is not re-entrant" % func.__name__
|
||||
lock = ReentrancyLock(msg)
|
||||
return lock.decorate(func)
|
||||
return decorator
|
||||
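A minimal usage sketch for the lock defined above (assuming only this vendored module; a nested entry from the same thread raises ``ReentrancyError``):

>>> from scipy._lib._threadsafety import ReentrancyLock
>>> lock = ReentrancyLock("not re-entrant")
>>> with lock:
...     pass  # entering `with lock:` again inside this block would raise ReentrancyError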
86
.CondaPkg/env/Lib/site-packages/scipy/_lib/_tmpdirs.py
vendored
Normal file
@@ -0,0 +1,86 @@
|
||||
''' Contexts for *with* statement providing temporary directories
|
||||
'''
|
||||
import os
|
||||
from contextlib import contextmanager
|
||||
from shutil import rmtree
|
||||
from tempfile import mkdtemp
|
||||
|
||||
|
||||
@contextmanager
|
||||
def tempdir():
|
||||
"""Create and return a temporary directory. This has the same
|
||||
behavior as mkdtemp but can be used as a context manager.
|
||||
|
||||
Upon exiting the context, the directory and everything contained
|
||||
in it are removed.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import os
|
||||
>>> with tempdir() as tmpdir:
|
||||
... fname = os.path.join(tmpdir, 'example_file.txt')
|
||||
... with open(fname, 'wt') as fobj:
|
||||
... _ = fobj.write('a string\\n')
|
||||
>>> os.path.exists(tmpdir)
|
||||
False
|
||||
"""
|
||||
d = mkdtemp()
|
||||
yield d
|
||||
rmtree(d)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def in_tempdir():
|
||||
''' Create, return, and change directory to a temporary directory
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import os
|
||||
>>> my_cwd = os.getcwd()
|
||||
>>> with in_tempdir() as tmpdir:
|
||||
... _ = open('test.txt', 'wt').write('some text')
|
||||
... assert os.path.isfile('test.txt')
|
||||
... assert os.path.isfile(os.path.join(tmpdir, 'test.txt'))
|
||||
>>> os.path.exists(tmpdir)
|
||||
False
|
||||
>>> os.getcwd() == my_cwd
|
||||
True
|
||||
'''
|
||||
pwd = os.getcwd()
|
||||
d = mkdtemp()
|
||||
os.chdir(d)
|
||||
yield d
|
||||
os.chdir(pwd)
|
||||
rmtree(d)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def in_dir(dir=None):
|
||||
""" Change directory to given directory for duration of ``with`` block
|
||||
|
||||
Useful when you want to use `in_tempdir` for the final test, but
|
||||
you are still debugging. For example, you may want to do this in the end:
|
||||
|
||||
>>> with in_tempdir() as tmpdir:
|
||||
... # do something complicated which might break
|
||||
... pass
|
||||
|
||||
But, indeed, the complicated thing does break, and meanwhile, the
|
||||
``in_tempdir`` context manager wiped out the directory with the
|
||||
temporary files that you wanted for debugging. So, while debugging, you
|
||||
replace with something like:
|
||||
|
||||
>>> with in_dir() as tmpdir: # Use working directory by default
|
||||
... # do something complicated which might break
|
||||
... pass
|
||||
|
||||
You can then look at the temporary file outputs to debug what is happening,
|
||||
fix, and finally replace ``in_dir`` with ``in_tempdir`` again.
|
||||
"""
|
||||
cwd = os.getcwd()
|
||||
if dir is None:
|
||||
yield cwd
|
||||
return
|
||||
os.chdir(dir)
|
||||
yield dir
|
||||
os.chdir(cwd)
|
||||
29
.CondaPkg/env/Lib/site-packages/scipy/_lib/_uarray/LICENSE
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
BSD 3-Clause License
|
||||
|
||||
Copyright (c) 2018, Quansight-Labs
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
* Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
116
.CondaPkg/env/Lib/site-packages/scipy/_lib/_uarray/__init__.py
vendored
Normal file
@@ -0,0 +1,116 @@
|
||||
"""
|
||||
.. note:
|
||||
If you are looking for overrides for NumPy-specific methods, see the
|
||||
documentation for :obj:`unumpy`. This page explains how to write
|
||||
back-ends and multimethods.
|
||||
|
||||
``uarray`` is built around a back-end protocol, and overridable multimethods.
|
||||
It is necessary to define multimethods for back-ends to be able to override them.
|
||||
See the documentation of :obj:`generate_multimethod` on how to write multimethods.
|
||||
|
||||
|
||||
|
||||
Let's start with the simplest:
|
||||
|
||||
``__ua_domain__`` defines the back-end *domain*. The domain is a period-
|
||||
separated string consisting of the modules you extend plus the submodule. For
|
||||
example, if a submodule ``module2.submodule`` extends ``module1``
|
||||
(i.e., it exposes dispatchables marked as types available in ``module1``),
|
||||
then the domain string should be ``"module1.module2.submodule"``.
|
||||
|
||||
|
||||
For the purpose of this demonstration, we'll be creating an object and setting
|
||||
its attributes directly. However, note that you can use a module or your own type
|
||||
as a backend as well.
|
||||
|
||||
>>> class Backend: pass
|
||||
>>> be = Backend()
|
||||
>>> be.__ua_domain__ = "ua_examples"
|
||||
|
||||
It might be useful at this point to sidetrack to the documentation of
|
||||
:obj:`generate_multimethod` to find out how to generate a multimethod
|
||||
overridable by :obj:`uarray`. Needless to say, writing a backend and
|
||||
creating multimethods are mostly orthogonal activities, and knowing
|
||||
one doesn't necessarily require knowledge of the other, although it
|
||||
is certainly helpful. We expect core API designers/specifiers to write the
|
||||
multimethods, and implementors to override them. But, as is often the case,
|
||||
similar people write both.
|
||||
|
||||
Without further ado, here's an example multimethod:
|
||||
|
||||
>>> import uarray as ua
|
||||
>>> from uarray import Dispatchable
|
||||
>>> def override_me(a, b):
|
||||
... return Dispatchable(a, int),
|
||||
>>> def override_replacer(args, kwargs, dispatchables):
|
||||
... return (dispatchables[0], args[1]), {}
|
||||
>>> overridden_me = ua.generate_multimethod(
|
||||
... override_me, override_replacer, "ua_examples"
|
||||
... )
|
||||
|
||||
Next comes the part about overriding the multimethod. This requires
|
||||
the ``__ua_function__`` protocol, and the ``__ua_convert__``
|
||||
protocol. The ``__ua_function__`` protocol has the signature
|
||||
``(method, args, kwargs)`` where ``method`` is the passed
|
||||
multimethod, ``args``/``kwargs`` specify the arguments and ``dispatchables``
|
||||
is the list of converted dispatchables passed in.
|
||||
|
||||
>>> def __ua_function__(method, args, kwargs):
|
||||
... return method.__name__, args, kwargs
|
||||
>>> be.__ua_function__ = __ua_function__
|
||||
|
||||
The other protocol of interest is the ``__ua_convert__`` protocol. It has the
|
||||
signature ``(dispatchables, coerce)``. When ``coerce`` is ``False``, conversion
|
||||
between the formats should ideally be an ``O(1)`` operation, meaning that
|
||||
no memory copying should be involved, only views of the existing data.
|
||||
|
||||
>>> def __ua_convert__(dispatchables, coerce):
|
||||
... for d in dispatchables:
|
||||
... if d.type is int:
|
||||
... if coerce and d.coercible:
|
||||
... yield str(d.value)
|
||||
... else:
|
||||
... yield d.value
|
||||
>>> be.__ua_convert__ = __ua_convert__
|
||||
|
||||
Now that we have defined the backend, the next thing to do is to call the multimethod.
|
||||
|
||||
>>> with ua.set_backend(be):
|
||||
... overridden_me(1, "2")
|
||||
('override_me', (1, '2'), {})
|
||||
|
||||
Note that the marked type has no effect on the actual type of the passed object.
|
||||
We can also coerce the type of the input.
|
||||
|
||||
>>> with ua.set_backend(be, coerce=True):
|
||||
... overridden_me(1, "2")
|
||||
... overridden_me(1.0, "2")
|
||||
('override_me', ('1', '2'), {})
|
||||
('override_me', ('1.0', '2'), {})
|
||||
|
||||
Another feature is that if you remove ``__ua_convert__``, the arguments are not
|
||||
converted at all and it's up to the backend to handle that.
|
||||
|
||||
>>> del be.__ua_convert__
|
||||
>>> with ua.set_backend(be):
|
||||
... overridden_me(1, "2")
|
||||
('override_me', (1, '2'), {})
|
||||
|
||||
You also have the option to return ``NotImplemented``, in which case processing moves on
|
||||
to the next back-end, which in this case, doesn't exist. The same applies to
|
||||
``__ua_convert__``.
|
||||
|
||||
>>> be.__ua_function__ = lambda *a, **kw: NotImplemented
|
||||
>>> with ua.set_backend(be):
|
||||
... overridden_me(1, "2")
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
uarray.BackendNotImplementedError: ...
|
||||
|
||||
The last possibility is if we don't have ``__ua_convert__``, in which case the job is left
|
||||
up to ``__ua_function__``, but putting things back into arrays after conversion will not be
|
||||
possible.
|
||||
"""
|
||||
|
||||
from ._backend import *
|
||||
__version__ = '0.8.8.dev0+aa94c5a4.scipy'
|
||||
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_uarray/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_uarray/__pycache__/_backend.cpython-311.pyc
vendored
Normal file
Binary file not shown.
703
.CondaPkg/env/Lib/site-packages/scipy/_lib/_uarray/_backend.py
vendored
Normal file
@@ -0,0 +1,703 @@
|
||||
import typing
|
||||
import types
|
||||
import inspect
|
||||
import functools
|
||||
from . import _uarray
|
||||
import copyreg
|
||||
import pickle
|
||||
import contextlib
|
||||
|
||||
ArgumentExtractorType = typing.Callable[..., typing.Tuple["Dispatchable", ...]]
|
||||
ArgumentReplacerType = typing.Callable[
|
||||
[typing.Tuple, typing.Dict, typing.Tuple], typing.Tuple[typing.Tuple, typing.Dict]
|
||||
]
|
||||
|
||||
from ._uarray import ( # type: ignore
|
||||
BackendNotImplementedError,
|
||||
_Function,
|
||||
_SkipBackendContext,
|
||||
_SetBackendContext,
|
||||
_BackendState,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"set_backend",
|
||||
"set_global_backend",
|
||||
"skip_backend",
|
||||
"register_backend",
|
||||
"determine_backend",
|
||||
"determine_backend_multi",
|
||||
"clear_backends",
|
||||
"create_multimethod",
|
||||
"generate_multimethod",
|
||||
"_Function",
|
||||
"BackendNotImplementedError",
|
||||
"Dispatchable",
|
||||
"wrap_single_convertor",
|
||||
"wrap_single_convertor_instance",
|
||||
"all_of_type",
|
||||
"mark_as",
|
||||
"set_state",
|
||||
"get_state",
|
||||
"reset_state",
|
||||
"_BackendState",
|
||||
"_SkipBackendContext",
|
||||
"_SetBackendContext",
|
||||
]
|
||||
|
||||
|
||||
def unpickle_function(mod_name, qname, self_):
|
||||
import importlib
|
||||
|
||||
try:
|
||||
module = importlib.import_module(mod_name)
|
||||
qname = qname.split(".")
|
||||
func = module
|
||||
for q in qname:
|
||||
func = getattr(func, q)
|
||||
|
||||
if self_ is not None:
|
||||
func = types.MethodType(func, self_)
|
||||
|
||||
return func
|
||||
except (ImportError, AttributeError) as e:
|
||||
from pickle import UnpicklingError
|
||||
|
||||
raise UnpicklingError from e
|
||||
|
||||
|
||||
def pickle_function(func):
|
||||
mod_name = getattr(func, "__module__", None)
|
||||
qname = getattr(func, "__qualname__", None)
|
||||
self_ = getattr(func, "__self__", None)
|
||||
|
||||
try:
|
||||
test = unpickle_function(mod_name, qname, self_)
|
||||
except pickle.UnpicklingError:
|
||||
test = None
|
||||
|
||||
if test is not func:
|
||||
raise pickle.PicklingError(
|
||||
"Can't pickle {}: it's not the same object as {}".format(func, test)
|
||||
)
|
||||
|
||||
return unpickle_function, (mod_name, qname, self_)
|
||||
|
||||
|
||||
def pickle_state(state):
|
||||
return _uarray._BackendState._unpickle, state._pickle()
|
||||
|
||||
|
||||
def pickle_set_backend_context(ctx):
|
||||
return _SetBackendContext, ctx._pickle()
|
||||
|
||||
|
||||
def pickle_skip_backend_context(ctx):
|
||||
return _SkipBackendContext, ctx._pickle()
|
||||
|
||||
|
||||
copyreg.pickle(_Function, pickle_function)
|
||||
copyreg.pickle(_uarray._BackendState, pickle_state)
|
||||
copyreg.pickle(_SetBackendContext, pickle_set_backend_context)
|
||||
copyreg.pickle(_SkipBackendContext, pickle_skip_backend_context)
|
||||
|
||||
|
||||
def get_state():
|
||||
"""
|
||||
Returns an opaque object containing the current state of all the backends.
|
||||
|
||||
Can be used for synchronization between threads/processes.
|
||||
|
||||
See Also
|
||||
--------
|
||||
set_state
|
||||
Sets the state returned by this function.
|
||||
"""
|
||||
return _uarray.get_state()
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def reset_state():
|
||||
"""
|
||||
Returns a context manager that resets all state once exited.
|
||||
|
||||
See Also
|
||||
--------
|
||||
set_state
|
||||
Context manager that sets the backend state.
|
||||
get_state
|
||||
Gets a state to be set by this context manager.
|
||||
"""
|
||||
with set_state(get_state()):
|
||||
yield
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def set_state(state):
|
||||
"""
|
||||
A context manager that sets the state of the backends to one returned by :obj:`get_state`.
|
||||
|
||||
See Also
|
||||
--------
|
||||
get_state
|
||||
Gets a state to be set by this context manager.
|
||||
"""
|
||||
old_state = get_state()
|
||||
_uarray.set_state(state)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
_uarray.set_state(old_state, True)
|
||||
|
||||
|
||||
def create_multimethod(*args, **kwargs):
|
||||
"""
|
||||
Creates a decorator for generating multimethods.
|
||||
|
||||
This function creates a decorator that can be used with an argument
|
||||
extractor in order to generate a multimethod. Other than for the
|
||||
argument extractor, all arguments are passed on to
|
||||
:obj:`generate_multimethod`.
|
||||
|
||||
See Also
|
||||
--------
|
||||
generate_multimethod
|
||||
Generates a multimethod.
|
||||
"""
|
||||
|
||||
def wrapper(a):
|
||||
return generate_multimethod(a, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def generate_multimethod(
|
||||
argument_extractor: ArgumentExtractorType,
|
||||
argument_replacer: ArgumentReplacerType,
|
||||
domain: str,
|
||||
default: typing.Optional[typing.Callable] = None,
|
||||
):
|
||||
"""
|
||||
Generates a multimethod.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
argument_extractor : ArgumentExtractorType
|
||||
A callable which extracts the dispatchable arguments. Extracted arguments
|
||||
should be marked by the :obj:`Dispatchable` class. It has the same signature
|
||||
as the desired multimethod.
|
||||
argument_replacer : ArgumentReplacerType
|
||||
A callable with the signature (args, kwargs, dispatchables), which should also
|
||||
return an (args, kwargs) pair with the dispatchables replaced inside the args/kwargs.
|
||||
domain : str
|
||||
A string value indicating the domain of this multimethod.
|
||||
default: Optional[Callable], optional
|
||||
The default implementation of this multimethod, where ``None`` (the default) specifies
|
||||
there is no default implementation.
|
||||
|
||||
Examples
|
||||
--------
|
||||
In this example, ``a`` is to be dispatched over, so we return it, while marking it as an ``int``.
|
||||
The trailing comma is needed because the args have to be returned as an iterable.
|
||||
|
||||
>>> def override_me(a, b):
|
||||
... return Dispatchable(a, int),
|
||||
|
||||
Next, we define the argument replacer that replaces the dispatchables inside args/kwargs with the
|
||||
supplied ones.
|
||||
|
||||
>>> def override_replacer(args, kwargs, dispatchables):
|
||||
... return (dispatchables[0], args[1]), {}
|
||||
|
||||
Next, we define the multimethod.
|
||||
|
||||
>>> overridden_me = generate_multimethod(
|
||||
... override_me, override_replacer, "ua_examples"
|
||||
... )
|
||||
|
||||
Notice that there's no default implementation, unless you supply one.
|
||||
|
||||
>>> overridden_me(1, "a")
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
uarray.BackendNotImplementedError: ...
|
||||
|
||||
>>> overridden_me2 = generate_multimethod(
|
||||
... override_me, override_replacer, "ua_examples", default=lambda x, y: (x, y)
|
||||
... )
|
||||
>>> overridden_me2(1, "a")
|
||||
(1, 'a')
|
||||
|
||||
See Also
|
||||
--------
|
||||
uarray
|
||||
See the module documentation for how to override the method by creating backends.
|
||||
"""
|
||||
kw_defaults, arg_defaults, opts = get_defaults(argument_extractor)
|
||||
ua_func = _Function(
|
||||
argument_extractor,
|
||||
argument_replacer,
|
||||
domain,
|
||||
arg_defaults,
|
||||
kw_defaults,
|
||||
default,
|
||||
)
|
||||
|
||||
return functools.update_wrapper(ua_func, argument_extractor)
|
||||
|
||||
|
||||
def set_backend(backend, coerce=False, only=False):
|
||||
"""
|
||||
A context manager that sets the preferred backend.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
backend
|
||||
The backend to set.
|
||||
coerce
|
||||
Whether or not to coerce to a specific backend's types. Implies ``only``.
|
||||
only
|
||||
Whether or not this should be the last backend to try.
|
||||
|
||||
See Also
|
||||
--------
|
||||
skip_backend: A context manager that allows skipping of backends.
|
||||
set_global_backend: Set a single, global backend for a domain.
|
||||
"""
|
||||
try:
|
||||
return backend.__ua_cache__["set", coerce, only]
|
||||
except AttributeError:
|
||||
backend.__ua_cache__ = {}
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
ctx = _SetBackendContext(backend, coerce, only)
|
||||
backend.__ua_cache__["set", coerce, only] = ctx
|
||||
return ctx
|
||||
|
||||
|
||||
def skip_backend(backend):
|
||||
"""
|
||||
A context manager that allows one to skip a given backend from processing
|
||||
entirely. This allows one to use another backend's code in a library that
|
||||
is also a consumer of the same backend.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
backend
|
||||
The backend to skip.
|
||||
|
||||
See Also
|
||||
--------
|
||||
set_backend: A context manager that allows setting of backends.
|
||||
set_global_backend: Set a single, global backend for a domain.
|
||||
"""
|
||||
try:
|
||||
return backend.__ua_cache__["skip"]
|
||||
except AttributeError:
|
||||
backend.__ua_cache__ = {}
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
ctx = _SkipBackendContext(backend)
|
||||
backend.__ua_cache__["skip"] = ctx
|
||||
return ctx
|
||||
|
||||
|
||||
def get_defaults(f):
|
||||
sig = inspect.signature(f)
|
||||
kw_defaults = {}
|
||||
arg_defaults = []
|
||||
opts = set()
|
||||
for k, v in sig.parameters.items():
|
||||
if v.default is not inspect.Parameter.empty:
|
||||
kw_defaults[k] = v.default
|
||||
if v.kind in (
|
||||
inspect.Parameter.POSITIONAL_ONLY,
|
||||
inspect.Parameter.POSITIONAL_OR_KEYWORD,
|
||||
):
|
||||
arg_defaults.append(v.default)
|
||||
opts.add(k)
|
||||
|
||||
return kw_defaults, tuple(arg_defaults), opts
|
||||
|
||||
|
||||
def set_global_backend(backend, coerce=False, only=False, *, try_last=False):
|
||||
"""
|
||||
This utility method replaces the default backend for permanent use. It
|
||||
will be tried in the list of backends automatically, unless the
|
||||
``only`` flag is set on a backend. This will be the first tried
|
||||
backend outside the :obj:`set_backend` context manager.
|
||||
|
||||
Note that this method is not thread-safe.
|
||||
|
||||
.. warning::
|
||||
We caution library authors against using this function in
|
||||
their code. We do *not* support this use-case. This function
|
||||
is meant to be used only by users themselves, or by a reference
|
||||
implementation, if one exists.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
backend
|
||||
The backend to register.
|
||||
coerce : bool
|
||||
Whether to coerce input types when trying this backend.
|
||||
only : bool
|
||||
If ``True``, no more backends will be tried if this fails.
|
||||
Implied by ``coerce=True``.
|
||||
try_last : bool
|
||||
If ``True``, the global backend is tried after registered backends.
|
||||
|
||||
See Also
|
||||
--------
|
||||
set_backend: A context manager that allows setting of backends.
|
||||
skip_backend: A context manager that allows skipping of backends.
|
||||
"""
|
||||
_uarray.set_global_backend(backend, coerce, only, try_last)
|
||||
|
||||
|
||||
def register_backend(backend):
|
||||
"""
|
||||
This utility method registers a backend for permanent use. It
|
||||
will be tried in the list of backends automatically, unless the
|
||||
``only`` flag is set on a backend.
|
||||
|
||||
Note that this method is not thread-safe.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
backend
|
||||
The backend to register.
|
||||
"""
|
||||
_uarray.register_backend(backend)
|
||||
|
||||
|
||||
def clear_backends(domain, registered=True, globals=False):
|
||||
"""
|
||||
This utility method clears registered backends.
|
||||
|
||||
.. warning::
|
||||
We caution library authors against using this function in
|
||||
their code. We do *not* support this use-case. This function
|
||||
is meant to be used only by users themselves.
|
||||
|
||||
.. warning::
|
||||
Do NOT use this method inside a multimethod call, or the
|
||||
program is likely to crash.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
domain : Optional[str]
|
||||
The domain for which to de-register backends. ``None`` means
|
||||
de-register for all domains.
|
||||
registered : bool
|
||||
Whether or not to clear registered backends. See :obj:`register_backend`.
|
||||
globals : bool
|
||||
Whether or not to clear global backends. See :obj:`set_global_backend`.
|
||||
|
||||
See Also
|
||||
--------
|
||||
register_backend : Register a backend globally.
|
||||
set_global_backend : Set a global backend.
|
||||
"""
|
||||
_uarray.clear_backends(domain, registered, globals)
|
||||
|
||||
|
||||
class Dispatchable:
|
||||
"""
|
||||
A utility class which marks an argument with a specific dispatch type.
|
||||
|
||||
|
||||
Attributes
|
||||
----------
|
||||
value
|
||||
The value of the Dispatchable.
|
||||
|
||||
type
|
||||
The type of the Dispatchable.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> x = Dispatchable(1, str)
|
||||
>>> x
|
||||
<Dispatchable: type=<class 'str'>, value=1>
|
||||
|
||||
See Also
|
||||
--------
|
||||
all_of_type
|
||||
Marks all unmarked parameters of a function.
|
||||
|
||||
mark_as
|
||||
Allows one to create a utility function to mark as a given type.
|
||||
"""
|
||||
|
||||
def __init__(self, value, dispatch_type, coercible=True):
|
||||
self.value = value
|
||||
self.type = dispatch_type
|
||||
self.coercible = coercible
|
||||
|
||||
def __getitem__(self, index):
|
||||
return (self.type, self.value)[index]
|
||||
|
||||
def __str__(self):
|
||||
return "<{0}: type={1!r}, value={2!r}>".format(
|
||||
type(self).__name__, self.type, self.value
|
||||
)
|
||||
|
||||
__repr__ = __str__
|
||||
|
||||
|
||||
def mark_as(dispatch_type):
|
||||
"""
|
||||
Creates a utility function to mark something as a specific type.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> mark_int = mark_as(int)
|
||||
>>> mark_int(1)
|
||||
<Dispatchable: type=<class 'int'>, value=1>
|
||||
"""
|
||||
return functools.partial(Dispatchable, dispatch_type=dispatch_type)
|
||||
|
||||
|
||||
def all_of_type(arg_type):
|
||||
"""
|
||||
Marks all unmarked arguments as a given type.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> @all_of_type(str)
|
||||
... def f(a, b):
|
||||
... return a, Dispatchable(b, int)
|
||||
>>> f('a', 1)
|
||||
(<Dispatchable: type=<class 'str'>, value='a'>, <Dispatchable: type=<class 'int'>, value=1>)
|
||||
"""
|
||||
|
||||
def outer(func):
|
||||
@functools.wraps(func)
|
||||
def inner(*args, **kwargs):
|
||||
extracted_args = func(*args, **kwargs)
|
||||
return tuple(
|
||||
Dispatchable(arg, arg_type)
|
||||
if not isinstance(arg, Dispatchable)
|
||||
else arg
|
||||
for arg in extracted_args
|
||||
)
|
||||
|
||||
return inner
|
||||
|
||||
return outer
|
||||
|
||||
|
||||
def wrap_single_convertor(convert_single):
|
||||
"""
|
||||
Wraps a ``__ua_convert__`` defined for a single element to all elements.
|
||||
If any of them return ``NotImplemented``, the operation is assumed to be
|
||||
undefined.
|
||||
|
||||
Accepts a signature of (value, type, coerce).
|
||||
"""
|
||||
|
||||
@functools.wraps(convert_single)
|
||||
def __ua_convert__(dispatchables, coerce):
|
||||
converted = []
|
||||
for d in dispatchables:
|
||||
c = convert_single(d.value, d.type, coerce and d.coercible)
|
||||
|
||||
if c is NotImplemented:
|
||||
return NotImplemented
|
||||
|
||||
converted.append(c)
|
||||
|
||||
return converted
|
||||
|
||||
return __ua_convert__
|
||||
|
||||
|
||||
def wrap_single_convertor_instance(convert_single):
|
||||
"""
|
||||
Wraps a ``__ua_convert__`` defined for a single element to all elements.
|
||||
If any of them return ``NotImplemented``, the operation is assumed to be
|
||||
undefined.
|
||||
|
||||
Accepts a signature of (value, type, coerce).
|
||||
"""
|
||||
|
||||
@functools.wraps(convert_single)
|
||||
def __ua_convert__(self, dispatchables, coerce):
|
||||
converted = []
|
||||
for d in dispatchables:
|
||||
c = convert_single(self, d.value, d.type, coerce and d.coercible)
|
||||
|
||||
if c is NotImplemented:
|
||||
return NotImplemented
|
||||
|
||||
converted.append(c)
|
||||
|
||||
return converted
|
||||
|
||||
return __ua_convert__
|
||||
|
||||
|
||||
def determine_backend(value, dispatch_type, *, domain, only=True, coerce=False):
|
||||
"""Set the backend to the first active backend that supports ``value``
|
||||
|
||||
This is useful for functions that call multimethods without any dispatchable
|
||||
arguments. You can use :func:`determine_backend` to ensure the same backend
|
||||
is used everywhere in a block of multimethod calls.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
value
|
||||
The value being tested
|
||||
dispatch_type
|
||||
The dispatch type associated with ``value``, aka
|
||||
":ref:`marking <MarkingGlossary>`".
|
||||
domain: string
|
||||
The domain to query for backends and set.
|
||||
coerce: bool
|
||||
Whether or not to allow coercion to the backend's types. Implies ``only``.
|
||||
only: bool
|
||||
Whether or not this should be the last backend to try.
|
||||
|
||||
See Also
|
||||
--------
|
||||
set_backend: For when you know which backend to set
|
||||
|
||||
Notes
|
||||
-----
|
||||
|
||||
Support is determined by the ``__ua_convert__`` protocol. Backends not
|
||||
supporting the type must return ``NotImplemented`` from their
|
||||
``__ua_convert__`` if they don't support input of that type.
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
Suppose we have two backends ``BackendA`` and ``BackendB`` each supporting
|
||||
different types, ``TypeA`` and ``TypeB``. Neither supporting the other type:
|
||||
|
||||
>>> with ua.set_backend(ex.BackendA):
|
||||
... ex.call_multimethod(ex.TypeB(), ex.TypeB())
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
uarray.BackendNotImplementedError: ...
|
||||
|
||||
Now consider a multimethod that creates a new object of ``TypeA``, or
|
||||
``TypeB`` depending on the active backend.
|
||||
|
||||
>>> with ua.set_backend(ex.BackendA), ua.set_backend(ex.BackendB):
|
||||
... res = ex.creation_multimethod()
|
||||
... ex.call_multimethod(res, ex.TypeA())
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
uarray.BackendNotImplementedError: ...
|
||||
|
||||
``res`` is an object of ``TypeB`` because ``BackendB`` is set in the
|
||||
innermost with statement. So, ``call_multimethod`` fails since the types
|
||||
don't match.
|
||||
|
||||
Instead, we need to first find a backend suitable for all of our objects.
|
||||
|
||||
>>> with ua.set_backend(ex.BackendA), ua.set_backend(ex.BackendB):
|
||||
... x = ex.TypeA()
|
||||
... with ua.determine_backend(x, "mark", domain="ua_examples"):
|
||||
... res = ex.creation_multimethod()
|
||||
... ex.call_multimethod(res, x)
|
||||
TypeA
|
||||
|
||||
"""
|
||||
dispatchables = (Dispatchable(value, dispatch_type, coerce),)
|
||||
backend = _uarray.determine_backend(domain, dispatchables, coerce)
|
||||
|
||||
return set_backend(backend, coerce=coerce, only=only)
|
||||
|
||||
|
||||
def determine_backend_multi(
|
||||
dispatchables, *, domain, only=True, coerce=False, **kwargs
|
||||
):
|
||||
"""Set a backend supporting all ``dispatchables``
|
||||
|
||||
This is useful for functions that call multimethods without any dispatchable
|
||||
arguments. You can use :func:`determine_backend_multi` to ensure the same
|
||||
backend is used everywhere in a block of multimethod calls involving
|
||||
multiple arrays.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
dispatchables: Sequence[Union[uarray.Dispatchable, Any]]
|
||||
The dispatchables that must be supported
|
||||
domain: string
|
||||
The domain to query for backends and set.
|
||||
coerce: bool
|
||||
Whether or not to allow coercion to the backend's types. Implies ``only``.
|
||||
only: bool
|
||||
Whether or not this should be the last backend to try.
|
||||
dispatch_type: Optional[Any]
|
||||
The default dispatch type associated with ``dispatchables``, aka
|
||||
":ref:`marking <MarkingGlossary>`".
|
||||
|
||||
See Also
|
||||
--------
|
||||
determine_backend: For a single dispatch value
|
||||
set_backend: For when you know which backend to set
|
||||
|
||||
Notes
|
||||
-----
|
||||
|
||||
Support is determined by the ``__ua_convert__`` protocol. Backends not
|
||||
supporting the type must return ``NotImplemented`` from their
|
||||
``__ua_convert__`` if they don't support input of that type.
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
:func:`determine_backend` allows the backend to be set from a single
|
||||
object. :func:`determine_backend_multi` allows multiple objects to be
|
||||
checked simultaneously for support in the backend. Suppose we have a
|
||||
``BackendAB`` which supports ``TypeA`` and ``TypeB`` in the same call,
|
||||
and a ``BackendBC`` that doesn't support ``TypeA``.
|
||||
|
||||
>>> with ua.set_backend(ex.BackendAB), ua.set_backend(ex.BackendBC):
|
||||
... a, b = ex.TypeA(), ex.TypeB()
|
||||
... with ua.determine_backend_multi(
|
||||
... [ua.Dispatchable(a, "mark"), ua.Dispatchable(b, "mark")],
|
||||
... domain="ua_examples"
|
||||
... ):
|
||||
... res = ex.creation_multimethod()
|
||||
... ex.call_multimethod(res, a, b)
|
||||
TypeA
|
||||
|
||||
This won't call ``BackendBC`` because it doesn't support ``TypeA``.
|
||||
|
||||
We can also leave out the ``ua.Dispatchable`` if we specify the
|
||||
default ``dispatch_type`` for the ``dispatchables`` argument.
|
||||
|
||||
>>> with ua.set_backend(ex.BackendAB), ua.set_backend(ex.BackendBC):
|
||||
... a, b = ex.TypeA(), ex.TypeB()
|
||||
... with ua.determine_backend_multi(
|
||||
... [a, b], dispatch_type="mark", domain="ua_examples"
|
||||
... ):
|
||||
... res = ex.creation_multimethod()
|
||||
... ex.call_multimethod(res, a, b)
|
||||
TypeA
|
||||
|
||||
"""
|
||||
if "dispatch_type" in kwargs:
|
||||
disp_type = kwargs.pop("dispatch_type")
|
||||
dispatchables = tuple(
|
||||
d if isinstance(d, Dispatchable) else Dispatchable(d, disp_type)
|
||||
for d in dispatchables
|
||||
)
|
||||
else:
|
||||
dispatchables = tuple(dispatchables)
|
||||
if not all(isinstance(d, Dispatchable) for d in dispatchables):
|
||||
raise TypeError("dispatchables must be instances of uarray.Dispatchable")
|
||||
|
||||
if len(kwargs) != 0:
|
||||
raise TypeError("Received unexpected keyword arguments: {}".format(kwargs))
|
||||
|
||||
backend = _uarray.determine_backend(domain, dispatchables, coerce)
|
||||
|
||||
return set_backend(backend, coerce=coerce, only=only)
|
||||
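A hedged sketch of the converter helper defined above: ``wrap_single_convertor`` lifts a per-value converter to the full ``__ua_convert__`` signature (the converter body here is purely illustrative):

>>> from scipy._lib._uarray._backend import Dispatchable, wrap_single_convertor
>>> @wrap_single_convertor
... def convert(value, dispatch_type, coerce):
...     return str(value) if dispatch_type is int else value
>>> convert([Dispatchable(1, int), Dispatchable('x', str)], coerce=False)
['1', 'x']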
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_uarray/_uarray.cp311-win_amd64.dll.a
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/_uarray/_uarray.cp311-win_amd64.pyd
vendored
Normal file
Binary file not shown.
711
.CondaPkg/env/Lib/site-packages/scipy/_lib/_util.py
vendored
Normal file
@@ -0,0 +1,711 @@
|
||||
from contextlib import contextmanager
|
||||
import functools
|
||||
import operator
|
||||
import warnings
|
||||
import numbers
|
||||
from collections import namedtuple
|
||||
import inspect
|
||||
import math
|
||||
from typing import (
|
||||
Optional,
|
||||
Union,
|
||||
TYPE_CHECKING,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
import numpy as np
|
||||
|
||||
IntNumber = Union[int, np.integer]
|
||||
DecimalNumber = Union[float, np.floating, np.integer]
|
||||
|
||||
# Since Generator was introduced in numpy 1.17, the following condition is needed for
|
||||
# backward compatibility
|
||||
if TYPE_CHECKING:
|
||||
SeedType = Optional[Union[IntNumber, np.random.Generator,
|
||||
np.random.RandomState]]
|
||||
GeneratorType = TypeVar("GeneratorType", bound=Union[np.random.Generator,
|
||||
np.random.RandomState])
|
||||
|
||||
try:
|
||||
from numpy.random import Generator as Generator
|
||||
except ImportError:
|
||||
class Generator(): # type: ignore[no-redef]
|
||||
pass
|
||||
|
||||
|
||||
def _lazywhere(cond, arrays, f, fillvalue=None, f2=None):
|
||||
"""
|
||||
np.where(cond, x, fillvalue) always evaluates x even where cond is False.
|
||||
This one only evaluates f(arr1[cond], arr2[cond], ...).
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import numpy as np
|
||||
>>> a, b = np.array([1, 2, 3, 4]), np.array([5, 6, 7, 8])
|
||||
>>> def f(a, b):
|
||||
... return a*b
|
||||
>>> _lazywhere(a > 2, (a, b), f, np.nan)
|
||||
array([ nan, nan, 21., 32.])
|
||||
|
||||
Notice, it assumes that all `arrays` are of the same shape, or can be
|
||||
broadcasted together.
|
||||
|
||||
"""
|
||||
cond = np.asarray(cond)
|
||||
if fillvalue is None:
|
||||
if f2 is None:
|
||||
raise ValueError("One of (fillvalue, f2) must be given.")
|
||||
else:
|
||||
fillvalue = np.nan
|
||||
else:
|
||||
if f2 is not None:
|
||||
raise ValueError("Only one of (fillvalue, f2) can be given.")
|
||||
|
||||
args = np.broadcast_arrays(cond, *arrays)
|
||||
cond, arrays = args[0], args[1:]
|
||||
temp = tuple(np.extract(cond, arr) for arr in arrays)
|
||||
tcode = np.mintypecode([a.dtype.char for a in arrays])
|
||||
out = np.full(np.shape(arrays[0]), fill_value=fillvalue, dtype=tcode)
|
||||
np.place(out, cond, f(*temp))
|
||||
if f2 is not None:
|
||||
temp = tuple(np.extract(~cond, arr) for arr in arrays)
|
||||
np.place(out, ~cond, f2(*temp))
|
||||
|
||||
return out
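# Illustrative sketch (editorial addition, not part of the vendored file): when
# ``f2`` is given instead of ``fillvalue``, it is evaluated only on the elements
# where ``cond`` is False.
#
#     a, b = np.array([1, 2, 3, 4]), np.array([5, 6, 7, 8])
#     _lazywhere(a > 2, (a, b), lambda a, b: a * b, f2=lambda a, b: a + b)
#     # array([ 6.,  8., 21., 32.])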
|
||||
|
||||
|
||||
def _lazyselect(condlist, choicelist, arrays, default=0):
|
||||
"""
|
||||
Mimic `np.select(condlist, choicelist)`.
|
||||
|
||||
Notice, it assumes that all `arrays` are of the same shape or can be
|
||||
broadcasted together.
|
||||
|
||||
All functions in `choicelist` must accept array arguments in the order
|
||||
given in `arrays` and must return an array of the same shape as broadcasted
|
||||
`arrays`.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import numpy as np
|
||||
>>> x = np.arange(6)
|
||||
>>> np.select([x < 3, x > 3], [x**2, x**3], default=0)
|
||||
array([ 0, 1, 4, 0, 64, 125])
|
||||
|
||||
>>> _lazyselect([x < 3, x > 3], [lambda x: x**2, lambda x: x**3], (x,))
|
||||
array([ 0., 1., 4., 0., 64., 125.])
|
||||
|
||||
>>> a = -np.ones_like(x)
|
||||
>>> _lazyselect([x < 3, x > 3],
|
||||
... [lambda x, a: x**2, lambda x, a: a * x**3],
|
||||
... (x, a), default=np.nan)
|
||||
array([ 0., 1., 4., nan, -64., -125.])
|
||||
|
||||
"""
|
||||
arrays = np.broadcast_arrays(*arrays)
|
||||
tcode = np.mintypecode([a.dtype.char for a in arrays])
|
||||
out = np.full(np.shape(arrays[0]), fill_value=default, dtype=tcode)
|
||||
for func, cond in zip(choicelist, condlist):
|
||||
if np.all(cond is False):
|
||||
continue
|
||||
cond, _ = np.broadcast_arrays(cond, arrays[0])
|
||||
temp = tuple(np.extract(cond, arr) for arr in arrays)
|
||||
np.place(out, cond, func(*temp))
|
||||
return out
|
||||
|
||||
|
||||
def _aligned_zeros(shape, dtype=float, order="C", align=None):
|
||||
"""Allocate a new ndarray with aligned memory.
|
||||
|
||||
Primary use case for this currently is working around an f2py issue
|
||||
in NumPy 1.9.1, where dtype.alignment is such that np.zeros() does
|
||||
not necessarily create arrays aligned up to it.
|
||||
|
||||
"""
|
||||
dtype = np.dtype(dtype)
|
||||
if align is None:
|
||||
align = dtype.alignment
|
||||
if not hasattr(shape, '__len__'):
|
||||
shape = (shape,)
|
||||
size = functools.reduce(operator.mul, shape) * dtype.itemsize
|
||||
buf = np.empty(size + align + 1, np.uint8)
|
||||
offset = buf.__array_interface__['data'][0] % align
|
||||
if offset != 0:
|
||||
offset = align - offset
|
||||
# Note: slices producing 0-size arrays do not necessarily change
|
||||
# data pointer --- so we use and allocate size+1
|
||||
buf = buf[offset:offset+size+1][:-1]
|
||||
data = np.ndarray(shape, dtype, buf, order=order)
|
||||
data.fill(0)
|
||||
return data
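# Illustrative sketch (editorial addition, not part of the vendored file): the
# returned array is zero-filled and its data pointer is a multiple of ``align``.
#
#     x = _aligned_zeros((4, 4), dtype=np.float64, align=64)
#     assert x.__array_interface__['data'][0] % 64 == 0
#     assert not x.any()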
|
||||
|
||||
|
||||
def _prune_array(array):
|
||||
"""Return an array equivalent to the input array. If the input
|
||||
array is a view of a much larger array, copy its contents to a
|
||||
newly allocated array. Otherwise, return the input unchanged.
|
||||
"""
|
||||
if array.base is not None and array.size < array.base.size // 2:
|
||||
return array.copy()
|
||||
return array
|
||||
|
||||
|
||||
def prod(iterable):
|
||||
"""
|
||||
Product of a sequence of numbers.
|
||||
|
||||
Faster than np.prod for short lists like array shapes, and does
|
||||
not overflow if using Python integers.
|
||||
"""
|
||||
product = 1
|
||||
for x in iterable:
|
||||
product *= x
|
||||
return product
|
||||
|
||||
|
||||
def float_factorial(n: int) -> float:
|
||||
"""Compute the factorial and return as a float
|
||||
|
||||
Returns infinity when result is too large for a double
|
||||
"""
|
||||
return float(math.factorial(n)) if n < 171 else np.inf
|
||||
|
||||
|
||||
# copy-pasted from scikit-learn utils/validation.py
|
||||
# change this to scipy.stats._qmc.check_random_state once numpy 1.16 is dropped
|
||||
def check_random_state(seed):
|
||||
"""Turn `seed` into a `np.random.RandomState` instance.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
seed : {None, int, `numpy.random.Generator`, `numpy.random.RandomState`}, optional
|
||||
If `seed` is None (or `np.random`), the `numpy.random.RandomState`
|
||||
singleton is used.
|
||||
If `seed` is an int, a new ``RandomState`` instance is used,
|
||||
seeded with `seed`.
|
||||
If `seed` is already a ``Generator`` or ``RandomState`` instance then
|
||||
that instance is used.
|
||||
|
||||
Returns
|
||||
-------
|
||||
seed : {`numpy.random.Generator`, `numpy.random.RandomState`}
|
||||
Random number generator.
|
||||
|
||||
"""
|
||||
if seed is None or seed is np.random:
|
||||
return np.random.mtrand._rand
|
||||
if isinstance(seed, (numbers.Integral, np.integer)):
|
||||
return np.random.RandomState(seed)
|
||||
if isinstance(seed, (np.random.RandomState, np.random.Generator)):
|
||||
return seed
|
||||
|
||||
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
|
||||
' instance' % seed)
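# Illustrative sketch (editorial addition, not part of the vendored file): the
# accepted seed forms and what they normalise to.
#
#     check_random_state(None)                       # global RandomState singleton
#     check_random_state(42)                         # fresh RandomState seeded with 42
#     check_random_state(np.random.default_rng(7))   # Generator passed through unchanged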
|
||||
|
||||
|
||||
def _asarray_validated(a, check_finite=True,
|
||||
sparse_ok=False, objects_ok=False, mask_ok=False,
|
||||
as_inexact=False):
|
||||
"""
|
||||
Helper function for SciPy argument validation.
|
||||
|
||||
Many SciPy linear algebra functions do support arbitrary array-like
|
||||
input arguments. Examples of commonly unsupported inputs include
|
||||
matrices containing inf/nan, sparse matrix representations, and
|
||||
matrices with complicated elements.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
a : array_like
|
||||
The array-like input.
|
||||
check_finite : bool, optional
|
||||
Whether to check that the input matrices contain only finite numbers.
|
||||
Disabling may give a performance gain, but may result in problems
|
||||
(crashes, non-termination) if the inputs do contain infinities or NaNs.
|
||||
Default: True
|
||||
sparse_ok : bool, optional
|
||||
True if scipy sparse matrices are allowed.
|
||||
objects_ok : bool, optional
|
||||
True if arrays with dtype('O') are allowed.
|
||||
mask_ok : bool, optional
|
||||
True if masked arrays are allowed.
|
||||
as_inexact : bool, optional
|
||||
True to convert the input array to a np.inexact dtype.
|
||||
|
||||
Returns
|
||||
-------
|
||||
ret : ndarray
|
||||
The converted validated array.
|
||||
|
||||
"""
|
||||
if not sparse_ok:
|
||||
import scipy.sparse
|
||||
if scipy.sparse.issparse(a):
|
||||
msg = ('Sparse matrices are not supported by this function. '
|
||||
'Perhaps one of the scipy.sparse.linalg functions '
|
||||
'would work instead.')
|
||||
raise ValueError(msg)
|
||||
if not mask_ok:
|
||||
if np.ma.isMaskedArray(a):
|
||||
raise ValueError('masked arrays are not supported')
|
||||
toarray = np.asarray_chkfinite if check_finite else np.asarray
|
||||
a = toarray(a)
|
||||
if not objects_ok:
|
||||
if a.dtype is np.dtype('O'):
|
||||
raise ValueError('object arrays are not supported')
|
||||
if as_inexact:
|
||||
if not np.issubdtype(a.dtype, np.inexact):
|
||||
a = toarray(a, dtype=np.float_)
|
||||
return a
|
||||
|
||||
|
||||
def _validate_int(k, name, minimum=None):
|
||||
"""
|
||||
Validate a scalar integer.
|
||||
|
||||
This function can be used to validate an argument to a function
|
||||
that expects the value to be an integer. It uses `operator.index`
|
||||
to validate the value (so, for example, k=2.0 results in a
|
||||
TypeError).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
k : int
|
||||
The value to be validated.
|
||||
name : str
|
||||
The name of the parameter.
|
||||
minimum : int, optional
|
||||
An optional lower bound.
|
||||
"""
|
||||
try:
|
||||
k = operator.index(k)
|
||||
except TypeError:
|
||||
raise TypeError(f'{name} must be an integer.') from None
|
||||
if minimum is not None and k < minimum:
|
||||
raise ValueError(f'{name} must be an integer not less '
|
||||
f'than {minimum}') from None
|
||||
return k
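# Illustrative sketch (editorial addition, not part of the vendored file):
#
#     _validate_int(3, 'n', minimum=0)   # 3
#     _validate_int(np.int64(3), 'n')    # 3 (operator.index accepts NumPy integers)
#     _validate_int(2.0, 'n')            # TypeError: n must be an integer.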
|
||||
|
||||
|
||||
# Add a replacement for inspect.getfullargspec().
|
||||
# The version below is borrowed from Django,
|
||||
# https://github.com/django/django/pull/4846.
|
||||
|
||||
# Note an inconsistency between inspect.getfullargspec(func) and
|
||||
# inspect.signature(func). If `func` is a bound method, the latter does *not*
|
||||
# list `self` as a first argument, while the former *does*.
|
||||
# Hence, cook up a common ground replacement: `getfullargspec_no_self` which
|
||||
# mimics `inspect.getfullargspec` but does not list `self`.
|
||||
#
|
||||
# This way, the caller code does not need to know whether it uses a legacy
|
||||
# .getfullargspec or a bright and shiny .signature.
|
||||
|
||||
FullArgSpec = namedtuple('FullArgSpec',
|
||||
['args', 'varargs', 'varkw', 'defaults',
|
||||
'kwonlyargs', 'kwonlydefaults', 'annotations'])
|
||||
|
||||
|
||||
def getfullargspec_no_self(func):
|
||||
"""inspect.getfullargspec replacement using inspect.signature.
|
||||
|
||||
If func is a bound method, do not list the 'self' parameter.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
func : callable
|
||||
A callable to inspect
|
||||
|
||||
Returns
|
||||
-------
|
||||
fullargspec : FullArgSpec(args, varargs, varkw, defaults, kwonlyargs,
|
||||
kwonlydefaults, annotations)
|
||||
|
||||
NOTE: if the first argument of `func` is self, it is *not*, I repeat
|
||||
*not*, included in fullargspec.args.
|
||||
This is done for consistency between inspect.getargspec() under
|
||||
Python 2.x, and inspect.signature() under Python 3.x.
|
||||
|
||||
"""
|
||||
sig = inspect.signature(func)
|
||||
args = [
|
||||
p.name for p in sig.parameters.values()
|
||||
if p.kind in [inspect.Parameter.POSITIONAL_OR_KEYWORD,
|
||||
inspect.Parameter.POSITIONAL_ONLY]
|
||||
]
|
||||
varargs = [
|
||||
p.name for p in sig.parameters.values()
|
||||
if p.kind == inspect.Parameter.VAR_POSITIONAL
|
||||
]
|
||||
varargs = varargs[0] if varargs else None
|
||||
varkw = [
|
||||
p.name for p in sig.parameters.values()
|
||||
if p.kind == inspect.Parameter.VAR_KEYWORD
|
||||
]
|
||||
varkw = varkw[0] if varkw else None
|
||||
defaults = tuple(
|
||||
p.default for p in sig.parameters.values()
|
||||
if (p.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD and
|
||||
p.default is not p.empty)
|
||||
) or None
|
||||
kwonlyargs = [
|
||||
p.name for p in sig.parameters.values()
|
||||
if p.kind == inspect.Parameter.KEYWORD_ONLY
|
||||
]
|
||||
kwdefaults = {p.name: p.default for p in sig.parameters.values()
|
||||
if p.kind == inspect.Parameter.KEYWORD_ONLY and
|
||||
p.default is not p.empty}
|
||||
annotations = {p.name: p.annotation for p in sig.parameters.values()
|
||||
if p.annotation is not p.empty}
|
||||
return FullArgSpec(args, varargs, varkw, defaults, kwonlyargs,
|
||||
kwdefaults or None, annotations)
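# Illustrative sketch (editorial addition, not part of the vendored file): for a
# bound method, ``self`` is not reported, unlike ``inspect.getfullargspec``.
#
#     class A:
#         def f(self, x, y=1):
#             pass
#
#     getfullargspec_no_self(A().f).args     # ['x', 'y']
#     inspect.getfullargspec(A().f).args     # ['self', 'x', 'y']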
|
||||
|
||||
|
||||
class _FunctionWrapper:
|
||||
"""
|
||||
Object to wrap user's function, allowing picklability
|
||||
"""
|
||||
def __init__(self, f, args):
|
||||
self.f = f
|
||||
self.args = [] if args is None else args
|
||||
|
||||
def __call__(self, x):
|
||||
return self.f(x, *self.args)
|
||||
|
||||
|
||||
class MapWrapper:
|
||||
"""
|
||||
Parallelisation wrapper for working with map-like callables, such as
|
||||
`multiprocessing.Pool.map`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
pool : int or map-like callable
|
||||
If `pool` is an integer, then it specifies the number of threads to
|
||||
use for parallelization. If ``int(pool) == 1``, then no parallel
|
||||
processing is used and the map builtin is used.
|
||||
If ``pool == -1``, then the pool will utilize all available CPUs.
|
||||
If `pool` is a map-like callable that follows the same
|
||||
calling sequence as the built-in map function, then this callable is
|
||||
used for parallelization.
|
||||
"""
|
||||
def __init__(self, pool=1):
|
||||
self.pool = None
|
||||
self._mapfunc = map
|
||||
self._own_pool = False
|
||||
|
||||
if callable(pool):
|
||||
self.pool = pool
|
||||
self._mapfunc = self.pool
|
||||
else:
|
||||
from multiprocessing import Pool
|
||||
# user supplies a number
|
||||
if int(pool) == -1:
|
||||
# use as many processors as possible
|
||||
self.pool = Pool()
|
||||
self._mapfunc = self.pool.map
|
||||
self._own_pool = True
|
||||
elif int(pool) == 1:
|
||||
pass
|
||||
elif int(pool) > 1:
|
||||
# use the number of processors requested
|
||||
self.pool = Pool(processes=int(pool))
|
||||
self._mapfunc = self.pool.map
|
||||
self._own_pool = True
|
||||
else:
|
||||
raise RuntimeError("Number of workers specified must be -1,"
|
||||
" an int >= 1, or an object with a 'map' "
|
||||
"method")
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def terminate(self):
|
||||
if self._own_pool:
|
||||
self.pool.terminate()
|
||||
|
||||
def join(self):
|
||||
if self._own_pool:
|
||||
self.pool.join()
|
||||
|
||||
def close(self):
|
||||
if self._own_pool:
|
||||
self.pool.close()
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
if self._own_pool:
|
||||
self.pool.close()
|
||||
self.pool.terminate()
|
||||
|
||||
def __call__(self, func, iterable):
|
||||
# only accept one iterable because that's all Pool.map accepts
|
||||
try:
|
||||
return self._mapfunc(func, iterable)
|
||||
except TypeError as e:
|
||||
# wrong number of arguments
|
||||
raise TypeError("The map-like callable must be of the"
|
||||
" form f(func, iterable)") from e
|
||||
|
||||
|
||||
def rng_integers(gen, low, high=None, size=None, dtype='int64',
|
||||
endpoint=False):
|
||||
"""
|
||||
Return random integers from low (inclusive) to high (exclusive), or if
|
||||
endpoint=True, low (inclusive) to high (inclusive). Replaces
|
||||
`RandomState.randint` (with endpoint=False) and
|
||||
`RandomState.random_integers` (with endpoint=True).
|
||||
|
||||
Return random integers from the "discrete uniform" distribution of the
|
||||
specified dtype. If high is None (the default), then results are from
|
||||
0 to low.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
gen : {None, np.random.RandomState, np.random.Generator}
|
||||
Random number generator. If None, then the np.random.RandomState
|
||||
singleton is used.
|
||||
low : int or array-like of ints
|
||||
Lowest (signed) integers to be drawn from the distribution (unless
|
||||
high=None, in which case this parameter is 0 and this value is used
|
||||
for high).
|
||||
high : int or array-like of ints
|
||||
If provided, one above the largest (signed) integer to be drawn from
|
||||
the distribution (see above for behavior if high=None). If array-like,
|
||||
must contain integer values.
|
||||
size : array-like of ints, optional
|
||||
Output shape. If the given shape is, e.g., (m, n, k), then m * n * k
|
||||
samples are drawn. Default is None, in which case a single value is
|
||||
returned.
|
||||
dtype : {str, dtype}, optional
|
||||
Desired dtype of the result. All dtypes are determined by their name,
|
||||
i.e., 'int64', 'int', etc, so byteorder is not available and a specific
|
||||
precision may have different C types depending on the platform.
|
||||
The default value is np.int_.
|
||||
endpoint : bool, optional
|
||||
If True, sample from the interval [low, high] instead of the default
|
||||
[low, high). Defaults to False.
|
||||
|
||||
Returns
|
||||
-------
|
||||
out: int or ndarray of ints
|
||||
size-shaped array of random integers from the appropriate distribution,
|
||||
or a single such random int if size not provided.
|
||||
"""
|
||||
if isinstance(gen, Generator):
|
||||
return gen.integers(low, high=high, size=size, dtype=dtype,
|
||||
endpoint=endpoint)
|
||||
else:
|
||||
if gen is None:
|
||||
# default is RandomState singleton used by np.random.
|
||||
gen = np.random.mtrand._rand
|
||||
if endpoint:
|
||||
# inclusive of endpoint
|
||||
# remember that low and high can be arrays, so don't modify in
|
||||
# place
|
||||
if high is None:
|
||||
return gen.randint(low + 1, size=size, dtype=dtype)
|
||||
if high is not None:
|
||||
return gen.randint(low, high=high + 1, size=size, dtype=dtype)
|
||||
|
||||
# exclusive
|
||||
return gen.randint(low, high=high, size=size, dtype=dtype)
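# Illustrative sketch (editorial addition, not part of the vendored file): the same
# call works for both RNG flavours; ``endpoint=True`` makes ``high`` inclusive.
#
#     rng_integers(np.random.default_rng(0), 1, 6, size=3, endpoint=True)   # Generator.integers path
#     rng_integers(np.random.RandomState(0), 1, 6, size=3, endpoint=True)   # RandomState.randint path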
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _fixed_default_rng(seed=1638083107694713882823079058616272161):
|
||||
"""Context with a fixed np.random.default_rng seed."""
|
||||
orig_fun = np.random.default_rng
|
||||
np.random.default_rng = lambda seed=seed: orig_fun(seed)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
np.random.default_rng = orig_fun
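# Illustrative sketch (editorial addition, not part of the vendored file): inside
# the context, argument-less ``np.random.default_rng()`` calls return generators
# seeded with the fixed value, which makes doctest-style output reproducible.
#
#     with _fixed_default_rng(12345):
#         a = np.random.default_rng().random(3)
#         b = np.random.default_rng().random(3)
#     # a and b are equal; outside the context the original factory is restored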
|
||||
|
||||
|
||||
def _argmin(a, keepdims=False, axis=None):
|
||||
"""
|
||||
argmin with a `keepdims` parameter.
|
||||
|
||||
See https://github.com/numpy/numpy/issues/8710
|
||||
|
||||
If axis is not None, a.shape[axis] must be greater than 0.
|
||||
"""
|
||||
res = np.argmin(a, axis=axis)
|
||||
if keepdims and axis is not None:
|
||||
res = np.expand_dims(res, axis=axis)
|
||||
return res
|
||||
|
||||
|
||||
def _first_nonnan(a, axis):
|
||||
"""
|
||||
Return the first non-nan value along the given axis.
|
||||
|
||||
If a slice is all nan, nan is returned for that slice.
|
||||
|
||||
The shape of the return value corresponds to ``keepdims=True``.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import numpy as np
|
||||
>>> nan = np.nan
|
||||
>>> a = np.array([[ 3., 3., nan, 3.],
|
||||
[ 1., nan, 2., 4.],
|
||||
[nan, nan, 9., -1.],
|
||||
[nan, 5., 4., 3.],
|
||||
[ 2., 2., 2., 2.],
|
||||
[nan, nan, nan, nan]])
|
||||
>>> _first_nonnan(a, axis=0)
|
||||
array([[3., 3., 2., 3.]])
|
||||
>>> _first_nonnan(a, axis=1)
|
||||
array([[ 3.],
|
||||
[ 1.],
|
||||
[ 9.],
|
||||
[ 5.],
|
||||
[ 2.],
|
||||
[nan]])
|
||||
"""
|
||||
k = _argmin(np.isnan(a), axis=axis, keepdims=True)
|
||||
return np.take_along_axis(a, k, axis=axis)
|
||||
|
||||
|
||||
def _nan_allsame(a, axis, keepdims=False):
|
||||
"""
|
||||
Determine if the values along an axis are all the same.
|
||||
|
||||
nan values are ignored.
|
||||
|
||||
`a` must be a numpy array.
|
||||
|
||||
`axis` is assumed to be normalized; that is, 0 <= axis < a.ndim.
|
||||
|
||||
For an axis of length 0, the result is True. That is, we adopt the
|
||||
convention that ``allsame([])`` is True. (There are no values in the
|
||||
input that are different.)
|
||||
|
||||
`True` is returned for slices that are all nan--not because all the
|
||||
values are the same, but because this is equivalent to ``allsame([])``.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import numpy as np
|
||||
>>> a = np.array([[ 3., 3., nan, 3.],
|
||||
[ 1., nan, 2., 4.],
|
||||
[nan, nan, 9., -1.],
|
||||
[nan, 5., 4., 3.],
|
||||
[ 2., 2., 2., 2.],
|
||||
[nan, nan, nan, nan]])
|
||||
>>> _nan_allsame(a, axis=1, keepdims=True)
|
||||
array([[ True],
|
||||
[False],
|
||||
[False],
|
||||
[False],
|
||||
[ True],
|
||||
[ True]])
|
||||
"""
|
||||
if axis is None:
|
||||
if a.size == 0:
|
||||
return True
|
||||
a = a.ravel()
|
||||
axis = 0
|
||||
else:
|
||||
shp = a.shape
|
||||
if shp[axis] == 0:
|
||||
shp = shp[:axis] + (1,)*keepdims + shp[axis + 1:]
|
||||
return np.full(shp, fill_value=True, dtype=bool)
|
||||
a0 = _first_nonnan(a, axis=axis)
|
||||
return ((a0 == a) | np.isnan(a)).all(axis=axis, keepdims=keepdims)
|
||||
|
||||
|
||||
def _contains_nan(a, nan_policy='propagate', use_summation=True):
|
||||
if not isinstance(a, np.ndarray):
|
||||
use_summation = False # some array_likes ignore nans (e.g. pandas)
|
||||
policies = ['propagate', 'raise', 'omit']
|
||||
if nan_policy not in policies:
|
||||
raise ValueError("nan_policy must be one of {%s}" %
|
||||
', '.join("'%s'" % s for s in policies))
|
||||
|
||||
if np.issubdtype(a.dtype, np.inexact):
|
||||
# The summation method avoids creating a (potentially huge) array.
|
||||
if use_summation:
|
||||
with np.errstate(invalid='ignore', over='ignore'):
|
||||
contains_nan = np.isnan(np.sum(a))
|
||||
else:
|
||||
contains_nan = np.isnan(a).any()
|
||||
elif np.issubdtype(a.dtype, object):
|
||||
contains_nan = False
|
||||
for el in a.ravel():
|
||||
# isnan doesn't work on non-numeric elements
|
||||
if np.issubdtype(type(el), np.number) and np.isnan(el):
|
||||
contains_nan = True
|
||||
break
|
||||
else:
|
||||
# Only `object` and `inexact` arrays can have NaNs
|
||||
contains_nan = False
|
||||
|
||||
if contains_nan and nan_policy == 'raise':
|
||||
raise ValueError("The input contains nan values")
|
||||
|
||||
return contains_nan, nan_policy
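# Illustrative sketch (editorial addition, not part of the vendored file):
#
#     _contains_nan(np.array([1.0, np.nan]))                      # (True, 'propagate')
#     _contains_nan(np.array([1, 2]))                             # (False, 'propagate'); int dtype cannot hold nan
#     _contains_nan(np.array([1.0, np.nan]), nan_policy='raise')  # raises ValueError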
|
||||
|
||||
|
||||
def _rename_parameter(old_name, new_name, dep_version=None):
|
||||
"""
|
||||
Generate decorator for backward-compatible keyword renaming.
|
||||
|
||||
Apply the decorator generated by `_rename_parameter` to functions with a
|
||||
recently renamed parameter to maintain backward-compatibility.
|
||||
|
||||
After decoration, the function behaves as follows:
|
||||
If only the new parameter is passed into the function, behave as usual.
|
||||
If only the old parameter is passed into the function (as a keyword), raise
|
||||
a DeprecationWarning if `dep_version` is provided, and behave as usual
|
||||
otherwise.
|
||||
If both old and new parameters are passed into the function, raise a
|
||||
DeprecationWarning if `dep_version` is provided, and raise the appropriate
|
||||
TypeError (function got multiple values for argument).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
old_name : str
|
||||
Old name of parameter
|
||||
new_name : str
|
||||
New name of parameter
|
||||
dep_version : str, optional
|
||||
Version of SciPy in which old parameter was deprecated in the format
|
||||
'X.Y.Z'. If supplied, the deprecation message will indicate that
|
||||
support for the old parameter will be removed in version 'X.Y+2.Z'
|
||||
|
||||
Notes
|
||||
-----
|
||||
Untested with functions that accept *args. Probably won't work as written.
|
||||
|
||||
"""
|
||||
def decorator(fun):
|
||||
@functools.wraps(fun)
|
||||
def wrapper(*args, **kwargs):
|
||||
if old_name in kwargs:
|
||||
if dep_version:
|
||||
end_version = dep_version.split('.')
|
||||
end_version[1] = str(int(end_version[1]) + 2)
|
||||
end_version = '.'.join(end_version)
|
||||
message = (f"Use of keyword argument `{old_name}` is "
|
||||
f"deprecated and replaced by `{new_name}`. "
|
||||
f"Support for `{old_name}` will be removed "
|
||||
f"in SciPy {end_version}.")
|
||||
warnings.warn(message, DeprecationWarning, stacklevel=2)
|
||||
if new_name in kwargs:
|
||||
message = (f"{fun.__name__}() got multiple values for "
|
||||
f"argument now known as `{new_name}`")
|
||||
raise TypeError(message)
|
||||
kwargs[new_name] = kwargs.pop(old_name)
|
||||
return fun(*args, **kwargs)
|
||||
return wrapper
|
||||
return decorator
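# Illustrative sketch (editorial addition; the decorated function below is
# hypothetical): a keyword renamed from ``x`` to ``arr`` keeps accepting the old
# spelling, with a DeprecationWarning when ``dep_version`` is given.
#
#     @_rename_parameter("x", "arr", dep_version="1.9.0")
#     def total(arr):
#         return np.sum(arr)
#
#     total(arr=[1, 2, 3])   # new name: no warning
#     total(x=[1, 2, 3])     # old name: warns that support ends in SciPy 1.11.0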
|
||||
|
||||
|
||||
def _rng_spawn(rng, n_children):
|
||||
# spawns independent RNGs from a parent RNG
|
||||
bg = rng._bit_generator
|
||||
ss = bg._seed_seq
|
||||
child_rngs = [np.random.Generator(type(bg)(child_ss))
|
||||
for child_ss in ss.spawn(n_children)]
|
||||
return child_rngs
|
||||
399
.CondaPkg/env/Lib/site-packages/scipy/_lib/decorator.py
vendored
Normal file
@@ -0,0 +1,399 @@
|
||||
# ######################### LICENSE ############################ #
|
||||
|
||||
# Copyright (c) 2005-2015, Michele Simionato
|
||||
# All rights reserved.
|
||||
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
|
||||
# Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# Redistributions in bytecode form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in
|
||||
# the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
|
||||
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||
# DAMAGE.
|
||||
|
||||
"""
|
||||
Decorator module, see https://pypi.python.org/pypi/decorator
|
||||
for the documentation.
|
||||
"""
|
||||
import re
|
||||
import sys
|
||||
import inspect
|
||||
import operator
|
||||
import itertools
|
||||
import collections
|
||||
|
||||
from inspect import getfullargspec
|
||||
|
||||
__version__ = '4.0.5'
|
||||
|
||||
|
||||
def get_init(cls):
|
||||
return cls.__init__
|
||||
|
||||
|
||||
# getargspec has been deprecated in Python 3.5
|
||||
ArgSpec = collections.namedtuple(
|
||||
'ArgSpec', 'args varargs varkw defaults')
|
||||
|
||||
|
||||
def getargspec(f):
|
||||
"""A replacement for inspect.getargspec"""
|
||||
spec = getfullargspec(f)
|
||||
return ArgSpec(spec.args, spec.varargs, spec.varkw, spec.defaults)
|
||||
|
||||
|
||||
DEF = re.compile(r'\s*def\s*([_\w][_\w\d]*)\s*\(')
|
||||
|
||||
|
||||
# basic functionality
|
||||
class FunctionMaker:
|
||||
"""
|
||||
An object with the ability to create functions with a given signature.
|
||||
It has attributes name, doc, module, signature, defaults, dict, and
|
||||
methods update and make.
|
||||
"""
|
||||
|
||||
# Atomic get-and-increment provided by the GIL
|
||||
_compile_count = itertools.count()
|
||||
|
||||
def __init__(self, func=None, name=None, signature=None,
|
||||
defaults=None, doc=None, module=None, funcdict=None):
|
||||
self.shortsignature = signature
|
||||
if func:
|
||||
# func can be a class or a callable, but not an instance method
|
||||
self.name = func.__name__
|
||||
if self.name == '<lambda>': # small hack for lambda functions
|
||||
self.name = '_lambda_'
|
||||
self.doc = func.__doc__
|
||||
self.module = func.__module__
|
||||
if inspect.isfunction(func):
|
||||
argspec = getfullargspec(func)
|
||||
self.annotations = getattr(func, '__annotations__', {})
|
||||
for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
|
||||
'kwonlydefaults'):
|
||||
setattr(self, a, getattr(argspec, a))
|
||||
for i, arg in enumerate(self.args):
|
||||
setattr(self, 'arg%d' % i, arg)
|
||||
allargs = list(self.args)
|
||||
allshortargs = list(self.args)
|
||||
if self.varargs:
|
||||
allargs.append('*' + self.varargs)
|
||||
allshortargs.append('*' + self.varargs)
|
||||
elif self.kwonlyargs:
|
||||
allargs.append('*') # single star syntax
|
||||
for a in self.kwonlyargs:
|
||||
allargs.append('%s=None' % a)
|
||||
allshortargs.append('%s=%s' % (a, a))
|
||||
if self.varkw:
|
||||
allargs.append('**' + self.varkw)
|
||||
allshortargs.append('**' + self.varkw)
|
||||
self.signature = ', '.join(allargs)
|
||||
self.shortsignature = ', '.join(allshortargs)
|
||||
self.dict = func.__dict__.copy()
|
||||
# func=None happens when decorating a caller
|
||||
if name:
|
||||
self.name = name
|
||||
if signature is not None:
|
||||
self.signature = signature
|
||||
if defaults:
|
||||
self.defaults = defaults
|
||||
if doc:
|
||||
self.doc = doc
|
||||
if module:
|
||||
self.module = module
|
||||
if funcdict:
|
||||
self.dict = funcdict
|
||||
# check existence of required attributes
|
||||
assert hasattr(self, 'name')
|
||||
if not hasattr(self, 'signature'):
|
||||
raise TypeError('You are decorating a non-function: %s' % func)
|
||||
|
||||
def update(self, func, **kw):
|
||||
"Update the signature of func with the data in self"
|
||||
func.__name__ = self.name
|
||||
func.__doc__ = getattr(self, 'doc', None)
|
||||
func.__dict__ = getattr(self, 'dict', {})
|
||||
func.__defaults__ = getattr(self, 'defaults', ())
|
||||
func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
|
||||
func.__annotations__ = getattr(self, 'annotations', None)
|
||||
try:
|
||||
frame = sys._getframe(3)
|
||||
except AttributeError: # for IronPython and similar implementations
|
||||
callermodule = '?'
|
||||
else:
|
||||
callermodule = frame.f_globals.get('__name__', '?')
|
||||
func.__module__ = getattr(self, 'module', callermodule)
|
||||
func.__dict__.update(kw)
|
||||
|
||||
def make(self, src_templ, evaldict=None, addsource=False, **attrs):
|
||||
"Make a new function from a given template and update the signature"
|
||||
src = src_templ % vars(self) # expand name and signature
|
||||
evaldict = evaldict or {}
|
||||
mo = DEF.match(src)
|
||||
if mo is None:
|
||||
raise SyntaxError('not a valid function template\n%s' % src)
|
||||
name = mo.group(1) # extract the function name
|
||||
names = set([name] + [arg.strip(' *') for arg in
|
||||
self.shortsignature.split(',')])
|
||||
for n in names:
|
||||
if n in ('_func_', '_call_'):
|
||||
raise NameError('%s is overridden in\n%s' % (n, src))
|
||||
if not src.endswith('\n'): # add a newline just for safety
|
||||
src += '\n' # this is needed in old versions of Python
|
||||
|
||||
# Ensure each generated function has a unique filename for profilers
|
||||
# (such as cProfile) that depend on the tuple of (<filename>,
|
||||
# <definition line>, <function name>) being unique.
|
||||
filename = '<decorator-gen-%d>' % (next(self._compile_count),)
|
||||
try:
|
||||
code = compile(src, filename, 'single')
|
||||
exec(code, evaldict)
|
||||
except: # noqa: E722
|
||||
print('Error in generated code:', file=sys.stderr)
|
||||
print(src, file=sys.stderr)
|
||||
raise
|
||||
func = evaldict[name]
|
||||
if addsource:
|
||||
attrs['__source__'] = src
|
||||
self.update(func, **attrs)
|
||||
return func
|
||||
|
||||
@classmethod
|
||||
def create(cls, obj, body, evaldict, defaults=None,
|
||||
doc=None, module=None, addsource=True, **attrs):
|
||||
"""
|
||||
Create a function from the strings name, signature, and body.
|
||||
evaldict is the evaluation dictionary. If addsource is true, an
|
||||
attribute __source__ is added to the result. The attributes attrs
|
||||
are added, if any.
|
||||
"""
|
||||
if isinstance(obj, str): # "name(signature)"
|
||||
name, rest = obj.strip().split('(', 1)
|
||||
signature = rest[:-1] # strip a right parens
|
||||
func = None
|
||||
else: # a function
|
||||
name = None
|
||||
signature = None
|
||||
func = obj
|
||||
self = cls(func, name, signature, defaults, doc, module)
|
||||
ibody = '\n'.join(' ' + line for line in body.splitlines())
|
||||
return self.make('def %(name)s(%(signature)s):\n' + ibody,
|
||||
evaldict, addsource, **attrs)
|
||||
|
||||
|
||||
def decorate(func, caller):
|
||||
"""
|
||||
decorate(func, caller) decorates a function using a caller.
|
||||
"""
|
||||
evaldict = func.__globals__.copy()
|
||||
evaldict['_call_'] = caller
|
||||
evaldict['_func_'] = func
|
||||
fun = FunctionMaker.create(
|
||||
func, "return _call_(_func_, %(shortsignature)s)",
|
||||
evaldict, __wrapped__=func)
|
||||
if hasattr(func, '__qualname__'):
|
||||
fun.__qualname__ = func.__qualname__
|
||||
return fun
|
||||
|
||||
|
||||
def decorator(caller, _func=None):
|
||||
"""decorator(caller) converts a caller function into a decorator"""
|
||||
if _func is not None: # return a decorated function
|
||||
# this is obsolete behavior; you should use decorate instead
|
||||
return decorate(_func, caller)
|
||||
# else return a decorator function
|
||||
if inspect.isclass(caller):
|
||||
name = caller.__name__.lower()
|
||||
callerfunc = get_init(caller)
|
||||
doc = 'decorator(%s) converts functions/generators into ' \
|
||||
'factories of %s objects' % (caller.__name__, caller.__name__)
|
||||
elif inspect.isfunction(caller):
|
||||
if caller.__name__ == '<lambda>':
|
||||
name = '_lambda_'
|
||||
else:
|
||||
name = caller.__name__
|
||||
callerfunc = caller
|
||||
doc = caller.__doc__
|
||||
else: # assume caller is an object with a __call__ method
|
||||
name = caller.__class__.__name__.lower()
|
||||
callerfunc = caller.__call__.__func__
|
||||
doc = caller.__call__.__doc__
|
||||
evaldict = callerfunc.__globals__.copy()
|
||||
evaldict['_call_'] = caller
|
||||
evaldict['_decorate_'] = decorate
|
||||
return FunctionMaker.create(
|
||||
'%s(func)' % name, 'return _decorate_(func, _call_)',
|
||||
evaldict, doc=doc, module=caller.__module__,
|
||||
__wrapped__=caller)
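# Illustrative sketch (editorial addition, not part of the vendored file): a caller
# receives the decorated function followed by its arguments, and the generated
# wrapper preserves the original signature instead of collapsing it to *args/**kw.
#
#     def trace(func, *args, **kw):
#         print('calling %s' % func.__name__)
#         return func(*args, **kw)
#
#     @decorator(trace)
#     def add(a, b=0):
#         return a + b
#
#     add(1, b=2)                   # prints "calling add", returns 3
#     getfullargspec(add).args      # ['a', 'b'] -- signature preserved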
|
||||
|
||||
|
||||
# ####################### contextmanager ####################### #
|
||||
|
||||
try: # Python >= 3.2
|
||||
from contextlib import _GeneratorContextManager
|
||||
except ImportError: # Python >= 2.5
|
||||
from contextlib import GeneratorContextManager as _GeneratorContextManager
|
||||
|
||||
|
||||
class ContextManager(_GeneratorContextManager):
|
||||
def __call__(self, func):
|
||||
"""Context manager decorator"""
|
||||
return FunctionMaker.create(
|
||||
func, "with _self_: return _func_(%(shortsignature)s)",
|
||||
dict(_self_=self, _func_=func), __wrapped__=func)
|
||||
|
||||
|
||||
init = getfullargspec(_GeneratorContextManager.__init__)
|
||||
n_args = len(init.args)
|
||||
if n_args == 2 and not init.varargs: # (self, genobj) Python 2.7
|
||||
def __init__(self, g, *a, **k):
|
||||
return _GeneratorContextManager.__init__(self, g(*a, **k))
|
||||
ContextManager.__init__ = __init__
|
||||
elif n_args == 2 and init.varargs: # (self, gen, *a, **k) Python 3.4
|
||||
pass
|
||||
elif n_args == 4: # (self, gen, args, kwds) Python 3.5
|
||||
def __init__(self, g, *a, **k):
|
||||
return _GeneratorContextManager.__init__(self, g, a, k)
|
||||
ContextManager.__init__ = __init__
|
||||
|
||||
contextmanager = decorator(ContextManager)
|
||||
|
||||
|
||||
# ############################ dispatch_on ############################ #
|
||||
|
||||
def append(a, vancestors):
|
||||
"""
|
||||
Append ``a`` to the list of the virtual ancestors, unless it is already
|
||||
included.
|
||||
"""
|
||||
add = True
|
||||
for j, va in enumerate(vancestors):
|
||||
if issubclass(va, a):
|
||||
add = False
|
||||
break
|
||||
if issubclass(a, va):
|
||||
vancestors[j] = a
|
||||
add = False
|
||||
if add:
|
||||
vancestors.append(a)
|
||||
|
||||
|
||||
# inspired from simplegeneric by P.J. Eby and functools.singledispatch
|
||||
def dispatch_on(*dispatch_args):
|
||||
"""
|
||||
Factory of decorators turning a function into a generic function
|
||||
dispatching on the given arguments.
|
||||
"""
|
||||
assert dispatch_args, 'No dispatch args passed'
|
||||
dispatch_str = '(%s,)' % ', '.join(dispatch_args)
|
||||
|
||||
def check(arguments, wrong=operator.ne, msg=''):
|
||||
"""Make sure one passes the expected number of arguments"""
|
||||
if wrong(len(arguments), len(dispatch_args)):
|
||||
raise TypeError('Expected %d arguments, got %d%s' %
|
||||
(len(dispatch_args), len(arguments), msg))
|
||||
|
||||
def gen_func_dec(func):
|
||||
"""Decorator turning a function into a generic function"""
|
||||
|
||||
# first check the dispatch arguments
|
||||
argset = set(getfullargspec(func).args)
|
||||
if not set(dispatch_args) <= argset:
|
||||
raise NameError('Unknown dispatch arguments %s' % dispatch_str)
|
||||
|
||||
typemap = {}
|
||||
|
||||
def vancestors(*types):
|
||||
"""
|
||||
Get a list of sets of virtual ancestors for the given types
|
||||
"""
|
||||
check(types)
|
||||
ras = [[] for _ in range(len(dispatch_args))]
|
||||
for types_ in typemap:
|
||||
for t, type_, ra in zip(types, types_, ras):
|
||||
if issubclass(t, type_) and type_ not in t.__mro__:
|
||||
append(type_, ra)
|
||||
return [set(ra) for ra in ras]
|
||||
|
||||
def ancestors(*types):
|
||||
"""
|
||||
Get a list of virtual MROs, one for each type
|
||||
"""
|
||||
check(types)
|
||||
lists = []
|
||||
for t, vas in zip(types, vancestors(*types)):
|
||||
n_vas = len(vas)
|
||||
if n_vas > 1:
|
||||
raise RuntimeError(
|
||||
'Ambiguous dispatch for %s: %s' % (t, vas))
|
||||
elif n_vas == 1:
|
||||
va, = vas
|
||||
mro = type('t', (t, va), {}).__mro__[1:]
|
||||
else:
|
||||
mro = t.__mro__
|
||||
lists.append(mro[:-1]) # discard t and object
|
||||
return lists
|
||||
|
||||
def register(*types):
|
||||
"""
|
||||
Decorator to register an implementation for the given types
|
||||
"""
|
||||
check(types)
|
||||
|
||||
def dec(f):
|
||||
check(getfullargspec(f).args, operator.lt, ' in ' + f.__name__)
|
||||
typemap[types] = f
|
||||
return f
|
||||
return dec
|
||||
|
||||
def dispatch_info(*types):
|
||||
"""
|
||||
A utility to introspect the dispatch algorithm
|
||||
"""
|
||||
check(types)
|
||||
lst = [tuple(a.__name__ for a in anc)
|
||||
for anc in itertools.product(*ancestors(*types))]
|
||||
return lst
|
||||
|
||||
def _dispatch(dispatch_args, *args, **kw):
|
||||
types = tuple(type(arg) for arg in dispatch_args)
|
||||
try: # fast path
|
||||
f = typemap[types]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
return f(*args, **kw)
|
||||
combinations = itertools.product(*ancestors(*types))
|
||||
next(combinations) # the first one has been already tried
|
||||
for types_ in combinations:
|
||||
f = typemap.get(types_)
|
||||
if f is not None:
|
||||
return f(*args, **kw)
|
||||
|
||||
# else call the default implementation
|
||||
return func(*args, **kw)
|
||||
|
||||
return FunctionMaker.create(
|
||||
func, 'return _f_(%s, %%(shortsignature)s)' % dispatch_str,
|
||||
dict(_f_=_dispatch), register=register, default=func,
|
||||
typemap=typemap, vancestors=vancestors, ancestors=ancestors,
|
||||
dispatch_info=dispatch_info, __wrapped__=func)
|
||||
|
||||
gen_func_dec.__name__ = 'dispatch_on' + dispatch_str
|
||||
return gen_func_dec
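# Illustrative sketch (editorial addition, not part of the vendored file): build a
# generic function dispatching on the type of ``obj`` and register per-type
# implementations afterwards; unregistered types fall back to the default body.
#
#     @dispatch_on('obj')
#     def describe(obj):
#         return 'something else'
#
#     @describe.register(int)
#     def describe_int(obj):
#         return 'an integer'
#
#     describe(3)      # 'an integer'
#     describe('hi')   # 'something else'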
|
||||
107
.CondaPkg/env/Lib/site-packages/scipy/_lib/deprecation.py
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
import functools
|
||||
import warnings
|
||||
|
||||
__all__ = ["_deprecated"]
|
||||
|
||||
|
||||
def _deprecated(msg, stacklevel=2):
|
||||
"""Deprecate a function by emitting a warning on use."""
|
||||
def wrap(fun):
|
||||
if isinstance(fun, type):
|
||||
warnings.warn(
|
||||
"Trying to deprecate class {!r}".format(fun),
|
||||
category=RuntimeWarning, stacklevel=2)
|
||||
return fun
|
||||
|
||||
@functools.wraps(fun)
|
||||
def call(*args, **kwargs):
|
||||
warnings.warn(msg, category=DeprecationWarning,
|
||||
stacklevel=stacklevel)
|
||||
return fun(*args, **kwargs)
|
||||
call.__doc__ = fun.__doc__
|
||||
return call
|
||||
|
||||
return wrap
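# Illustrative sketch (editorial addition; ``old_solver`` is hypothetical):
#
#     @_deprecated("old_solver is deprecated, use new_solver instead")
#     def old_solver(x):
#         return x + 1
#
#     old_solver(1)   # returns 2 and emits a DeprecationWarning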
|
||||
|
||||
|
||||
class _DeprecationHelperStr:
|
||||
"""
|
||||
Helper class used by deprecate_cython_api
|
||||
"""
|
||||
def __init__(self, content, message):
|
||||
self._content = content
|
||||
self._message = message
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._content)
|
||||
|
||||
def __eq__(self, other):
|
||||
res = (self._content == other)
|
||||
if res:
|
||||
warnings.warn(self._message, category=DeprecationWarning,
|
||||
stacklevel=2)
|
||||
return res
|
||||
|
||||
|
||||
def deprecate_cython_api(module, routine_name, new_name=None, message=None):
|
||||
"""
|
||||
Deprecate an exported cdef function in a public Cython API module.
|
||||
|
||||
Only functions can be deprecated; typedefs etc. cannot.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
module : module
|
||||
Public Cython API module (e.g. scipy.linalg.cython_blas).
|
||||
routine_name : str
|
||||
Name of the routine to deprecate. May also be a fused-type
|
||||
routine (in which case all of its specializations are deprecated).
|
||||
new_name : str
|
||||
New name to include in the deprecation warning message
|
||||
message : str
|
||||
Additional text in the deprecation warning message
|
||||
|
||||
Examples
|
||||
--------
|
||||
Usually, this function would be used in the top-level of the
|
||||
module ``.pyx`` file:
|
||||
|
||||
>>> from scipy._lib.deprecation import deprecate_cython_api
|
||||
>>> import scipy.linalg.cython_blas as mod
|
||||
>>> deprecate_cython_api(mod, "dgemm", "dgemm_new",
|
||||
... message="Deprecated in Scipy 1.5.0")
|
||||
>>> del deprecate_cython_api, mod
|
||||
|
||||
After this, Cython modules that use the deprecated function emit a
|
||||
deprecation warning when they are imported.
|
||||
|
||||
"""
|
||||
old_name = "{}.{}".format(module.__name__, routine_name)
|
||||
|
||||
if new_name is None:
|
||||
depdoc = "`%s` is deprecated!" % old_name
|
||||
else:
|
||||
depdoc = "`%s` is deprecated, use `%s` instead!" % \
|
||||
(old_name, new_name)
|
||||
|
||||
if message is not None:
|
||||
depdoc += "\n" + message
|
||||
|
||||
d = module.__pyx_capi__
|
||||
|
||||
# Check if the function is a fused-type function with a mangled name
|
||||
j = 0
|
||||
has_fused = False
|
||||
while True:
|
||||
fused_name = "__pyx_fuse_{}{}".format(j, routine_name)
|
||||
if fused_name in d:
|
||||
has_fused = True
|
||||
d[_DeprecationHelperStr(fused_name, depdoc)] = d.pop(fused_name)
|
||||
j += 1
|
||||
else:
|
||||
break
|
||||
|
||||
# If not, apply deprecation to the named routine
|
||||
if not has_fused:
|
||||
d[_DeprecationHelperStr(routine_name, depdoc)] = d.pop(routine_name)
|
||||
|
||||
275
.CondaPkg/env/Lib/site-packages/scipy/_lib/doccer.py
vendored
Normal file
@@ -0,0 +1,275 @@
|
||||
''' Utilities to allow inserting docstring fragments for common
|
||||
parameters into function and method docstrings'''
|
||||
|
||||
import sys
|
||||
|
||||
__all__ = [
|
||||
'docformat', 'inherit_docstring_from', 'indentcount_lines',
|
||||
'filldoc', 'unindent_dict', 'unindent_string', 'extend_notes_in_docstring',
|
||||
'replace_notes_in_docstring', 'doc_replace'
|
||||
]
|
||||
|
||||
|
||||
def docformat(docstring, docdict=None):
|
||||
''' Fill a function docstring from variables in dictionary
|
||||
|
||||
Adapt the indent of the inserted docs
|
||||
|
||||
Parameters
|
||||
----------
|
||||
docstring : string
|
||||
docstring from function, possibly with dict formatting strings
|
||||
docdict : dict, optional
|
||||
dictionary with keys that match the dict formatting strings
|
||||
and values that are docstring fragments to be inserted. The
|
||||
indentation of the inserted docstrings is set to match the
|
||||
minimum indentation of the ``docstring`` by adding this
|
||||
indentation to all lines of the inserted string, except the
|
||||
first.
|
||||
|
||||
Returns
|
||||
-------
|
||||
outstring : string
|
||||
string with requested ``docdict`` strings inserted
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> docformat(' Test string with %(value)s', {'value':'inserted value'})
|
||||
' Test string with inserted value'
|
||||
>>> docstring = 'First line\\n Second line\\n %(value)s'
|
||||
>>> inserted_string = "indented\\nstring"
|
||||
>>> docdict = {'value': inserted_string}
|
||||
>>> docformat(docstring, docdict)
|
||||
'First line\\n Second line\\n indented\\n string'
|
||||
'''
|
||||
if not docstring:
|
||||
return docstring
|
||||
if docdict is None:
|
||||
docdict = {}
|
||||
if not docdict:
|
||||
return docstring
|
||||
lines = docstring.expandtabs().splitlines()
|
||||
# Find the minimum indent of the main docstring, after first line
|
||||
if len(lines) < 2:
|
||||
icount = 0
|
||||
else:
|
||||
icount = indentcount_lines(lines[1:])
|
||||
indent = ' ' * icount
|
||||
# Insert this indent to dictionary docstrings
|
||||
indented = {}
|
||||
for name, dstr in docdict.items():
|
||||
lines = dstr.expandtabs().splitlines()
|
||||
try:
|
||||
newlines = [lines[0]]
|
||||
for line in lines[1:]:
|
||||
newlines.append(indent+line)
|
||||
indented[name] = '\n'.join(newlines)
|
||||
except IndexError:
|
||||
indented[name] = dstr
|
||||
return docstring % indented
|
||||
|
||||
|
||||
def inherit_docstring_from(cls):
|
||||
"""
|
||||
This decorator modifies the decorated function's docstring by
|
||||
replacing occurrences of '%(super)s' with the docstring of the
|
||||
method of the same name from the class `cls`.
|
||||
|
||||
If the decorated method has no docstring, it is simply given the
|
||||
docstring of `cls`'s method.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
cls : Python class or instance
|
||||
A class with a method with the same name as the decorated method.
|
||||
The docstring of the method in this class replaces '%(super)s' in the
|
||||
docstring of the decorated method.
|
||||
|
||||
Returns
|
||||
-------
|
||||
f : function
|
||||
The decorator function that modifies the __doc__ attribute
|
||||
of its argument.
|
||||
|
||||
Examples
|
||||
--------
|
||||
In the following, the docstring for Bar.func is created using the
|
||||
docstring of `Foo.func`.
|
||||
|
||||
>>> class Foo:
|
||||
... def func(self):
|
||||
... '''Do something useful.'''
|
||||
... return
|
||||
...
|
||||
>>> class Bar(Foo):
|
||||
... @inherit_docstring_from(Foo)
|
||||
... def func(self):
|
||||
... '''%(super)s
|
||||
... Do it fast.
|
||||
... '''
|
||||
... return
|
||||
...
|
||||
>>> b = Bar()
|
||||
>>> b.func.__doc__
|
||||
'Do something useful.\n Do it fast.\n '
|
||||
|
||||
"""
|
||||
def _doc(func):
|
||||
cls_docstring = getattr(cls, func.__name__).__doc__
|
||||
func_docstring = func.__doc__
|
||||
if func_docstring is None:
|
||||
func.__doc__ = cls_docstring
|
||||
else:
|
||||
new_docstring = func_docstring % dict(super=cls_docstring)
|
||||
func.__doc__ = new_docstring
|
||||
return func
|
||||
return _doc
|
||||
|
||||
|
||||
def extend_notes_in_docstring(cls, notes):
|
||||
"""
|
||||
This decorator replaces the decorated function's docstring
|
||||
with the docstring from corresponding method in `cls`.
|
||||
It extends the 'Notes' section of that docstring to include
|
||||
the given `notes`.
|
||||
"""
|
||||
def _doc(func):
|
||||
cls_docstring = getattr(cls, func.__name__).__doc__
|
||||
# If python is called with -OO option,
|
||||
# there is no docstring
|
||||
if cls_docstring is None:
|
||||
return func
|
||||
end_of_notes = cls_docstring.find(' References\n')
|
||||
if end_of_notes == -1:
|
||||
end_of_notes = cls_docstring.find(' Examples\n')
|
||||
if end_of_notes == -1:
|
||||
end_of_notes = len(cls_docstring)
|
||||
func.__doc__ = (cls_docstring[:end_of_notes] + notes +
|
||||
cls_docstring[end_of_notes:])
|
||||
return func
|
||||
return _doc
|
||||
|
||||
|
||||
def replace_notes_in_docstring(cls, notes):
|
||||
"""
|
||||
This decorator replaces the decorated function's docstring
|
||||
with the docstring from corresponding method in `cls`.
|
||||
It replaces the 'Notes' section of that docstring with
|
||||
the given `notes`.
|
||||
"""
|
||||
def _doc(func):
|
||||
cls_docstring = getattr(cls, func.__name__).__doc__
|
||||
notes_header = ' Notes\n -----\n'
|
||||
# If python is called with -OO option,
|
||||
# there is no docstring
|
||||
if cls_docstring is None:
|
||||
return func
|
||||
start_of_notes = cls_docstring.find(notes_header)
|
||||
end_of_notes = cls_docstring.find(' References\n')
|
||||
if end_of_notes == -1:
|
||||
end_of_notes = cls_docstring.find(' Examples\n')
|
||||
if end_of_notes == -1:
|
||||
end_of_notes = len(cls_docstring)
|
||||
func.__doc__ = (cls_docstring[:start_of_notes + len(notes_header)] +
|
||||
notes +
|
||||
cls_docstring[end_of_notes:])
|
||||
return func
|
||||
return _doc
|
||||
|
||||
|
||||
def indentcount_lines(lines):
|
||||
''' Minimum indent for all lines in line list
|
||||
|
||||
>>> lines = [' one', ' two', ' three']
|
||||
>>> indentcount_lines(lines)
|
||||
1
|
||||
>>> lines = []
|
||||
>>> indentcount_lines(lines)
|
||||
0
|
||||
>>> lines = [' one']
|
||||
>>> indentcount_lines(lines)
|
||||
1
|
||||
>>> indentcount_lines([' '])
|
||||
0
|
||||
'''
|
||||
indentno = sys.maxsize
|
||||
for line in lines:
|
||||
stripped = line.lstrip()
|
||||
if stripped:
|
||||
indentno = min(indentno, len(line) - len(stripped))
|
||||
if indentno == sys.maxsize:
|
||||
return 0
|
||||
return indentno
|
||||
|
||||
|
||||
def filldoc(docdict, unindent_params=True):
|
||||
''' Return docstring decorator using docdict variable dictionary
|
||||
|
||||
Parameters
|
||||
----------
|
||||
docdict : dictionary
|
||||
dictionary containing name, docstring fragment pairs
|
||||
unindent_params : {False, True}, boolean, optional
|
||||
If True, strip common indentation from all parameters in
|
||||
docdict
|
||||
|
||||
Returns
|
||||
-------
|
||||
decfunc : function
|
||||
decorator that applies dictionary to input function docstring
|
||||
|
||||
'''
|
||||
if unindent_params:
|
||||
docdict = unindent_dict(docdict)
|
||||
|
||||
def decorate(f):
|
||||
f.__doc__ = docformat(f.__doc__, docdict)
|
||||
return f
|
||||
return decorate
|
||||
|
||||
|
||||
def unindent_dict(docdict):
|
||||
''' Unindent all strings in a docdict '''
|
||||
can_dict = {}
|
||||
for name, dstr in docdict.items():
|
||||
can_dict[name] = unindent_string(dstr)
|
||||
return can_dict
|
||||
|
||||
|
||||
def unindent_string(docstring):
|
||||
''' Set docstring to minimum indent for all lines, including first
|
||||
|
||||
>>> unindent_string(' two')
|
||||
'two'
|
||||
>>> unindent_string(' two\\n three')
|
||||
'two\\n three'
|
||||
'''
|
||||
lines = docstring.expandtabs().splitlines()
|
||||
icount = indentcount_lines(lines)
|
||||
if icount == 0:
|
||||
return docstring
|
||||
return '\n'.join([line[icount:] for line in lines])
|
||||
|
||||
|
||||
def doc_replace(obj, oldval, newval):
|
||||
"""Decorator to take the docstring from obj, with oldval replaced by newval
|
||||
|
||||
Equivalent to ``func.__doc__ = obj.__doc__.replace(oldval, newval)``
|
||||
|
||||
Parameters
|
||||
----------
|
||||
obj : object
|
||||
The object to take the docstring from.
|
||||
oldval : string
|
||||
The string to replace from the original docstring.
|
||||
newval : string
|
||||
The string to replace ``oldval`` with.
|
||||
"""
|
||||
# __doc__ may be None for optimized Python (-OO)
|
||||
doc = (obj.__doc__ or '').replace(oldval, newval)
|
||||
|
||||
def inner(func):
|
||||
func.__doc__ = doc
|
||||
return func
|
||||
|
||||
return inner
|
||||
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/messagestream.cp311-win_amd64.dll.a
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/messagestream.cp311-win_amd64.pyd
vendored
Normal file
Binary file not shown.
0
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__init__.py
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test__gcutils.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test__pep440.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test__testutils.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test__threadsafety.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test__util.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_bunch.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_ccallback.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_deprecation.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_import_cycles.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_public_api.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_scipy_version.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_tmpdirs.cpython-311.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_warnings.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/__pycache__/test_warnings.cpython-311.pyc
vendored
Normal file
Binary file not shown.
101
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__gcutils.py
vendored
Normal file
101
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__gcutils.py
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
""" Test for assert_deallocated context manager and gc utilities
|
||||
"""
|
||||
import gc
|
||||
|
||||
from scipy._lib._gcutils import (set_gc_state, gc_state, assert_deallocated,
|
||||
ReferenceError, IS_PYPY)
|
||||
|
||||
from numpy.testing import assert_equal
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
def test_set_gc_state():
|
||||
gc_status = gc.isenabled()
|
||||
try:
|
||||
for state in (True, False):
|
||||
gc.enable()
|
||||
set_gc_state(state)
|
||||
assert_equal(gc.isenabled(), state)
|
||||
gc.disable()
|
||||
set_gc_state(state)
|
||||
assert_equal(gc.isenabled(), state)
|
||||
finally:
|
||||
if gc_status:
|
||||
gc.enable()
|
||||
|
||||
|
||||
def test_gc_state():
|
||||
# Test gc_state context manager
|
||||
gc_status = gc.isenabled()
|
||||
try:
|
||||
for pre_state in (True, False):
|
||||
set_gc_state(pre_state)
|
||||
for with_state in (True, False):
|
||||
# Check the gc state is with_state in with block
|
||||
with gc_state(with_state):
|
||||
assert_equal(gc.isenabled(), with_state)
|
||||
# And returns to previous state outside block
|
||||
assert_equal(gc.isenabled(), pre_state)
|
||||
# Even if the gc state is set explicitly within the block
|
||||
with gc_state(with_state):
|
||||
assert_equal(gc.isenabled(), with_state)
|
||||
set_gc_state(not with_state)
|
||||
assert_equal(gc.isenabled(), pre_state)
|
||||
finally:
|
||||
if gc_status:
|
||||
gc.enable()
|
||||
|
||||
|
||||
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
|
||||
def test_assert_deallocated():
|
||||
# Ordinary use
|
||||
class C:
|
||||
def __init__(self, arg0, arg1, name='myname'):
|
||||
self.name = name
|
||||
for gc_current in (True, False):
|
||||
with gc_state(gc_current):
|
||||
# We are deleting from with-block context, so that's OK
|
||||
with assert_deallocated(C, 0, 2, 'another name') as c:
|
||||
assert_equal(c.name, 'another name')
|
||||
del c
|
||||
# Or not using the thing in with-block context, also OK
|
||||
with assert_deallocated(C, 0, 2, name='third name'):
|
||||
pass
|
||||
assert_equal(gc.isenabled(), gc_current)
|
||||
|
||||
|
||||
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
|
||||
def test_assert_deallocated_nodel():
|
||||
class C:
|
||||
pass
|
||||
with pytest.raises(ReferenceError):
|
||||
# Need to delete after using if in with-block context
|
||||
# Note: assert_deallocated(C) needs to be assigned for the test
|
||||
# to function correctly. It is assigned to c, but c itself is
|
||||
# not referenced in the body of the with, it is only there for
|
||||
# the refcount.
|
||||
with assert_deallocated(C) as c:
|
||||
pass
|
||||
|
||||
|
||||
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
|
||||
def test_assert_deallocated_circular():
|
||||
class C:
|
||||
def __init__(self):
|
||||
self._circular = self
|
||||
with pytest.raises(ReferenceError):
|
||||
# Circular reference, no automatic garbage collection
|
||||
with assert_deallocated(C) as c:
|
||||
del c
|
||||
|
||||
|
||||
@pytest.mark.skipif(IS_PYPY, reason="Test not meaningful on PyPy")
|
||||
def test_assert_deallocated_circular2():
|
||||
class C:
|
||||
def __init__(self):
|
||||
self._circular = self
|
||||
with pytest.raises(ReferenceError):
|
||||
# Still circular reference, no automatic garbage collection
|
||||
with assert_deallocated(C):
|
||||
pass
|
||||
67
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__pep440.py
vendored
Normal file
67
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__pep440.py
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
from pytest import raises as assert_raises
|
||||
from scipy._lib._pep440 import Version, parse
|
||||
|
||||
|
||||
def test_main_versions():
|
||||
assert Version('1.8.0') == Version('1.8.0')
|
||||
for ver in ['1.9.0', '2.0.0', '1.8.1']:
|
||||
assert Version('1.8.0') < Version(ver)
|
||||
|
||||
for ver in ['1.7.0', '1.7.1', '0.9.9']:
|
||||
assert Version('1.8.0') > Version(ver)
|
||||
|
||||
|
||||
def test_version_1_point_10():
|
||||
# regression test for gh-2998.
|
||||
assert Version('1.9.0') < Version('1.10.0')
|
||||
assert Version('1.11.0') < Version('1.11.1')
|
||||
assert Version('1.11.0') == Version('1.11.0')
|
||||
assert Version('1.99.11') < Version('1.99.12')
|
||||
|
||||
|
||||
def test_alpha_beta_rc():
|
||||
assert Version('1.8.0rc1') == Version('1.8.0rc1')
|
||||
for ver in ['1.8.0', '1.8.0rc2']:
|
||||
assert Version('1.8.0rc1') < Version(ver)
|
||||
|
||||
for ver in ['1.8.0a2', '1.8.0b3', '1.7.2rc4']:
|
||||
assert Version('1.8.0rc1') > Version(ver)
|
||||
|
||||
assert Version('1.8.0b1') > Version('1.8.0a2')
|
||||
|
||||
|
||||
def test_dev_version():
|
||||
assert Version('1.9.0.dev+Unknown') < Version('1.9.0')
|
||||
for ver in ['1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev+ffffffff', '1.9.0.dev1']:
|
||||
assert Version('1.9.0.dev+f16acvda') < Version(ver)
|
||||
|
||||
assert Version('1.9.0.dev+f16acvda') == Version('1.9.0.dev+f16acvda')
|
||||
|
||||
|
||||
def test_dev_a_b_rc_mixed():
|
||||
assert Version('1.9.0a2.dev+f16acvda') == Version('1.9.0a2.dev+f16acvda')
|
||||
assert Version('1.9.0a2.dev+6acvda54') < Version('1.9.0a2')
|
||||
|
||||
|
||||
def test_dev0_version():
|
||||
assert Version('1.9.0.dev0+Unknown') < Version('1.9.0')
|
||||
for ver in ['1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev0+ffffffff']:
|
||||
assert Version('1.9.0.dev0+f16acvda') < Version(ver)
|
||||
|
||||
assert Version('1.9.0.dev0+f16acvda') == Version('1.9.0.dev0+f16acvda')
|
||||
|
||||
|
||||
def test_dev0_a_b_rc_mixed():
|
||||
assert Version('1.9.0a2.dev0+f16acvda') == Version('1.9.0a2.dev0+f16acvda')
|
||||
assert Version('1.9.0a2.dev0+6acvda54') < Version('1.9.0a2')
|
||||
|
||||
|
||||
def test_raises():
|
||||
for ver in ['1,9.0', '1.7.x']:
|
||||
assert_raises(ValueError, Version, ver)
|
||||
|
||||
def test_legacy_version():
|
||||
# Non-PEP-440 version identifiers always compare less. For NumPy this only
|
||||
# occurs on dev builds prior to 1.10.0 which are unsupported anyway.
|
||||
assert parse('invalid') < Version('0.0.0')
|
||||
assert parse('1.9.0-f16acvda') < Version('1.0.0')
|
||||
32
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__testutils.py
vendored
Normal file
32
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__testutils.py
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
import sys
|
||||
from scipy._lib._testutils import _parse_size, _get_mem_available
|
||||
import pytest
|
||||
|
||||
|
||||
def test__parse_size():
|
||||
expected = {
|
||||
'12': 12e6,
|
||||
'12 b': 12,
|
||||
'12k': 12e3,
|
||||
' 12 M ': 12e6,
|
||||
' 12 G ': 12e9,
|
||||
' 12Tb ': 12e12,
|
||||
'12 Mib ': 12 * 1024.0**2,
|
||||
'12Tib': 12 * 1024.0**4,
|
||||
}
|
||||
|
||||
for inp, outp in sorted(expected.items()):
|
||||
if outp is None:
|
||||
with pytest.raises(ValueError):
|
||||
_parse_size(inp)
|
||||
else:
|
||||
assert _parse_size(inp) == outp
|
||||
|
||||
|
||||
def test__mem_available():
|
||||
# May return None on non-Linux platforms
|
||||
available = _get_mem_available()
|
||||
if sys.platform.startswith('linux'):
|
||||
assert available >= 0
|
||||
else:
|
||||
assert available is None or available >= 0
|
||||
51
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__threadsafety.py
vendored
Normal file
51
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__threadsafety.py
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
|
||||
from numpy.testing import assert_
|
||||
from pytest import raises as assert_raises
|
||||
|
||||
from scipy._lib._threadsafety import ReentrancyLock, non_reentrant, ReentrancyError
|
||||
|
||||
|
||||
def test_parallel_threads():
|
||||
# Check that ReentrancyLock serializes work in parallel threads.
|
||||
#
|
||||
# The test is not fully deterministic, and may succeed falsely if
|
||||
# the timings go wrong.
|
||||
|
||||
lock = ReentrancyLock("failure")
|
||||
|
||||
failflag = [False]
|
||||
exceptions_raised = []
|
||||
|
||||
def worker(k):
|
||||
try:
|
||||
with lock:
|
||||
assert_(not failflag[0])
|
||||
failflag[0] = True
|
||||
time.sleep(0.1 * k)
|
||||
assert_(failflag[0])
|
||||
failflag[0] = False
|
||||
except Exception:
|
||||
exceptions_raised.append(traceback.format_exc(2))
|
||||
|
||||
threads = [threading.Thread(target=lambda k=k: worker(k))
|
||||
for k in range(3)]
|
||||
for t in threads:
|
||||
t.start()
|
||||
for t in threads:
|
||||
t.join()
|
||||
|
||||
exceptions_raised = "\n".join(exceptions_raised)
|
||||
assert_(not exceptions_raised, exceptions_raised)
|
||||
|
||||
|
||||
def test_reentering():
|
||||
# Check that ReentrancyLock prevents re-entering from the same thread.
|
||||
|
||||
@non_reentrant()
|
||||
def func(x):
|
||||
return func(x)
|
||||
|
||||
assert_raises(ReentrancyError, func, 0)
|
||||
380
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__util.py
vendored
Normal file
380
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test__util.py
vendored
Normal file
@@ -0,0 +1,380 @@
|
||||
from multiprocessing import Pool
|
||||
from multiprocessing.pool import Pool as PWL
|
||||
import os
|
||||
import re
|
||||
import math
|
||||
from fractions import Fraction
|
||||
|
||||
import numpy as np
|
||||
from numpy.testing import assert_equal, assert_
|
||||
import pytest
|
||||
from pytest import raises as assert_raises, deprecated_call
|
||||
|
||||
import scipy
|
||||
from scipy._lib._util import (_aligned_zeros, check_random_state, MapWrapper,
|
||||
getfullargspec_no_self, FullArgSpec,
|
||||
rng_integers, _validate_int, _rename_parameter,
|
||||
_contains_nan)
|
||||
|
||||
|
||||
def test__aligned_zeros():
|
||||
niter = 10
|
||||
|
||||
def check(shape, dtype, order, align):
|
||||
err_msg = repr((shape, dtype, order, align))
|
||||
x = _aligned_zeros(shape, dtype, order, align=align)
|
||||
if align is None:
|
||||
align = np.dtype(dtype).alignment
|
||||
assert_equal(x.__array_interface__['data'][0] % align, 0)
|
||||
if hasattr(shape, '__len__'):
|
||||
assert_equal(x.shape, shape, err_msg)
|
||||
else:
|
||||
assert_equal(x.shape, (shape,), err_msg)
|
||||
assert_equal(x.dtype, dtype)
|
||||
if order == "C":
|
||||
assert_(x.flags.c_contiguous, err_msg)
|
||||
elif order == "F":
|
||||
if x.size > 0:
|
||||
# Size-0 arrays get invalid flags on NumPy 1.5
|
||||
assert_(x.flags.f_contiguous, err_msg)
|
||||
elif order is None:
|
||||
assert_(x.flags.c_contiguous, err_msg)
|
||||
else:
|
||||
raise ValueError()
|
||||
|
||||
# try various alignments
|
||||
for align in [1, 2, 3, 4, 8, 16, 32, 64, None]:
|
||||
for n in [0, 1, 3, 11]:
|
||||
for order in ["C", "F", None]:
|
||||
for dtype in [np.uint8, np.float64]:
|
||||
for shape in [n, (1, 2, 3, n)]:
|
||||
for j in range(niter):
|
||||
check(shape, dtype, order, align)
|
||||
|
||||
|
||||
def test_check_random_state():
|
||||
# If seed is None, return the RandomState singleton used by np.random.
|
||||
# If seed is an int, return a new RandomState instance seeded with seed.
|
||||
# If seed is already a RandomState instance, return it.
|
||||
# Otherwise raise ValueError.
|
||||
rsi = check_random_state(1)
|
||||
assert_equal(type(rsi), np.random.RandomState)
|
||||
rsi = check_random_state(rsi)
|
||||
assert_equal(type(rsi), np.random.RandomState)
|
||||
rsi = check_random_state(None)
|
||||
assert_equal(type(rsi), np.random.RandomState)
|
||||
assert_raises(ValueError, check_random_state, 'a')
|
||||
if hasattr(np.random, 'Generator'):
|
||||
# np.random.Generator is only available in NumPy >= 1.17
|
||||
rg = np.random.Generator(np.random.PCG64())
|
||||
rsi = check_random_state(rg)
|
||||
assert_equal(type(rsi), np.random.Generator)
|
||||
|
||||
|
||||
def test_getfullargspec_no_self():
|
||||
p = MapWrapper(1)
|
||||
argspec = getfullargspec_no_self(p.__init__)
|
||||
assert_equal(argspec, FullArgSpec(['pool'], None, None, (1,), [],
|
||||
None, {}))
|
||||
argspec = getfullargspec_no_self(p.__call__)
|
||||
assert_equal(argspec, FullArgSpec(['func', 'iterable'], None, None, None,
|
||||
[], None, {}))
|
||||
|
||||
class _rv_generic:
|
||||
def _rvs(self, a, b=2, c=3, *args, size=None, **kwargs):
|
||||
return None
|
||||
|
||||
rv_obj = _rv_generic()
|
||||
argspec = getfullargspec_no_self(rv_obj._rvs)
|
||||
assert_equal(argspec, FullArgSpec(['a', 'b', 'c'], 'args', 'kwargs',
|
||||
(2, 3), ['size'], {'size': None}, {}))
|
||||
|
||||
|
||||
def test_mapwrapper_serial():
|
||||
in_arg = np.arange(10.)
|
||||
out_arg = np.sin(in_arg)
|
||||
|
||||
p = MapWrapper(1)
|
||||
assert_(p._mapfunc is map)
|
||||
assert_(p.pool is None)
|
||||
assert_(p._own_pool is False)
|
||||
out = list(p(np.sin, in_arg))
|
||||
assert_equal(out, out_arg)
|
||||
|
||||
with assert_raises(RuntimeError):
|
||||
p = MapWrapper(0)
|
||||
|
||||
|
||||
def test_pool():
|
||||
with Pool(2) as p:
|
||||
p.map(math.sin, [1, 2, 3, 4])
|
||||
|
||||
|
||||
def test_mapwrapper_parallel():
|
||||
in_arg = np.arange(10.)
|
||||
out_arg = np.sin(in_arg)
|
||||
|
||||
with MapWrapper(2) as p:
|
||||
out = p(np.sin, in_arg)
|
||||
assert_equal(list(out), out_arg)
|
||||
|
||||
assert_(p._own_pool is True)
|
||||
assert_(isinstance(p.pool, PWL))
|
||||
assert_(p._mapfunc is not None)
|
||||
|
||||
# the context manager should've closed the internal pool
|
||||
# check that it has by asking it to calculate again.
|
||||
with assert_raises(Exception) as excinfo:
|
||||
p(np.sin, in_arg)
|
||||
|
||||
assert_(excinfo.type is ValueError)
|
||||
|
||||
# can also set a PoolWrapper up with a map-like callable instance
|
||||
with Pool(2) as p:
|
||||
q = MapWrapper(p.map)
|
||||
|
||||
assert_(q._own_pool is False)
|
||||
q.close()
|
||||
|
||||
# closing the PoolWrapper shouldn't close the internal pool
|
||||
# because it didn't create it
|
||||
out = p.map(np.sin, in_arg)
|
||||
assert_equal(list(out), out_arg)
|
||||
|
||||
|
||||
# get our custom ones and a few from the "import *" cases
|
||||
@pytest.mark.parametrize(
|
||||
'key', ('ifft', 'diag', 'arccos', 'randn', 'rand', 'array'))
|
||||
def test_numpy_deprecation(key):
|
||||
"""Test that 'from numpy import *' functions are deprecated."""
|
||||
if key in ('ifft', 'diag', 'arccos'):
|
||||
arg = [1.0, 0.]
|
||||
elif key == 'finfo':
|
||||
arg = float
|
||||
else:
|
||||
arg = 2
|
||||
func = getattr(scipy, key)
|
||||
match = r'scipy\.%s is deprecated.*2\.0\.0' % key
|
||||
with deprecated_call(match=match) as dep:
|
||||
func(arg) # deprecated
|
||||
# in case we catch more than one dep warning
|
||||
fnames = [os.path.splitext(d.filename)[0] for d in dep.list]
|
||||
basenames = [os.path.basename(fname) for fname in fnames]
|
||||
assert 'test__util' in basenames
|
||||
if key in ('rand', 'randn'):
|
||||
root = np.random
|
||||
elif key == 'ifft':
|
||||
root = np.fft
|
||||
else:
|
||||
root = np
|
||||
func_np = getattr(root, key)
|
||||
func_np(arg) # not deprecated
|
||||
assert func_np is not func
|
||||
# classes should remain classes
|
||||
if isinstance(func_np, type):
|
||||
assert isinstance(func, type)
|
||||
|
||||
|
||||
def test_numpy_deprecation_functionality():
|
||||
# Check that the deprecation wrappers don't break basic NumPy
|
||||
# functionality
|
||||
with deprecated_call():
|
||||
x = scipy.array([1, 2, 3], dtype=scipy.float64)
|
||||
assert x.dtype == scipy.float64
|
||||
assert x.dtype == np.float64
|
||||
|
||||
x = scipy.finfo(scipy.float32)
|
||||
assert x.eps == np.finfo(np.float32).eps
|
||||
|
||||
assert scipy.float64 == np.float64
|
||||
assert issubclass(np.float64, scipy.float64)
|
||||
|
||||
|
||||
def test_rng_integers():
|
||||
rng = np.random.RandomState()
|
||||
|
||||
# test that numbers are inclusive of high point
|
||||
arr = rng_integers(rng, low=2, high=5, size=100, endpoint=True)
|
||||
assert np.max(arr) == 5
|
||||
assert np.min(arr) == 2
|
||||
assert arr.shape == (100, )
|
||||
|
||||
# test that numbers are inclusive of high point
|
||||
arr = rng_integers(rng, low=5, size=100, endpoint=True)
|
||||
assert np.max(arr) == 5
|
||||
assert np.min(arr) == 0
|
||||
assert arr.shape == (100, )
|
||||
|
||||
# test that numbers are exclusive of high point
|
||||
arr = rng_integers(rng, low=2, high=5, size=100, endpoint=False)
|
||||
assert np.max(arr) == 4
|
||||
assert np.min(arr) == 2
|
||||
assert arr.shape == (100, )
|
||||
|
||||
# test that numbers are exclusive of high point
|
||||
arr = rng_integers(rng, low=5, size=100, endpoint=False)
|
||||
assert np.max(arr) == 4
|
||||
assert np.min(arr) == 0
|
||||
assert arr.shape == (100, )
|
||||
|
||||
# now try with np.random.Generator
|
||||
try:
|
||||
rng = np.random.default_rng()
|
||||
except AttributeError:
|
||||
return
|
||||
|
||||
# test that numbers are inclusive of high point
|
||||
arr = rng_integers(rng, low=2, high=5, size=100, endpoint=True)
|
||||
assert np.max(arr) == 5
|
||||
assert np.min(arr) == 2
|
||||
assert arr.shape == (100, )
|
||||
|
||||
# test that numbers are inclusive of high point
|
||||
arr = rng_integers(rng, low=5, size=100, endpoint=True)
|
||||
assert np.max(arr) == 5
|
||||
assert np.min(arr) == 0
|
||||
assert arr.shape == (100, )
|
||||
|
||||
# test that numbers are exclusive of high point
|
||||
arr = rng_integers(rng, low=2, high=5, size=100, endpoint=False)
|
||||
assert np.max(arr) == 4
|
||||
assert np.min(arr) == 2
|
||||
assert arr.shape == (100, )
|
||||
|
||||
# test that numbers are exclusive of high point
|
||||
arr = rng_integers(rng, low=5, size=100, endpoint=False)
|
||||
assert np.max(arr) == 4
|
||||
assert np.min(arr) == 0
|
||||
assert arr.shape == (100, )
|
||||
|
||||
|
||||
class TestValidateInt:
|
||||
|
||||
@pytest.mark.parametrize('n', [4, np.uint8(4), np.int16(4), np.array(4)])
|
||||
def test_validate_int(self, n):
|
||||
n = _validate_int(n, 'n')
|
||||
assert n == 4
|
||||
|
||||
@pytest.mark.parametrize('n', [4.0, np.array([4]), Fraction(4, 1)])
|
||||
def test_validate_int_bad(self, n):
|
||||
with pytest.raises(TypeError, match='n must be an integer'):
|
||||
_validate_int(n, 'n')
|
||||
|
||||
def test_validate_int_below_min(self):
|
||||
with pytest.raises(ValueError, match='n must be an integer not '
|
||||
'less than 0'):
|
||||
_validate_int(-1, 'n', 0)
|
||||
|
||||
|
||||
class TestRenameParameter:
|
||||
# check that wrapper `_rename_parameter` for backward-compatible
|
||||
# keyword renaming works correctly
|
||||
|
||||
# Example method/function that still accepts keyword `old`
|
||||
@_rename_parameter("old", "new")
|
||||
def old_keyword_still_accepted(self, new):
|
||||
return new
|
||||
|
||||
# Example method/function for which keyword `old` is deprecated
|
||||
@_rename_parameter("old", "new", dep_version="1.9.0")
|
||||
def old_keyword_deprecated(self, new):
|
||||
return new
|
||||
|
||||
def test_old_keyword_still_accepted(self):
|
||||
# positional argument and both keyword work identically
|
||||
res1 = self.old_keyword_still_accepted(10)
|
||||
res2 = self.old_keyword_still_accepted(new=10)
|
||||
res3 = self.old_keyword_still_accepted(old=10)
|
||||
assert res1 == res2 == res3 == 10
|
||||
|
||||
# unexpected keyword raises an error
|
||||
message = re.escape("old_keyword_still_accepted() got an unexpected")
|
||||
with pytest.raises(TypeError, match=message):
|
||||
self.old_keyword_still_accepted(unexpected=10)
|
||||
|
||||
# multiple values for the same parameter raises an error
|
||||
message = re.escape("old_keyword_still_accepted() got multiple")
|
||||
with pytest.raises(TypeError, match=message):
|
||||
self.old_keyword_still_accepted(10, new=10)
|
||||
with pytest.raises(TypeError, match=message):
|
||||
self.old_keyword_still_accepted(10, old=10)
|
||||
with pytest.raises(TypeError, match=message):
|
||||
self.old_keyword_still_accepted(new=10, old=10)
|
||||
|
||||
def test_old_keyword_deprecated(self):
|
||||
# positional argument and both keyword work identically,
|
||||
# but use of old keyword results in DeprecationWarning
|
||||
dep_msg = "Use of keyword argument `old` is deprecated"
|
||||
res1 = self.old_keyword_deprecated(10)
|
||||
res2 = self.old_keyword_deprecated(new=10)
|
||||
with pytest.warns(DeprecationWarning, match=dep_msg):
|
||||
res3 = self.old_keyword_deprecated(old=10)
|
||||
assert res1 == res2 == res3 == 10
|
||||
|
||||
# unexpected keyword raises an error
|
||||
message = re.escape("old_keyword_deprecated() got an unexpected")
|
||||
with pytest.raises(TypeError, match=message):
|
||||
self.old_keyword_deprecated(unexpected=10)
|
||||
|
||||
# multiple values for the same parameter raises an error and,
|
||||
# if old keyword is used, results in DeprecationWarning
|
||||
message = re.escape("old_keyword_deprecated() got multiple")
|
||||
with pytest.raises(TypeError, match=message):
|
||||
self.old_keyword_deprecated(10, new=10)
|
||||
with pytest.raises(TypeError, match=message), \
|
||||
pytest.warns(DeprecationWarning, match=dep_msg):
|
||||
self.old_keyword_deprecated(10, old=10)
|
||||
with pytest.raises(TypeError, match=message), \
|
||||
pytest.warns(DeprecationWarning, match=dep_msg):
|
||||
self.old_keyword_deprecated(new=10, old=10)
|
||||
|
||||
|
||||
class TestContainsNaNTest:
|
||||
|
||||
def test_policy(self):
|
||||
data = np.array([1, 2, 3, np.nan])
|
||||
|
||||
contains_nan, nan_policy = _contains_nan(data, nan_policy="propagate")
|
||||
assert contains_nan
|
||||
assert nan_policy == "propagate"
|
||||
|
||||
contains_nan, nan_policy = _contains_nan(data, nan_policy="omit")
|
||||
assert contains_nan
|
||||
assert nan_policy == "omit"
|
||||
|
||||
msg = "The input contains nan values"
|
||||
with pytest.raises(ValueError, match=msg):
|
||||
_contains_nan(data, nan_policy="raise")
|
||||
|
||||
msg = "nan_policy must be one of"
|
||||
with pytest.raises(ValueError, match=msg):
|
||||
_contains_nan(data, nan_policy="nan")
|
||||
|
||||
def test_contains_nan_1d(self):
|
||||
data1 = np.array([1, 2, 3])
|
||||
assert not _contains_nan(data1)[0]
|
||||
|
||||
data2 = np.array([1, 2, 3, np.nan])
|
||||
assert _contains_nan(data2)[0]
|
||||
|
||||
data3 = np.array([np.nan, 2, 3, np.nan])
|
||||
assert _contains_nan(data3)[0]
|
||||
|
||||
data4 = np.array([1, 2, "3", np.nan]) # converted to string "nan"
|
||||
assert not _contains_nan(data4)[0]
|
||||
|
||||
data5 = np.array([1, 2, "3", np.nan], dtype='object')
|
||||
assert _contains_nan(data5)[0]
|
||||
|
||||
def test_contains_nan_2d(self):
|
||||
data1 = np.array([[1, 2], [3, 4]])
|
||||
assert not _contains_nan(data1)[0]
|
||||
|
||||
data2 = np.array([[1, 2], [3, np.nan]])
|
||||
assert _contains_nan(data2)[0]
|
||||
|
||||
data3 = np.array([["1", 2], [3, np.nan]]) # converted to string "nan"
|
||||
assert not _contains_nan(data3)[0]
|
||||
|
||||
data4 = np.array([["1", 2], [3, np.nan]], dtype='object')
|
||||
assert _contains_nan(data4)[0]
|
||||
163
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_bunch.py
vendored
Normal file
163
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_bunch.py
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
|
||||
import pytest
|
||||
import pickle
|
||||
from numpy.testing import assert_equal
|
||||
from scipy._lib._bunch import _make_tuple_bunch
|
||||
|
||||
|
||||
# `Result` is defined at the top level of the module so it can be
|
||||
# used to test pickling.
|
||||
Result = _make_tuple_bunch('Result', ['x', 'y', 'z'], ['w', 'beta'])
|
||||
|
||||
|
||||
class TestMakeTupleBunch:
|
||||
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
# Tests with Result
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
|
||||
def setup_method(self):
|
||||
# Set up an instance of Result.
|
||||
self.result = Result(x=1, y=2, z=3, w=99, beta=0.5)
|
||||
|
||||
def test_attribute_access(self):
|
||||
assert_equal(self.result.x, 1)
|
||||
assert_equal(self.result.y, 2)
|
||||
assert_equal(self.result.z, 3)
|
||||
assert_equal(self.result.w, 99)
|
||||
assert_equal(self.result.beta, 0.5)
|
||||
|
||||
def test_indexing(self):
|
||||
assert_equal(self.result[0], 1)
|
||||
assert_equal(self.result[1], 2)
|
||||
assert_equal(self.result[2], 3)
|
||||
assert_equal(self.result[-1], 3)
|
||||
with pytest.raises(IndexError, match='index out of range'):
|
||||
self.result[3]
|
||||
|
||||
def test_unpacking(self):
|
||||
x0, y0, z0 = self.result
|
||||
assert_equal((x0, y0, z0), (1, 2, 3))
|
||||
assert_equal(self.result, (1, 2, 3))
|
||||
|
||||
def test_slice(self):
|
||||
assert_equal(self.result[1:], (2, 3))
|
||||
assert_equal(self.result[::2], (1, 3))
|
||||
assert_equal(self.result[::-1], (3, 2, 1))
|
||||
|
||||
def test_len(self):
|
||||
assert_equal(len(self.result), 3)
|
||||
|
||||
def test_repr(self):
|
||||
s = repr(self.result)
|
||||
assert_equal(s, 'Result(x=1, y=2, z=3, w=99, beta=0.5)')
|
||||
|
||||
def test_hash(self):
|
||||
assert_equal(hash(self.result), hash((1, 2, 3)))
|
||||
|
||||
def test_pickle(self):
|
||||
s = pickle.dumps(self.result)
|
||||
obj = pickle.loads(s)
|
||||
assert isinstance(obj, Result)
|
||||
assert_equal(obj.x, self.result.x)
|
||||
assert_equal(obj.y, self.result.y)
|
||||
assert_equal(obj.z, self.result.z)
|
||||
assert_equal(obj.w, self.result.w)
|
||||
assert_equal(obj.beta, self.result.beta)
|
||||
|
||||
def test_read_only_existing(self):
|
||||
with pytest.raises(AttributeError, match="can't set attribute"):
|
||||
self.result.x = -1
|
||||
|
||||
def test_read_only_new(self):
|
||||
self.result.plate_of_shrimp = "lattice of coincidence"
|
||||
assert self.result.plate_of_shrimp == "lattice of coincidence"
|
||||
|
||||
def test_constructor_missing_parameter(self):
|
||||
with pytest.raises(TypeError, match='missing'):
|
||||
# `w` is missing.
|
||||
Result(x=1, y=2, z=3, beta=0.75)
|
||||
|
||||
def test_constructor_incorrect_parameter(self):
|
||||
with pytest.raises(TypeError, match='unexpected'):
|
||||
# `foo` is not an existing field.
|
||||
Result(x=1, y=2, z=3, w=123, beta=0.75, foo=999)
|
||||
|
||||
def test_module(self):
|
||||
m = 'scipy._lib.tests.test_bunch'
|
||||
assert_equal(Result.__module__, m)
|
||||
assert_equal(self.result.__module__, m)
|
||||
|
||||
def test_extra_fields_per_instance(self):
|
||||
# This test exists to ensure that instances of the same class
|
||||
# store their own values for the extra fields. That is, the values
|
||||
# are stored per instance and not in the class.
|
||||
result1 = Result(x=1, y=2, z=3, w=-1, beta=0.0)
|
||||
result2 = Result(x=4, y=5, z=6, w=99, beta=1.0)
|
||||
assert_equal(result1.w, -1)
|
||||
assert_equal(result1.beta, 0.0)
|
||||
# The rest of these checks aren't essential, but let's check
|
||||
# them anyway.
|
||||
assert_equal(result1[:], (1, 2, 3))
|
||||
assert_equal(result2.w, 99)
|
||||
assert_equal(result2.beta, 1.0)
|
||||
assert_equal(result2[:], (4, 5, 6))
|
||||
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
# Other tests
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
|
||||
def test_extra_field_names_is_optional(self):
|
||||
Square = _make_tuple_bunch('Square', ['width', 'height'])
|
||||
sq = Square(width=1, height=2)
|
||||
assert_equal(sq.width, 1)
|
||||
assert_equal(sq.height, 2)
|
||||
s = repr(sq)
|
||||
assert_equal(s, 'Square(width=1, height=2)')
|
||||
|
||||
def test_tuple_like(self):
|
||||
Tup = _make_tuple_bunch('Tup', ['a', 'b'])
|
||||
tu = Tup(a=1, b=2)
|
||||
assert isinstance(tu, tuple)
|
||||
assert isinstance(tu + (1,), tuple)
|
||||
|
||||
def test_explicit_module(self):
|
||||
m = 'some.module.name'
|
||||
Foo = _make_tuple_bunch('Foo', ['x'], ['a', 'b'], module=m)
|
||||
foo = Foo(x=1, a=355, b=113)
|
||||
assert_equal(Foo.__module__, m)
|
||||
assert_equal(foo.__module__, m)
|
||||
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
# Argument validation
|
||||
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
|
||||
|
||||
@pytest.mark.parametrize('args', [('123', ['a'], ['b']),
|
||||
('Foo', ['-3'], ['x']),
|
||||
('Foo', ['a'], ['+-*/'])])
|
||||
def test_identifiers_not_allowed(self, args):
|
||||
with pytest.raises(ValueError, match='identifiers'):
|
||||
_make_tuple_bunch(*args)
|
||||
|
||||
@pytest.mark.parametrize('args', [('Foo', ['a', 'b', 'a'], ['x']),
|
||||
('Foo', ['a', 'b'], ['b', 'x'])])
|
||||
def test_repeated_field_names(self, args):
|
||||
with pytest.raises(ValueError, match='Duplicate'):
|
||||
_make_tuple_bunch(*args)
|
||||
|
||||
@pytest.mark.parametrize('args', [('Foo', ['_a'], ['x']),
|
||||
('Foo', ['a'], ['_x'])])
|
||||
def test_leading_underscore_not_allowed(self, args):
|
||||
with pytest.raises(ValueError, match='underscore'):
|
||||
_make_tuple_bunch(*args)
|
||||
|
||||
@pytest.mark.parametrize('args', [('Foo', ['def'], ['x']),
|
||||
('Foo', ['a'], ['or']),
|
||||
('and', ['a'], ['x'])])
|
||||
def test_keyword_not_allowed_in_fields(self, args):
|
||||
with pytest.raises(ValueError, match='keyword'):
|
||||
_make_tuple_bunch(*args)
|
||||
|
||||
def test_at_least_one_field_name_required(self):
|
||||
with pytest.raises(ValueError, match='at least one name'):
|
||||
_make_tuple_bunch('Qwerty', [], ['a', 'b'])
|
||||
197
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_ccallback.py
vendored
Normal file
197
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_ccallback.py
vendored
Normal file
@@ -0,0 +1,197 @@
|
||||
from numpy.testing import assert_equal, assert_
|
||||
from pytest import raises as assert_raises
|
||||
|
||||
import time
|
||||
import pytest
|
||||
import ctypes
|
||||
import threading
|
||||
from scipy._lib import _ccallback_c as _test_ccallback_cython
|
||||
from scipy._lib import _test_ccallback
|
||||
from scipy._lib._ccallback import LowLevelCallable
|
||||
|
||||
try:
|
||||
import cffi
|
||||
HAVE_CFFI = True
|
||||
except ImportError:
|
||||
HAVE_CFFI = False
|
||||
|
||||
|
||||
ERROR_VALUE = 2.0
|
||||
|
||||
|
||||
def callback_python(a, user_data=None):
|
||||
if a == ERROR_VALUE:
|
||||
raise ValueError("bad value")
|
||||
|
||||
if user_data is None:
|
||||
return a + 1
|
||||
else:
|
||||
return a + user_data
|
||||
|
||||
def _get_cffi_func(base, signature):
|
||||
if not HAVE_CFFI:
|
||||
pytest.skip("cffi not installed")
|
||||
|
||||
# Get function address
|
||||
voidp = ctypes.cast(base, ctypes.c_void_p)
|
||||
address = voidp.value
|
||||
|
||||
# Create corresponding cffi handle
|
||||
ffi = cffi.FFI()
|
||||
func = ffi.cast(signature, address)
|
||||
return func
|
||||
|
||||
|
||||
def _get_ctypes_data():
|
||||
value = ctypes.c_double(2.0)
|
||||
return ctypes.cast(ctypes.pointer(value), ctypes.c_voidp)
|
||||
|
||||
|
||||
def _get_cffi_data():
|
||||
if not HAVE_CFFI:
|
||||
pytest.skip("cffi not installed")
|
||||
ffi = cffi.FFI()
|
||||
return ffi.new('double *', 2.0)
|
||||
|
||||
|
||||
CALLERS = {
|
||||
'simple': _test_ccallback.test_call_simple,
|
||||
'nodata': _test_ccallback.test_call_nodata,
|
||||
'nonlocal': _test_ccallback.test_call_nonlocal,
|
||||
'cython': _test_ccallback_cython.test_call_cython,
|
||||
}
|
||||
|
||||
# These functions have signatures known to the callers
|
||||
FUNCS = {
|
||||
'python': lambda: callback_python,
|
||||
'capsule': lambda: _test_ccallback.test_get_plus1_capsule(),
|
||||
'cython': lambda: LowLevelCallable.from_cython(_test_ccallback_cython, "plus1_cython"),
|
||||
'ctypes': lambda: _test_ccallback_cython.plus1_ctypes,
|
||||
'cffi': lambda: _get_cffi_func(_test_ccallback_cython.plus1_ctypes,
|
||||
'double (*)(double, int *, void *)'),
|
||||
'capsule_b': lambda: _test_ccallback.test_get_plus1b_capsule(),
|
||||
'cython_b': lambda: LowLevelCallable.from_cython(_test_ccallback_cython, "plus1b_cython"),
|
||||
'ctypes_b': lambda: _test_ccallback_cython.plus1b_ctypes,
|
||||
'cffi_b': lambda: _get_cffi_func(_test_ccallback_cython.plus1b_ctypes,
|
||||
'double (*)(double, double, int *, void *)'),
|
||||
}
|
||||
|
||||
# These functions have signatures the callers don't know
|
||||
BAD_FUNCS = {
|
||||
'capsule_bc': lambda: _test_ccallback.test_get_plus1bc_capsule(),
|
||||
'cython_bc': lambda: LowLevelCallable.from_cython(_test_ccallback_cython, "plus1bc_cython"),
|
||||
'ctypes_bc': lambda: _test_ccallback_cython.plus1bc_ctypes,
|
||||
'cffi_bc': lambda: _get_cffi_func(_test_ccallback_cython.plus1bc_ctypes,
|
||||
'double (*)(double, double, double, int *, void *)'),
|
||||
}
|
||||
|
||||
USER_DATAS = {
|
||||
'ctypes': _get_ctypes_data,
|
||||
'cffi': _get_cffi_data,
|
||||
'capsule': _test_ccallback.test_get_data_capsule,
|
||||
}
|
||||
|
||||
|
||||
def test_callbacks():
|
||||
def check(caller, func, user_data):
|
||||
caller = CALLERS[caller]
|
||||
func = FUNCS[func]()
|
||||
user_data = USER_DATAS[user_data]()
|
||||
|
||||
if func is callback_python:
|
||||
func2 = lambda x: func(x, 2.0)
|
||||
else:
|
||||
func2 = LowLevelCallable(func, user_data)
|
||||
func = LowLevelCallable(func)
|
||||
|
||||
# Test basic call
|
||||
assert_equal(caller(func, 1.0), 2.0)
|
||||
|
||||
# Test 'bad' value resulting to an error
|
||||
assert_raises(ValueError, caller, func, ERROR_VALUE)
|
||||
|
||||
# Test passing in user_data
|
||||
assert_equal(caller(func2, 1.0), 3.0)
|
||||
|
||||
for caller in sorted(CALLERS.keys()):
|
||||
for func in sorted(FUNCS.keys()):
|
||||
for user_data in sorted(USER_DATAS.keys()):
|
||||
check(caller, func, user_data)
|
||||
|
||||
|
||||
def test_bad_callbacks():
|
||||
def check(caller, func, user_data):
|
||||
caller = CALLERS[caller]
|
||||
user_data = USER_DATAS[user_data]()
|
||||
func = BAD_FUNCS[func]()
|
||||
|
||||
if func is callback_python:
|
||||
func2 = lambda x: func(x, 2.0)
|
||||
else:
|
||||
func2 = LowLevelCallable(func, user_data)
|
||||
func = LowLevelCallable(func)
|
||||
|
||||
# Test that basic call fails
|
||||
assert_raises(ValueError, caller, LowLevelCallable(func), 1.0)
|
||||
|
||||
# Test that passing in user_data also fails
|
||||
assert_raises(ValueError, caller, func2, 1.0)
|
||||
|
||||
# Test error message
|
||||
llfunc = LowLevelCallable(func)
|
||||
try:
|
||||
caller(llfunc, 1.0)
|
||||
except ValueError as err:
|
||||
msg = str(err)
|
||||
assert_(llfunc.signature in msg, msg)
|
||||
assert_('double (double, double, int *, void *)' in msg, msg)
|
||||
|
||||
for caller in sorted(CALLERS.keys()):
|
||||
for func in sorted(BAD_FUNCS.keys()):
|
||||
for user_data in sorted(USER_DATAS.keys()):
|
||||
check(caller, func, user_data)
|
||||
|
||||
|
||||
def test_signature_override():
|
||||
caller = _test_ccallback.test_call_simple
|
||||
func = _test_ccallback.test_get_plus1_capsule()
|
||||
|
||||
llcallable = LowLevelCallable(func, signature="bad signature")
|
||||
assert_equal(llcallable.signature, "bad signature")
|
||||
assert_raises(ValueError, caller, llcallable, 3)
|
||||
|
||||
llcallable = LowLevelCallable(func, signature="double (double, int *, void *)")
|
||||
assert_equal(llcallable.signature, "double (double, int *, void *)")
|
||||
assert_equal(caller(llcallable, 3), 4)
|
||||
|
||||
|
||||
def test_threadsafety():
|
||||
def callback(a, caller):
|
||||
if a <= 0:
|
||||
return 1
|
||||
else:
|
||||
res = caller(lambda x: callback(x, caller), a - 1)
|
||||
return 2*res
|
||||
|
||||
def check(caller):
|
||||
caller = CALLERS[caller]
|
||||
|
||||
results = []
|
||||
|
||||
count = 10
|
||||
|
||||
def run():
|
||||
time.sleep(0.01)
|
||||
r = caller(lambda x: callback(x, caller), count)
|
||||
results.append(r)
|
||||
|
||||
threads = [threading.Thread(target=run) for j in range(20)]
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
|
||||
assert_equal(results, [2.0**count]*len(threads))
|
||||
|
||||
for caller in CALLERS.keys():
|
||||
check(caller)
|
||||
10
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_deprecation.py
vendored
Normal file
10
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_deprecation.py
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
import pytest
|
||||
|
||||
|
||||
def test_cython_api_deprecation():
|
||||
match = ("`scipy._lib._test_deprecation_def.foo_deprecated` "
|
||||
"is deprecated, use `foo` instead!\n"
|
||||
"Deprecated in Scipy 42.0.0")
|
||||
with pytest.warns(DeprecationWarning, match=match):
|
||||
from .. import _test_deprecation_call
|
||||
assert _test_deprecation_call.call() == (1, 1)
|
||||
53
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_import_cycles.py
vendored
Normal file
53
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_import_cycles.py
vendored
Normal file
@@ -0,0 +1,53 @@
|
||||
import sys
|
||||
import subprocess
|
||||
|
||||
|
||||
MODULES = [
|
||||
"scipy.cluster",
|
||||
"scipy.cluster.vq",
|
||||
"scipy.cluster.hierarchy",
|
||||
"scipy.constants",
|
||||
"scipy.fft",
|
||||
"scipy.fftpack",
|
||||
"scipy.fftpack.convolve",
|
||||
"scipy.integrate",
|
||||
"scipy.interpolate",
|
||||
"scipy.io",
|
||||
"scipy.io.arff",
|
||||
"scipy.io.harwell_boeing",
|
||||
"scipy.io.idl",
|
||||
"scipy.io.matlab",
|
||||
"scipy.io.netcdf",
|
||||
"scipy.io.wavfile",
|
||||
"scipy.linalg",
|
||||
"scipy.linalg.blas",
|
||||
"scipy.linalg.cython_blas",
|
||||
"scipy.linalg.lapack",
|
||||
"scipy.linalg.cython_lapack",
|
||||
"scipy.linalg.interpolative",
|
||||
"scipy.misc",
|
||||
"scipy.ndimage",
|
||||
"scipy.odr",
|
||||
"scipy.optimize",
|
||||
"scipy.signal",
|
||||
"scipy.signal.windows",
|
||||
"scipy.sparse",
|
||||
"scipy.sparse.linalg",
|
||||
"scipy.sparse.csgraph",
|
||||
"scipy.spatial",
|
||||
"scipy.spatial.distance",
|
||||
"scipy.special",
|
||||
"scipy.stats",
|
||||
"scipy.stats.distributions",
|
||||
"scipy.stats.mstats",
|
||||
"scipy.stats.contingency"
|
||||
]
|
||||
|
||||
|
||||
def test_modules_importable():
|
||||
# Regression test for gh-6793.
|
||||
# Check that all modules are importable in a new Python process.
|
||||
# This is not necessarily true if there are import cycles present.
|
||||
for module in MODULES:
|
||||
cmd = 'import {}'.format(module)
|
||||
subprocess.check_call([sys.executable, '-c', cmd])
|
||||
326
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_public_api.py
vendored
Normal file
326
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_public_api.py
vendored
Normal file
@@ -0,0 +1,326 @@
|
||||
"""
|
||||
This test script is adopted from:
|
||||
https://github.com/numpy/numpy/blob/main/numpy/tests/test_public_api.py
|
||||
"""
|
||||
|
||||
import pkgutil
|
||||
import types
|
||||
import importlib
|
||||
import warnings
|
||||
|
||||
import scipy
|
||||
|
||||
|
||||
def check_dir(module, module_name=None):
|
||||
"""Returns a mapping of all objects with the wrong __module__ attribute."""
|
||||
if module_name is None:
|
||||
module_name = module.__name__
|
||||
results = {}
|
||||
for name in dir(module):
|
||||
item = getattr(module, name)
|
||||
if (hasattr(item, '__module__') and hasattr(item, '__name__')
|
||||
and item.__module__ != module_name):
|
||||
results[name] = item.__module__ + '.' + item.__name__
|
||||
return results
|
||||
|
||||
|
||||
def test_dir_testing():
|
||||
"""Assert that output of dir has only one "testing/tester"
|
||||
attribute without duplicate"""
|
||||
assert len(dir(scipy)) == len(set(dir(scipy)))
|
||||
|
||||
|
||||
# Historically SciPy has not used leading underscores for private submodules
|
||||
# much. This has resulted in lots of things that look like public modules
|
||||
# (i.e. things that can be imported as `import scipy.somesubmodule.somefile`),
|
||||
# but were never intended to be public. The PUBLIC_MODULES list contains
|
||||
# modules that are either public because they were meant to be, or because they
|
||||
# contain public functions/objects that aren't present in any other namespace
|
||||
# for whatever reason and therefore should be treated as public.
|
||||
PUBLIC_MODULES = ["scipy." + s for s in [
|
||||
"cluster",
|
||||
"cluster.vq",
|
||||
"cluster.hierarchy",
|
||||
"constants",
|
||||
"datasets",
|
||||
"fft",
|
||||
"fftpack",
|
||||
"integrate",
|
||||
"interpolate",
|
||||
"io",
|
||||
"io.arff",
|
||||
"io.matlab",
|
||||
"io.wavfile",
|
||||
"linalg",
|
||||
"linalg.blas",
|
||||
"linalg.cython_blas",
|
||||
"linalg.lapack",
|
||||
"linalg.cython_lapack",
|
||||
"linalg.interpolative",
|
||||
"misc",
|
||||
"ndimage",
|
||||
"odr",
|
||||
"optimize",
|
||||
"signal",
|
||||
"signal.windows",
|
||||
"sparse",
|
||||
"sparse.linalg",
|
||||
"sparse.csgraph",
|
||||
"spatial",
|
||||
"spatial.distance",
|
||||
"spatial.transform",
|
||||
"special",
|
||||
"stats",
|
||||
"stats.contingency",
|
||||
"stats.distributions",
|
||||
"stats.mstats",
|
||||
"stats.qmc",
|
||||
"stats.sampling"
|
||||
]]
|
||||
|
||||
# The PRIVATE_BUT_PRESENT_MODULES list contains modules that look public (lack
|
||||
# of underscores) but should not be used. For many of those modules the
|
||||
# current status is fine. For others it may make sense to work on making them
|
||||
# private, to clean up our public API and avoid confusion.
|
||||
# These private modules support will be removed in SciPy v2.0.0
|
||||
PRIVATE_BUT_PRESENT_MODULES = [
|
||||
'scipy.constants.codata',
|
||||
'scipy.constants.constants',
|
||||
'scipy.fftpack.basic',
|
||||
'scipy.fftpack.convolve',
|
||||
'scipy.fftpack.helper',
|
||||
'scipy.fftpack.pseudo_diffs',
|
||||
'scipy.fftpack.realtransforms',
|
||||
'scipy.integrate.odepack',
|
||||
'scipy.integrate.quadpack',
|
||||
'scipy.integrate.dop',
|
||||
'scipy.integrate.lsoda',
|
||||
'scipy.integrate.vode',
|
||||
'scipy.interpolate.dfitpack',
|
||||
'scipy.interpolate.fitpack',
|
||||
'scipy.interpolate.fitpack2',
|
||||
'scipy.interpolate.interpnd',
|
||||
'scipy.interpolate.interpolate',
|
||||
'scipy.interpolate.ndgriddata',
|
||||
'scipy.interpolate.polyint',
|
||||
'scipy.interpolate.rbf',
|
||||
'scipy.io.arff.arffread',
|
||||
'scipy.io.harwell_boeing',
|
||||
'scipy.io.idl',
|
||||
'scipy.io.mmio',
|
||||
'scipy.io.netcdf',
|
||||
'scipy.io.matlab.byteordercodes',
|
||||
'scipy.io.matlab.mio',
|
||||
'scipy.io.matlab.mio4',
|
||||
'scipy.io.matlab.mio5',
|
||||
'scipy.io.matlab.mio5_params',
|
||||
'scipy.io.matlab.mio5_utils',
|
||||
'scipy.io.matlab.mio_utils',
|
||||
'scipy.io.matlab.miobase',
|
||||
'scipy.io.matlab.streams',
|
||||
'scipy.linalg.basic',
|
||||
'scipy.linalg.decomp',
|
||||
'scipy.linalg.decomp_cholesky',
|
||||
'scipy.linalg.decomp_lu',
|
||||
'scipy.linalg.decomp_qr',
|
||||
'scipy.linalg.decomp_schur',
|
||||
'scipy.linalg.decomp_svd',
|
||||
'scipy.linalg.flinalg',
|
||||
'scipy.linalg.matfuncs',
|
||||
'scipy.linalg.misc',
|
||||
'scipy.linalg.special_matrices',
|
||||
'scipy.misc.common',
|
||||
'scipy.misc.doccer',
|
||||
'scipy.ndimage.filters',
|
||||
'scipy.ndimage.fourier',
|
||||
'scipy.ndimage.interpolation',
|
||||
'scipy.ndimage.measurements',
|
||||
'scipy.ndimage.morphology',
|
||||
'scipy.odr.models',
|
||||
'scipy.odr.odrpack',
|
||||
'scipy.optimize.cobyla',
|
||||
'scipy.optimize.cython_optimize',
|
||||
'scipy.optimize.lbfgsb',
|
||||
'scipy.optimize.linesearch',
|
||||
'scipy.optimize.minpack',
|
||||
'scipy.optimize.minpack2',
|
||||
'scipy.optimize.moduleTNC',
|
||||
'scipy.optimize.nonlin',
|
||||
'scipy.optimize.optimize',
|
||||
'scipy.optimize.slsqp',
|
||||
'scipy.optimize.tnc',
|
||||
'scipy.optimize.zeros',
|
||||
'scipy.signal.bsplines',
|
||||
'scipy.signal.filter_design',
|
||||
'scipy.signal.fir_filter_design',
|
||||
'scipy.signal.lti_conversion',
|
||||
'scipy.signal.ltisys',
|
||||
'scipy.signal.signaltools',
|
||||
'scipy.signal.spectral',
|
||||
'scipy.signal.spline',
|
||||
'scipy.signal.waveforms',
|
||||
'scipy.signal.wavelets',
|
||||
'scipy.signal.windows.windows',
|
||||
'scipy.sparse.base',
|
||||
'scipy.sparse.bsr',
|
||||
'scipy.sparse.compressed',
|
||||
'scipy.sparse.construct',
|
||||
'scipy.sparse.coo',
|
||||
'scipy.sparse.csc',
|
||||
'scipy.sparse.csr',
|
||||
'scipy.sparse.data',
|
||||
'scipy.sparse.dia',
|
||||
'scipy.sparse.dok',
|
||||
'scipy.sparse.extract',
|
||||
'scipy.sparse.lil',
|
||||
'scipy.sparse.linalg.dsolve',
|
||||
'scipy.sparse.linalg.eigen',
|
||||
'scipy.sparse.linalg.interface',
|
||||
'scipy.sparse.linalg.isolve',
|
||||
'scipy.sparse.linalg.matfuncs',
|
||||
'scipy.sparse.sparsetools',
|
||||
'scipy.sparse.spfuncs',
|
||||
'scipy.sparse.sputils',
|
||||
'scipy.spatial.ckdtree',
|
||||
'scipy.spatial.kdtree',
|
||||
'scipy.spatial.qhull',
|
||||
'scipy.spatial.transform.rotation',
|
||||
'scipy.special.add_newdocs',
|
||||
'scipy.special.basic',
|
||||
'scipy.special.cython_special',
|
||||
'scipy.special.orthogonal',
|
||||
'scipy.special.sf_error',
|
||||
'scipy.special.specfun',
|
||||
'scipy.special.spfun_stats',
|
||||
'scipy.stats.biasedurn',
|
||||
'scipy.stats.kde',
|
||||
'scipy.stats.morestats',
|
||||
'scipy.stats.mstats_basic',
|
||||
'scipy.stats.mstats_extras',
|
||||
'scipy.stats.mvn',
|
||||
'scipy.stats.statlib',
|
||||
'scipy.stats.stats',
|
||||
]
|
||||
|
||||
|
||||
def is_unexpected(name):
|
||||
"""Check if this needs to be considered."""
|
||||
if '._' in name or '.tests' in name or '.setup' in name:
|
||||
return False
|
||||
|
||||
if name in PUBLIC_MODULES:
|
||||
return False
|
||||
|
||||
if name in PRIVATE_BUT_PRESENT_MODULES:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
SKIP_LIST = [
|
||||
'scipy.conftest',
|
||||
'scipy.version',
|
||||
]
|
||||
|
||||
|
||||
def test_all_modules_are_expected():
|
||||
"""
|
||||
Test that we don't add anything that looks like a new public module by
|
||||
accident. Check is based on filenames.
|
||||
"""
|
||||
|
||||
modnames = []
|
||||
for _, modname, ispkg in pkgutil.walk_packages(path=scipy.__path__,
|
||||
prefix=scipy.__name__ + '.',
|
||||
onerror=None):
|
||||
if is_unexpected(modname) and modname not in SKIP_LIST:
|
||||
# We have a name that is new. If that's on purpose, add it to
|
||||
# PUBLIC_MODULES. We don't expect to have to add anything to
|
||||
# PRIVATE_BUT_PRESENT_MODULES. Use an underscore in the name!
|
||||
modnames.append(modname)
|
||||
|
||||
if modnames:
|
||||
raise AssertionError(f'Found unexpected modules: {modnames}')
|
||||
|
||||
|
||||
# Stuff that clearly shouldn't be in the API and is detected by the next test
|
||||
# below
|
||||
SKIP_LIST_2 = [
|
||||
'scipy.char',
|
||||
'scipy.rec',
|
||||
'scipy.emath',
|
||||
'scipy.math',
|
||||
'scipy.random',
|
||||
'scipy.ctypeslib',
|
||||
'scipy.ma'
|
||||
]
|
||||
|
||||
|
||||
def test_all_modules_are_expected_2():
|
||||
"""
|
||||
Method checking all objects. The pkgutil-based method in
|
||||
`test_all_modules_are_expected` does not catch imports into a namespace,
|
||||
only filenames.
|
||||
"""
|
||||
|
||||
def find_unexpected_members(mod_name):
|
||||
members = []
|
||||
module = importlib.import_module(mod_name)
|
||||
if hasattr(module, '__all__'):
|
||||
objnames = module.__all__
|
||||
else:
|
||||
objnames = dir(module)
|
||||
|
||||
for objname in objnames:
|
||||
if not objname.startswith('_'):
|
||||
fullobjname = mod_name + '.' + objname
|
||||
if isinstance(getattr(module, objname), types.ModuleType):
|
||||
if is_unexpected(fullobjname) and fullobjname not in SKIP_LIST_2:
|
||||
members.append(fullobjname)
|
||||
|
||||
return members
|
||||
|
||||
unexpected_members = find_unexpected_members("scipy")
|
||||
for modname in PUBLIC_MODULES:
|
||||
unexpected_members.extend(find_unexpected_members(modname))
|
||||
|
||||
if unexpected_members:
|
||||
raise AssertionError("Found unexpected object(s) that look like "
|
||||
"modules: {}".format(unexpected_members))
|
||||
|
||||
|
||||
def test_api_importable():
|
||||
"""
|
||||
Check that all submodules listed higher up in this file can be imported
|
||||
Note that if a PRIVATE_BUT_PRESENT_MODULES entry goes missing, it may
|
||||
simply need to be removed from the list (deprecation may or may not be
|
||||
needed - apply common sense).
|
||||
"""
|
||||
def check_importable(module_name):
|
||||
try:
|
||||
importlib.import_module(module_name)
|
||||
except (ImportError, AttributeError):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
module_names = []
|
||||
for module_name in PUBLIC_MODULES:
|
||||
if not check_importable(module_name):
|
||||
module_names.append(module_name)
|
||||
|
||||
if module_names:
|
||||
raise AssertionError("Modules in the public API that cannot be "
|
||||
"imported: {}".format(module_names))
|
||||
|
||||
with warnings.catch_warnings(record=True) as w:
|
||||
warnings.filterwarnings('always', category=DeprecationWarning)
|
||||
warnings.filterwarnings('always', category=ImportWarning)
|
||||
for module_name in PRIVATE_BUT_PRESENT_MODULES:
|
||||
if not check_importable(module_name):
|
||||
module_names.append(module_name)
|
||||
|
||||
if module_names:
|
||||
raise AssertionError("Modules that are not really public but looked "
|
||||
"public and can not be imported: "
|
||||
"{}".format(module_names))
|
||||
18
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_scipy_version.py
vendored
Normal file
18
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_scipy_version.py
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import re
|
||||
|
||||
import scipy
|
||||
from numpy.testing import assert_
|
||||
|
||||
|
||||
def test_valid_scipy_version():
|
||||
# Verify that the SciPy version is a valid one (no .post suffix or other
|
||||
# nonsense). See NumPy issue gh-6431 for an issue caused by an invalid
|
||||
# version.
|
||||
version_pattern = r"^[0-9]+\.[0-9]+\.[0-9]+(|a[0-9]|b[0-9]|rc[0-9])"
|
||||
dev_suffix = r"(\.dev0\+.+([0-9a-f]{7}|Unknown))"
|
||||
if scipy.version.release:
|
||||
res = re.match(version_pattern, scipy.__version__)
|
||||
else:
|
||||
res = re.match(version_pattern + dev_suffix, scipy.__version__)
|
||||
|
||||
assert_(res is not None, scipy.__version__)
|
||||
42
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_tmpdirs.py
vendored
Normal file
42
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_tmpdirs.py
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
""" Test tmpdirs module """
|
||||
from os import getcwd
|
||||
from os.path import realpath, abspath, dirname, isfile, join as pjoin, exists
|
||||
|
||||
from scipy._lib._tmpdirs import tempdir, in_tempdir, in_dir
|
||||
|
||||
from numpy.testing import assert_, assert_equal
|
||||
|
||||
MY_PATH = abspath(__file__)
|
||||
MY_DIR = dirname(MY_PATH)
|
||||
|
||||
|
||||
def test_tempdir():
|
||||
with tempdir() as tmpdir:
|
||||
fname = pjoin(tmpdir, 'example_file.txt')
|
||||
with open(fname, 'wt') as fobj:
|
||||
fobj.write('a string\\n')
|
||||
assert_(not exists(tmpdir))
|
||||
|
||||
|
||||
def test_in_tempdir():
|
||||
my_cwd = getcwd()
|
||||
with in_tempdir() as tmpdir:
|
||||
with open('test.txt', 'wt') as f:
|
||||
f.write('some text')
|
||||
assert_(isfile('test.txt'))
|
||||
assert_(isfile(pjoin(tmpdir, 'test.txt')))
|
||||
assert_(not exists(tmpdir))
|
||||
assert_equal(getcwd(), my_cwd)
|
||||
|
||||
|
||||
def test_given_directory():
|
||||
# Test InGivenDirectory
|
||||
cwd = getcwd()
|
||||
with in_dir() as tmpdir:
|
||||
assert_equal(tmpdir, abspath(cwd))
|
||||
assert_equal(tmpdir, abspath(getcwd()))
|
||||
with in_dir(MY_DIR) as tmpdir:
|
||||
assert_equal(tmpdir, MY_DIR)
|
||||
assert_equal(realpath(MY_DIR), realpath(abspath(getcwd())))
|
||||
# We were deleting the given directory! Check not so now.
|
||||
assert_(isfile(MY_PATH))
|
||||
131
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_warnings.py
vendored
Normal file
131
.CondaPkg/env/Lib/site-packages/scipy/_lib/tests/test_warnings.py
vendored
Normal file
@@ -0,0 +1,131 @@
|
||||
"""
|
||||
Tests which scan for certain occurrences in the code, they may not find
|
||||
all of these occurrences but should catch almost all. This file was adapted
|
||||
from NumPy.
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
import ast
|
||||
import tokenize
|
||||
|
||||
import scipy
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class ParseCall(ast.NodeVisitor):
|
||||
def __init__(self):
|
||||
self.ls = []
|
||||
|
||||
def visit_Attribute(self, node):
|
||||
ast.NodeVisitor.generic_visit(self, node)
|
||||
self.ls.append(node.attr)
|
||||
|
||||
def visit_Name(self, node):
|
||||
self.ls.append(node.id)
|
||||
|
||||
class FindFuncs(ast.NodeVisitor):
|
||||
def __init__(self, filename):
|
||||
super().__init__()
|
||||
self.__filename = filename
|
||||
self.bad_filters = []
|
||||
self.bad_stacklevels = []
|
||||
|
||||
def visit_Call(self, node):
|
||||
p = ParseCall()
|
||||
p.visit(node.func)
|
||||
ast.NodeVisitor.generic_visit(self, node)
|
||||
|
||||
if p.ls[-1] == 'simplefilter' or p.ls[-1] == 'filterwarnings':
|
||||
if node.args[0].s == "ignore":
|
||||
self.bad_filters.append(
|
||||
"{}:{}".format(self.__filename, node.lineno))
|
||||
|
||||
if p.ls[-1] == 'warn' and (
|
||||
len(p.ls) == 1 or p.ls[-2] == 'warnings'):
|
||||
|
||||
if self.__filename == "_lib/tests/test_warnings.py":
|
||||
# This file
|
||||
return
|
||||
|
||||
# See if stacklevel exists:
|
||||
if len(node.args) == 3:
|
||||
return
|
||||
args = {kw.arg for kw in node.keywords}
|
||||
if "stacklevel" not in args:
|
||||
self.bad_stacklevels.append(
|
||||
"{}:{}".format(self.__filename, node.lineno))
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def warning_calls():
|
||||
# combined "ignore" and stacklevel error
|
||||
base = Path(scipy.__file__).parent
|
||||
|
||||
bad_filters = []
|
||||
bad_stacklevels = []
|
||||
|
||||
for path in base.rglob("*.py"):
|
||||
# use tokenize to auto-detect encoding on systems where no
|
||||
# default encoding is defined (e.g., LANG='C')
|
||||
with tokenize.open(str(path)) as file:
|
||||
tree = ast.parse(file.read(), filename=str(path))
|
||||
finder = FindFuncs(path.relative_to(base))
|
||||
finder.visit(tree)
|
||||
bad_filters.extend(finder.bad_filters)
|
||||
bad_stacklevels.extend(finder.bad_stacklevels)
|
||||
|
||||
return bad_filters, bad_stacklevels
|
||||
|
||||
|
||||
@pytest.mark.slow
|
||||
def test_warning_calls_filters(warning_calls):
|
||||
bad_filters, bad_stacklevels = warning_calls
|
||||
|
||||
# We try not to add filters in the code base, because those filters aren't
|
||||
# thread-safe. We aim to only filter in tests with
|
||||
# np.testing.suppress_warnings. However, in some cases it may prove
|
||||
# necessary to filter out warnings, because we can't (easily) fix the root
|
||||
# cause for them and we don't want users to see some warnings when they use
|
||||
# SciPy correctly. So we list exceptions here. Add new entries only if
|
||||
# there's a good reason.
|
||||
allowed_filters = (
|
||||
os.path.join('datasets', '_fetchers.py'),
|
||||
os.path.join('datasets', '__init__.py'),
|
||||
os.path.join('optimize', '_optimize.py'),
|
||||
os.path.join('sparse', '__init__.py'), # np.matrix pending-deprecation
|
||||
os.path.join('stats', '_discrete_distns.py'), # gh-14901
|
||||
os.path.join('stats', '_continuous_distns.py'),
|
||||
)
|
||||
bad_filters = [item for item in bad_filters if item.split(':')[0] not in
|
||||
allowed_filters]
|
||||
|
||||
if bad_filters:
|
||||
raise AssertionError(
|
||||
"warning ignore filter should not be used, instead, use\n"
|
||||
"numpy.testing.suppress_warnings (in tests only);\n"
|
||||
"found in:\n {}".format(
|
||||
"\n ".join(bad_filters)))
|
||||
|
||||
|
||||
@pytest.mark.slow
|
||||
@pytest.mark.xfail(reason="stacklevels currently missing")
|
||||
def test_warning_calls_stacklevels(warning_calls):
|
||||
bad_filters, bad_stacklevels = warning_calls
|
||||
|
||||
msg = ""
|
||||
|
||||
if bad_filters:
|
||||
msg += ("warning ignore filter should not be used, instead, use\n"
|
||||
"numpy.testing.suppress_warnings (in tests only);\n"
|
||||
"found in:\n {}".format("\n ".join(bad_filters)))
|
||||
msg += "\n\n"
|
||||
|
||||
if bad_stacklevels:
|
||||
msg += "warnings should have an appropriate stacklevel:\n {}".format(
|
||||
"\n ".join(bad_stacklevels))
|
||||
|
||||
if msg:
|
||||
raise AssertionError(msg)
|
||||
31
.CondaPkg/env/Lib/site-packages/scipy/_lib/uarray.py
vendored
Normal file
31
.CondaPkg/env/Lib/site-packages/scipy/_lib/uarray.py
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
"""`uarray` provides functions for generating multimethods that dispatch to
|
||||
multiple different backends
|
||||
|
||||
This should be imported, rather than `_uarray` so that an installed version could
|
||||
be used instead, if available. This means that users can call
|
||||
`uarray.set_backend` directly instead of going through SciPy.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
# Prefer an installed version of uarray, if available
|
||||
try:
|
||||
import uarray as _uarray
|
||||
except ImportError:
|
||||
_has_uarray = False
|
||||
else:
|
||||
from scipy._lib._pep440 import Version as _Version
|
||||
|
||||
_has_uarray = _Version(_uarray.__version__) >= _Version("0.8")
|
||||
del _uarray
|
||||
del _Version
|
||||
|
||||
|
||||
if _has_uarray:
|
||||
from uarray import *
|
||||
from uarray import _Function
|
||||
else:
|
||||
from ._uarray import *
|
||||
from ._uarray import _Function
|
||||
|
||||
del _has_uarray
|
||||
Reference in New Issue
Block a user