update
This commit is contained in:
0
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__init__.py
vendored
Normal file
0
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__init__.py
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/__init__.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/__init__.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_coord.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_coord.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_dtype.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_dtype.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_fast_exp.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_fast_exp.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_geometry.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_geometry.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_interpolation.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_interpolation.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_safe_as_int.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_safe_as_int.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_testing.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_testing.cpython-312.pyc
vendored
Normal file
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_utils.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_utils.cpython-312.pyc
vendored
Normal file
Binary file not shown.
Binary file not shown.
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_warnings.cpython-312.pyc
vendored
Normal file
BIN
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/__pycache__/test_warnings.cpython-312.pyc
vendored
Normal file
Binary file not shown.
91
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_coord.py
vendored
Normal file
91
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_coord.py
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
import time
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
from scipy.spatial.distance import pdist, minkowski
|
||||
|
||||
from skimage._shared.coord import ensure_spacing
|
||||
|
||||
|
||||
@pytest.mark.parametrize("p", [1, 2, np.inf])
|
||||
@pytest.mark.parametrize("size", [30, 50, None])
|
||||
def test_ensure_spacing_trivial(p, size):
|
||||
# --- Empty input
|
||||
assert ensure_spacing([], p_norm=p) == []
|
||||
|
||||
# --- A unique point
|
||||
coord = np.random.randn(1, 2)
|
||||
assert np.array_equal(coord, ensure_spacing(coord, p_norm=p, min_split_size=size))
|
||||
|
||||
# --- Verified spacing
|
||||
coord = np.random.randn(100, 2)
|
||||
|
||||
# --- 0 spacing
|
||||
assert np.array_equal(
|
||||
coord, ensure_spacing(coord, spacing=0, p_norm=p, min_split_size=size)
|
||||
)
|
||||
|
||||
# Spacing is chosen to be half the minimum distance
|
||||
spacing = pdist(coord, metric=minkowski, p=p).min() * 0.5
|
||||
|
||||
out = ensure_spacing(coord, spacing=spacing, p_norm=p, min_split_size=size)
|
||||
|
||||
assert np.array_equal(coord, out)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("ndim", [1, 2, 3, 4, 5])
|
||||
@pytest.mark.parametrize("size", [2, 10, None])
|
||||
def test_ensure_spacing_nD(ndim, size):
|
||||
coord = np.ones((5, ndim))
|
||||
|
||||
expected = np.ones((1, ndim))
|
||||
|
||||
assert np.array_equal(ensure_spacing(coord, min_split_size=size), expected)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("p", [1, 2, np.inf])
|
||||
@pytest.mark.parametrize("size", [50, 100, None])
|
||||
def test_ensure_spacing_batch_processing(p, size):
|
||||
coord = np.random.randn(100, 2)
|
||||
|
||||
# --- Consider the average distance btween the point as spacing
|
||||
spacing = np.median(pdist(coord, metric=minkowski, p=p))
|
||||
|
||||
expected = ensure_spacing(coord, spacing=spacing, p_norm=p)
|
||||
|
||||
assert np.array_equal(
|
||||
ensure_spacing(coord, spacing=spacing, p_norm=p, min_split_size=size), expected
|
||||
)
|
||||
|
||||
|
||||
def test_max_batch_size():
|
||||
"""Small batches are slow, large batches -> large allocations -> also slow.
|
||||
|
||||
https://github.com/scikit-image/scikit-image/pull/6035#discussion_r751518691
|
||||
"""
|
||||
coords = np.random.randint(low=0, high=1848, size=(40000, 2))
|
||||
tstart = time.time()
|
||||
ensure_spacing(coords, spacing=100, min_split_size=50, max_split_size=2000)
|
||||
dur1 = time.time() - tstart
|
||||
|
||||
tstart = time.time()
|
||||
ensure_spacing(coords, spacing=100, min_split_size=50, max_split_size=20000)
|
||||
dur2 = time.time() - tstart
|
||||
|
||||
# Originally checked dur1 < dur2 to assert that the default batch size was
|
||||
# faster than a much larger batch size. However, on rare occasion a CI test
|
||||
# case would fail with dur1 ~5% larger than dur2. To be more robust to
|
||||
# variable load or differences across architectures, we relax this here.
|
||||
assert dur1 < 1.33 * dur2
|
||||
|
||||
|
||||
@pytest.mark.parametrize("p", [1, 2, np.inf])
|
||||
@pytest.mark.parametrize("size", [30, 50, None])
|
||||
def test_ensure_spacing_p_norm(p, size):
|
||||
coord = np.random.randn(100, 2)
|
||||
|
||||
# --- Consider the average distance btween the point as spacing
|
||||
spacing = np.median(pdist(coord, metric=minkowski, p=p))
|
||||
out = ensure_spacing(coord, spacing=spacing, p_norm=p, min_split_size=size)
|
||||
|
||||
assert pdist(out, metric=minkowski, p=p).min() > spacing
|
||||
14
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_dtype.py
vendored
Normal file
14
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_dtype.py
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
import numpy as np
|
||||
import pytest
|
||||
|
||||
|
||||
from ..dtype import numeric_dtype_min_max, numeric_types
|
||||
|
||||
|
||||
class Test_numeric_dtype_min_max:
|
||||
@pytest.mark.parametrize("dtype", numeric_types)
|
||||
def test_all_numeric_types(self, dtype):
|
||||
min_, max_ = numeric_dtype_min_max(dtype)
|
||||
assert np.isscalar(min_)
|
||||
assert np.isscalar(max_)
|
||||
assert min_ < max_
|
||||
20
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_fast_exp.py
vendored
Normal file
20
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_fast_exp.py
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
from ..fast_exp import fast_exp
|
||||
import numpy as np
|
||||
|
||||
|
||||
def test_fast_exp():
|
||||
X = np.linspace(-5, 0, 5000, endpoint=True)
|
||||
|
||||
# Ground truth
|
||||
Y = np.exp(X)
|
||||
|
||||
# Approximation at double precision
|
||||
_y_f64 = np.array([fast_exp['float64_t'](x) for x in X])
|
||||
|
||||
# Approximation at single precision
|
||||
_y_f32 = np.array(
|
||||
[fast_exp['float32_t'](x) for x in X.astype('float32')], dtype='float32'
|
||||
)
|
||||
|
||||
for _y in [_y_f64, _y_f32]:
|
||||
assert np.abs(Y - _y).mean() < 3e-3
|
||||
81
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_geometry.py
vendored
Normal file
81
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_geometry.py
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
import pytest
|
||||
from skimage._shared._geometry import polygon_clip, polygon_area
|
||||
|
||||
import numpy as np
|
||||
from numpy.testing import assert_equal, assert_almost_equal
|
||||
|
||||
pytest.importorskip("matplotlib")
|
||||
|
||||
|
||||
hand = np.array(
|
||||
[
|
||||
[1.64516129, 1.16145833],
|
||||
[1.64516129, 1.59375],
|
||||
[1.35080645, 1.921875],
|
||||
[1.375, 2.18229167],
|
||||
[1.68548387, 1.9375],
|
||||
[1.60887097, 2.55208333],
|
||||
[1.68548387, 2.69791667],
|
||||
[1.76209677, 2.56770833],
|
||||
[1.83064516, 1.97395833],
|
||||
[1.89516129, 2.75],
|
||||
[1.9516129, 2.84895833],
|
||||
[2.01209677, 2.76041667],
|
||||
[1.99193548, 1.99479167],
|
||||
[2.11290323, 2.63020833],
|
||||
[2.2016129, 2.734375],
|
||||
[2.25403226, 2.60416667],
|
||||
[2.14919355, 1.953125],
|
||||
[2.30645161, 2.36979167],
|
||||
[2.39112903, 2.36979167],
|
||||
[2.41532258, 2.1875],
|
||||
[2.1733871, 1.703125],
|
||||
[2.07782258, 1.16666667],
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def test_polygon_area():
|
||||
x = [0, 0, 1, 1]
|
||||
y = [0, 1, 1, 0]
|
||||
|
||||
assert_almost_equal(polygon_area(y, x), 1)
|
||||
|
||||
x = [0, 0, 1]
|
||||
y = [0, 1, 1]
|
||||
|
||||
assert_almost_equal(polygon_area(y, x), 0.5)
|
||||
|
||||
x = [0, 0, 0.5, 1, 1, 0.5]
|
||||
y = [0, 1, 0.5, 1, 0, 0.5]
|
||||
|
||||
assert_almost_equal(polygon_area(y, x), 0.5)
|
||||
|
||||
|
||||
def test_poly_clip():
|
||||
x = [0, 1, 2, 1]
|
||||
y = [0, -1, 0, 1]
|
||||
|
||||
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
|
||||
assert_equal(polygon_area(yc, xc), 0.5)
|
||||
|
||||
x = [-1, 1.5, 1.5, -1]
|
||||
y = [0.5, 0.5, 1.5, 1.5]
|
||||
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
|
||||
assert_equal(polygon_area(yc, xc), 0.5)
|
||||
|
||||
|
||||
def test_hand_clip():
|
||||
(r0, c0, r1, c1) = (1.0, 1.5, 2.1, 2.5)
|
||||
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
|
||||
assert_equal(clip_r.size, 19)
|
||||
assert_equal(clip_r[0], clip_r[-1])
|
||||
assert_equal(clip_c[0], clip_c[-1])
|
||||
|
||||
(r0, c0, r1, c1) = (1.0, 1.5, 1.7, 2.5)
|
||||
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
|
||||
assert_equal(clip_r.size, 6)
|
||||
|
||||
(r0, c0, r1, c1) = (1.0, 1.5, 1.5, 2.5)
|
||||
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
|
||||
assert_equal(clip_r.size, 5)
|
||||
28
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_interpolation.py
vendored
Normal file
28
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_interpolation.py
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
from skimage._shared.interpolation import coord_map_py
|
||||
from skimage._shared.testing import assert_array_equal
|
||||
|
||||
|
||||
def test_coord_map():
|
||||
symmetric = [coord_map_py(4, n, 'S') for n in range(-6, 6)]
|
||||
expected_symmetric = [2, 3, 3, 2, 1, 0, 0, 1, 2, 3, 3, 2]
|
||||
assert_array_equal(symmetric, expected_symmetric)
|
||||
|
||||
wrap = [coord_map_py(4, n, 'W') for n in range(-6, 6)]
|
||||
expected_wrap = [2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1]
|
||||
assert_array_equal(wrap, expected_wrap)
|
||||
|
||||
edge = [coord_map_py(4, n, 'E') for n in range(-6, 6)]
|
||||
expected_edge = [0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 3, 3]
|
||||
assert_array_equal(edge, expected_edge)
|
||||
|
||||
reflect = [coord_map_py(4, n, 'R') for n in range(-6, 6)]
|
||||
expected_reflect = [0, 1, 2, 3, 2, 1, 0, 1, 2, 3, 2, 1]
|
||||
assert_array_equal(reflect, expected_reflect)
|
||||
|
||||
reflect = [coord_map_py(1, n, 'R') for n in range(-6, 6)]
|
||||
expected_reflect = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
|
||||
assert_array_equal(reflect, expected_reflect)
|
||||
|
||||
other = [coord_map_py(4, n, 'undefined') for n in range(-6, 6)]
|
||||
expected_other = list(range(-6, 6))
|
||||
assert_array_equal(other, expected_other)
|
||||
41
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_safe_as_int.py
vendored
Normal file
41
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_safe_as_int.py
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
import numpy as np
|
||||
from skimage._shared.utils import safe_as_int
|
||||
from skimage._shared import testing
|
||||
|
||||
|
||||
def test_int_cast_not_possible():
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(7.1)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int([7.1, 0.9])
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(np.r_[7.1, 0.9])
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int((7.1, 0.9))
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(((3, 4, 1), (2, 7.6, 289)))
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(7.1, 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int([7.1, 0.9], 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(np.r_[7.1, 0.9], 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int((7.1, 0.9), 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(((3, 4, 1), (2, 7.6, 289)), 0.25)
|
||||
|
||||
|
||||
def test_int_cast_possible():
|
||||
testing.assert_equal(safe_as_int(7.1, atol=0.11), 7)
|
||||
testing.assert_equal(safe_as_int(-7.1, atol=0.11), -7)
|
||||
testing.assert_equal(safe_as_int(41.9, atol=0.11), 42)
|
||||
testing.assert_array_equal(
|
||||
safe_as_int([2, 42, 5789234.0, 87, 4]), np.r_[2, 42, 5789234, 87, 4]
|
||||
)
|
||||
testing.assert_array_equal(
|
||||
safe_as_int(
|
||||
np.r_[[[3, 4, 1.000000001], [7, 2, -8.999999999], [6, 9, -4234918347.0]]]
|
||||
),
|
||||
np.r_[[[3, 4, 1], [7, 2, -9], [6, 9, -4234918347]]],
|
||||
)
|
||||
154
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_testing.py
vendored
Normal file
154
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_testing.py
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
""" Testing decorators module
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import re
|
||||
import warnings
|
||||
|
||||
import pytest
|
||||
from numpy.testing import assert_equal
|
||||
from skimage._shared.testing import (
|
||||
doctest_skip_parser,
|
||||
run_in_parallel,
|
||||
assert_stacklevel,
|
||||
)
|
||||
from skimage._shared import testing
|
||||
from skimage._shared._dependency_checks import is_wasm
|
||||
|
||||
from skimage._shared._warnings import expected_warnings
|
||||
from warnings import warn
|
||||
|
||||
|
||||
def test_skipper():
|
||||
def f():
|
||||
pass
|
||||
|
||||
class c:
|
||||
def __init__(self):
|
||||
self.me = "I think, therefore..."
|
||||
|
||||
docstring = """ Header
|
||||
|
||||
>>> something # skip if not HAVE_AMODULE
|
||||
>>> something + else
|
||||
>>> a = 1 # skip if not HAVE_BMODULE
|
||||
>>> something2 # skip if HAVE_AMODULE
|
||||
"""
|
||||
f.__doc__ = docstring
|
||||
c.__doc__ = docstring
|
||||
|
||||
global HAVE_AMODULE, HAVE_BMODULE
|
||||
HAVE_AMODULE = False
|
||||
HAVE_BMODULE = True
|
||||
|
||||
f2 = doctest_skip_parser(f)
|
||||
c2 = doctest_skip_parser(c)
|
||||
assert f is f2
|
||||
assert c is c2
|
||||
|
||||
expected = """ Header
|
||||
|
||||
>>> something # doctest: +SKIP
|
||||
>>> something + else
|
||||
>>> a = 1
|
||||
>>> something2
|
||||
"""
|
||||
assert_equal(f2.__doc__, expected)
|
||||
assert_equal(c2.__doc__, expected)
|
||||
|
||||
HAVE_AMODULE = True
|
||||
HAVE_BMODULE = False
|
||||
f.__doc__ = docstring
|
||||
c.__doc__ = docstring
|
||||
f2 = doctest_skip_parser(f)
|
||||
c2 = doctest_skip_parser(c)
|
||||
|
||||
assert f is f2
|
||||
expected = """ Header
|
||||
|
||||
>>> something
|
||||
>>> something + else
|
||||
>>> a = 1 # doctest: +SKIP
|
||||
>>> something2 # doctest: +SKIP
|
||||
"""
|
||||
assert_equal(f2.__doc__, expected)
|
||||
assert_equal(c2.__doc__, expected)
|
||||
|
||||
del HAVE_AMODULE
|
||||
f.__doc__ = docstring
|
||||
c.__doc__ = docstring
|
||||
with testing.raises(NameError):
|
||||
doctest_skip_parser(f)
|
||||
with testing.raises(NameError):
|
||||
doctest_skip_parser(c)
|
||||
|
||||
|
||||
@pytest.mark.skipif(is_wasm, reason="Cannot start threads in WASM")
|
||||
def test_run_in_parallel():
|
||||
state = []
|
||||
|
||||
@run_in_parallel()
|
||||
def change_state1():
|
||||
state.append(None)
|
||||
|
||||
change_state1()
|
||||
assert len(state) == 2
|
||||
|
||||
@run_in_parallel(num_threads=1)
|
||||
def change_state2():
|
||||
state.append(None)
|
||||
|
||||
change_state2()
|
||||
assert len(state) == 3
|
||||
|
||||
@run_in_parallel(num_threads=3)
|
||||
def change_state3():
|
||||
state.append(None)
|
||||
|
||||
change_state3()
|
||||
assert len(state) == 6
|
||||
|
||||
|
||||
def test_parallel_warning():
|
||||
@run_in_parallel()
|
||||
def change_state_warns_fails():
|
||||
warn("Test warning for test parallel", stacklevel=2)
|
||||
|
||||
with expected_warnings(['Test warning for test parallel']):
|
||||
change_state_warns_fails()
|
||||
|
||||
@run_in_parallel(warnings_matching=['Test warning for test parallel'])
|
||||
def change_state_warns_passes():
|
||||
warn("Test warning for test parallel", stacklevel=2)
|
||||
|
||||
change_state_warns_passes()
|
||||
|
||||
|
||||
def test_expected_warnings_noop():
|
||||
# This will ensure the line beolow it behaves like a no-op
|
||||
with expected_warnings(['Expected warnings test']):
|
||||
# This should behave as a no-op
|
||||
with expected_warnings(None):
|
||||
warn('Expected warnings test')
|
||||
|
||||
|
||||
class Test_assert_stacklevel:
|
||||
def raise_warning(self, *args, **kwargs):
|
||||
warnings.warn(*args, **kwargs)
|
||||
|
||||
def test_correct_stacklevel(self):
|
||||
# Should pass if stacklevel is set correctly
|
||||
with pytest.warns(UserWarning, match="passes") as record:
|
||||
self.raise_warning("passes", UserWarning, stacklevel=2)
|
||||
assert_stacklevel(record)
|
||||
|
||||
@pytest.mark.parametrize("level", [1, 3])
|
||||
def test_wrong_stacklevel(self, level):
|
||||
# AssertionError should be raised for wrong stacklevel
|
||||
with pytest.warns(UserWarning, match="wrong") as record:
|
||||
self.raise_warning("wrong", UserWarning, stacklevel=level)
|
||||
# Check that message contains expected line on right side
|
||||
line_number = inspect.currentframe().f_lineno - 2
|
||||
regex = ".*" + re.escape(f"!= {__file__}:{line_number}")
|
||||
with pytest.raises(AssertionError, match=regex):
|
||||
assert_stacklevel(record, offset=-5)
|
||||
516
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_utils.py
vendored
Normal file
516
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_utils.py
vendored
Normal file
@@ -0,0 +1,516 @@
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
|
||||
from skimage._shared import testing
|
||||
from skimage._shared.utils import (
|
||||
_supported_float_type,
|
||||
_validate_interpolation_order,
|
||||
change_default_value,
|
||||
channel_as_last_axis,
|
||||
check_nD,
|
||||
deprecate_func,
|
||||
deprecate_parameter,
|
||||
DEPRECATED,
|
||||
)
|
||||
|
||||
complex_dtypes = [np.complex64, np.complex128]
|
||||
if hasattr(np, 'complex256'):
|
||||
complex_dtypes += [np.complex256]
|
||||
|
||||
have_numpydoc = False
|
||||
try:
|
||||
import numpydoc # noqa: F401
|
||||
|
||||
have_numpydoc = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def test_change_default_value():
|
||||
@change_default_value('arg1', new_value=-1, changed_version='0.12')
|
||||
def foo(arg0, arg1=0, arg2=1):
|
||||
"""Expected docstring"""
|
||||
return arg0, arg1, arg2
|
||||
|
||||
@change_default_value(
|
||||
'arg1',
|
||||
new_value=-1,
|
||||
changed_version='0.12',
|
||||
warning_msg="Custom warning message",
|
||||
)
|
||||
def bar(arg0, arg1=0, arg2=1):
|
||||
"""Expected docstring"""
|
||||
return arg0, arg1, arg2
|
||||
|
||||
# Assert warning messages
|
||||
with pytest.warns(FutureWarning) as record:
|
||||
assert foo(0) == (0, 0, 1)
|
||||
assert bar(0) == (0, 0, 1)
|
||||
|
||||
expected_msg = (
|
||||
"The new recommended value for arg1 is -1. Until "
|
||||
"version 0.12, the default arg1 value is 0. From "
|
||||
"version 0.12, the arg1 default value will be -1. "
|
||||
"To avoid this warning, please explicitly set arg1 value."
|
||||
)
|
||||
|
||||
assert str(record[0].message) == expected_msg
|
||||
assert str(record[1].message) == "Custom warning message"
|
||||
|
||||
# Assert that nothing happens if arg1 is set
|
||||
with warnings.catch_warnings(record=True) as recorded:
|
||||
# No kwargs
|
||||
assert foo(0, 2) == (0, 2, 1)
|
||||
assert foo(0, arg1=0) == (0, 0, 1)
|
||||
|
||||
# Function name and doc is preserved
|
||||
assert foo.__name__ == 'foo'
|
||||
if sys.flags.optimize < 2:
|
||||
# if PYTHONOPTIMIZE is set to 2, docstrings are stripped
|
||||
assert foo.__doc__ == 'Expected docstring'
|
||||
# Assert no warnings were raised
|
||||
assert len(recorded) == 0
|
||||
|
||||
|
||||
def test_check_nD():
|
||||
z = np.random.random(200**2).reshape((200, 200))
|
||||
x = z[10:30, 30:10]
|
||||
with testing.raises(ValueError):
|
||||
check_nD(x, 2)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'dtype', [bool, int, np.uint8, np.uint16, float, np.float32, np.float64]
|
||||
)
|
||||
@pytest.mark.parametrize('order', [None, -1, 0, 1, 2, 3, 4, 5, 6])
|
||||
def test_validate_interpolation_order(dtype, order):
|
||||
if order is None:
|
||||
# Default order
|
||||
assert _validate_interpolation_order(dtype, None) == 0 if dtype == bool else 1
|
||||
elif order < 0 or order > 5:
|
||||
# Order not in valid range
|
||||
with testing.raises(ValueError):
|
||||
_validate_interpolation_order(dtype, order)
|
||||
elif dtype == bool and order != 0:
|
||||
# Deprecated order for bool array
|
||||
with pytest.raises(ValueError):
|
||||
_validate_interpolation_order(bool, order)
|
||||
else:
|
||||
# Valid use case
|
||||
assert _validate_interpolation_order(dtype, order) == order
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'dtype',
|
||||
[
|
||||
bool,
|
||||
np.float16,
|
||||
np.float32,
|
||||
np.float64,
|
||||
np.uint8,
|
||||
np.uint16,
|
||||
np.uint32,
|
||||
np.uint64,
|
||||
np.int8,
|
||||
np.int16,
|
||||
np.int32,
|
||||
np.int64,
|
||||
],
|
||||
)
|
||||
def test_supported_float_dtype_real(dtype):
|
||||
float_dtype = _supported_float_type(dtype)
|
||||
if dtype in [np.float16, np.float32]:
|
||||
assert float_dtype == np.float32
|
||||
else:
|
||||
assert float_dtype == np.float64
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dtype', complex_dtypes)
|
||||
@pytest.mark.parametrize('allow_complex', [False, True])
|
||||
def test_supported_float_dtype_complex(dtype, allow_complex):
|
||||
if allow_complex:
|
||||
float_dtype = _supported_float_type(dtype, allow_complex=allow_complex)
|
||||
if dtype == np.complex64:
|
||||
assert float_dtype == np.complex64
|
||||
else:
|
||||
assert float_dtype == np.complex128
|
||||
else:
|
||||
with testing.raises(ValueError):
|
||||
_supported_float_type(dtype, allow_complex=allow_complex)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dtype', ['f', 'float32', np.float32, np.dtype(np.float32)])
|
||||
def test_supported_float_dtype_input_kinds(dtype):
|
||||
assert _supported_float_type(dtype) == np.float32
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'dtypes, expected',
|
||||
[
|
||||
((np.float16, np.float64), np.float64),
|
||||
((np.float32, np.uint16, np.int8), np.float64),
|
||||
((np.float32, np.float16), np.float32),
|
||||
],
|
||||
)
|
||||
def test_supported_float_dtype_sequence(dtypes, expected):
|
||||
float_dtype = _supported_float_type(dtypes)
|
||||
assert float_dtype == expected
|
||||
|
||||
|
||||
@channel_as_last_axis(multichannel_output=False)
|
||||
def _decorated_channel_axis_size(x, *, channel_axis=None):
|
||||
if channel_axis is None:
|
||||
return None
|
||||
assert channel_axis == -1
|
||||
return x.shape[-1]
|
||||
|
||||
|
||||
@testing.parametrize('channel_axis', [None, 0, 1, 2, -1, -2, -3])
|
||||
def test_decorated_channel_axis_shape(channel_axis):
|
||||
# Verify that channel_as_last_axis modifies the channel_axis as expected
|
||||
|
||||
# need unique size per axis here
|
||||
x = np.zeros((2, 3, 4))
|
||||
|
||||
size = _decorated_channel_axis_size(x, channel_axis=channel_axis)
|
||||
if channel_axis is None:
|
||||
assert size is None
|
||||
else:
|
||||
assert size == x.shape[channel_axis]
|
||||
|
||||
|
||||
@deprecate_func(
|
||||
deprecated_version="x", removed_version="y", hint="You are on your own."
|
||||
)
|
||||
def _deprecated_func():
|
||||
"""Dummy function used in `test_deprecate_func`.
|
||||
|
||||
The decorated function must be outside the test function, otherwise it
|
||||
seems that the warning does not point at the calling location.
|
||||
"""
|
||||
|
||||
|
||||
def test_deprecate_func():
|
||||
with pytest.warns(FutureWarning) as record:
|
||||
_deprecated_func()
|
||||
testing.assert_stacklevel(record)
|
||||
|
||||
assert len(record) == 1
|
||||
assert record[0].message.args[0] == (
|
||||
"`_deprecated_func` is deprecated since version x and will be removed in "
|
||||
"version y. You are on your own."
|
||||
)
|
||||
|
||||
|
||||
@deprecate_parameter("old1", start_version="0.10", stop_version="0.12")
|
||||
@deprecate_parameter("old0", start_version="0.10", stop_version="0.12")
|
||||
def _func_deprecated_params(arg0, old0=DEPRECATED, old1=DEPRECATED, arg1=None):
|
||||
"""Expected docstring.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
arg0 : int
|
||||
First unchanged parameter.
|
||||
arg1 : int, optional
|
||||
Second unchanged parameter.
|
||||
"""
|
||||
return arg0, old0, old1, arg1
|
||||
|
||||
|
||||
@deprecate_parameter("old1", new_name="new0", start_version="0.10", stop_version="0.12")
|
||||
@deprecate_parameter("old0", new_name="new1", start_version="0.10", stop_version="0.12")
|
||||
def _func_replace_params(
|
||||
arg0, old0=DEPRECATED, old1=DEPRECATED, new0=None, new1=None, arg1=None
|
||||
):
|
||||
"""Expected docstring.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
arg0 : int
|
||||
First unchanged parameter.
|
||||
new0 : int, optional
|
||||
First new parameter.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
new1 : int, optional
|
||||
Second new parameter.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
arg1 : int, optional
|
||||
Second unchanged parameter.
|
||||
"""
|
||||
return arg0, old0, old1, new0, new1, arg1
|
||||
|
||||
|
||||
class Test_deprecate_parameter:
|
||||
@pytest.mark.skipif(not have_numpydoc, reason="requires numpydoc")
|
||||
def test_docstring_removed_param(self):
|
||||
# function name and doc are preserved
|
||||
assert _func_deprecated_params.__name__ == "_func_deprecated_params"
|
||||
if sys.flags.optimize < 2:
|
||||
# if PYTHONOPTIMIZE is set to 2, docstrings are stripped
|
||||
assert (
|
||||
_func_deprecated_params.__doc__
|
||||
== """Expected docstring.
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
arg0 : int
|
||||
First unchanged parameter.
|
||||
arg1 : int, optional
|
||||
Second unchanged parameter.
|
||||
|
||||
Other Parameters
|
||||
----------------
|
||||
old0 : DEPRECATED
|
||||
`old0` is deprecated.
|
||||
|
||||
.. deprecated:: 0.10
|
||||
old1 : DEPRECATED
|
||||
`old1` is deprecated.
|
||||
|
||||
.. deprecated:: 0.10
|
||||
"""
|
||||
)
|
||||
|
||||
@pytest.mark.skipif(not have_numpydoc, reason="requires numpydoc")
|
||||
def test_docstring_replaced_param(self):
|
||||
assert _func_replace_params.__name__ == "_func_replace_params"
|
||||
if sys.flags.optimize < 2:
|
||||
# if PYTHONOPTIMIZE is set to 2, docstrings are stripped
|
||||
assert (
|
||||
_func_replace_params.__doc__
|
||||
== """Expected docstring.
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
arg0 : int
|
||||
First unchanged parameter.
|
||||
new0 : int, optional
|
||||
First new parameter.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
new1 : int, optional
|
||||
Second new parameter.
|
||||
|
||||
.. versionadded:: 0.10
|
||||
arg1 : int, optional
|
||||
Second unchanged parameter.
|
||||
|
||||
Other Parameters
|
||||
----------------
|
||||
old0 : DEPRECATED
|
||||
Deprecated in favor of `new1`.
|
||||
|
||||
.. deprecated:: 0.10
|
||||
old1 : DEPRECATED
|
||||
Deprecated in favor of `new0`.
|
||||
|
||||
.. deprecated:: 0.10
|
||||
"""
|
||||
)
|
||||
|
||||
def test_warning_removed_param(self):
|
||||
match = (
|
||||
r".*`old[01]` is deprecated since version 0\.10 and will be removed "
|
||||
r"in 0\.12.* see the documentation of .*_func_deprecated_params`."
|
||||
)
|
||||
with pytest.warns(FutureWarning, match=match):
|
||||
assert _func_deprecated_params(1, 2) == (1, DEPRECATED, DEPRECATED, None)
|
||||
with pytest.warns(FutureWarning, match=match):
|
||||
assert _func_deprecated_params(1, 2, 3) == (1, DEPRECATED, DEPRECATED, None)
|
||||
with pytest.warns(FutureWarning, match=match):
|
||||
assert _func_deprecated_params(1, old0=2) == (
|
||||
1,
|
||||
DEPRECATED,
|
||||
DEPRECATED,
|
||||
None,
|
||||
)
|
||||
with pytest.warns(FutureWarning, match=match):
|
||||
assert _func_deprecated_params(1, old1=2) == (
|
||||
1,
|
||||
DEPRECATED,
|
||||
DEPRECATED,
|
||||
None,
|
||||
)
|
||||
|
||||
with warnings.catch_warnings(record=True) as record:
|
||||
assert _func_deprecated_params(1, arg1=3) == (1, DEPRECATED, DEPRECATED, 3)
|
||||
assert len(record) == 0
|
||||
|
||||
def test_warning_replaced_param(self):
|
||||
match = (
|
||||
r".*`old[0,1]` is deprecated since version 0\.10 and will be removed "
|
||||
r"in 0\.12.* see the documentation of .*_func_replace_params`."
|
||||
)
|
||||
|
||||
with pytest.warns(FutureWarning, match=match):
|
||||
assert _func_replace_params(1, 2) == (
|
||||
1,
|
||||
DEPRECATED,
|
||||
DEPRECATED,
|
||||
None,
|
||||
2,
|
||||
None,
|
||||
)
|
||||
|
||||
with pytest.warns(FutureWarning, match=match) as records:
|
||||
assert _func_replace_params(1, 2, 3) == (
|
||||
1,
|
||||
DEPRECATED,
|
||||
DEPRECATED,
|
||||
3,
|
||||
2,
|
||||
None,
|
||||
)
|
||||
assert len(records) == 2
|
||||
assert "`old1` is deprecated" in records[0].message.args[0]
|
||||
assert "`old0` is deprecated" in records[1].message.args[0]
|
||||
|
||||
with pytest.warns(FutureWarning, match=match):
|
||||
assert _func_replace_params(1, old0=2) == (
|
||||
1,
|
||||
DEPRECATED,
|
||||
DEPRECATED,
|
||||
None,
|
||||
2,
|
||||
None,
|
||||
)
|
||||
|
||||
with pytest.warns(FutureWarning, match=match):
|
||||
assert _func_replace_params(1, old1=3) == (
|
||||
1,
|
||||
DEPRECATED,
|
||||
DEPRECATED,
|
||||
3,
|
||||
None,
|
||||
None,
|
||||
)
|
||||
|
||||
# Otherwise, no warnings are emitted!
|
||||
with warnings.catch_warnings(record=True) as record:
|
||||
assert _func_replace_params(1, new0=2, new1=3) == (
|
||||
1,
|
||||
DEPRECATED,
|
||||
DEPRECATED,
|
||||
2,
|
||||
3,
|
||||
None,
|
||||
)
|
||||
assert len(record) == 0
|
||||
|
||||
def test_missing_DEPRECATED(self):
|
||||
decorate = deprecate_parameter(
|
||||
"old", start_version="0.10", stop_version="0.12", stacklevel=2
|
||||
)
|
||||
|
||||
def foo(arg0, old=None):
|
||||
return arg0, old
|
||||
|
||||
with pytest.raises(RuntimeError, match="Expected .* <DEPRECATED>"):
|
||||
decorate(foo)
|
||||
|
||||
def bar(arg0, old=DEPRECATED):
|
||||
return arg0
|
||||
|
||||
assert decorate(bar)(1) == 1
|
||||
|
||||
def test_new_keyword_only(self):
|
||||
@deprecate_parameter(
|
||||
"old",
|
||||
new_name="new",
|
||||
start_version="0.19",
|
||||
stop_version="0.21",
|
||||
)
|
||||
def foo(arg0, old=DEPRECATED, *, new=1, arg3=None):
|
||||
"""Expected docstring"""
|
||||
return arg0, new, arg3
|
||||
|
||||
# Assert that nothing happens when the function is called with the
|
||||
# new API
|
||||
with warnings.catch_warnings(record=True) as recorded:
|
||||
# No kwargs
|
||||
assert foo(0) == (0, 1, None)
|
||||
# Kwargs without deprecated argument
|
||||
assert foo(0, new=1, arg3=2) == (0, 1, 2)
|
||||
assert foo(0, new=2) == (0, 2, None)
|
||||
assert foo(0, arg3=2) == (0, 1, 2)
|
||||
assert len(recorded) == 0
|
||||
|
||||
def test_conflicting_old_and_new(self):
    """Supplying an old parameter alongside its replacement must raise."""
    match = r".*`old[0,1]` is deprecated"
    # Each kwargs set mixes deprecated and new names; every call must warn
    # about the deprecation and then fail with a conflict error.
    conflicting_calls = [
        dict(old0=2, new1=2),
        dict(old1=2, new0=2),
        dict(old0=1, old1=1, new0=1, new1=1),
    ]
    for kwargs in conflicting_calls:
        with pytest.warns(FutureWarning, match=match):
            with pytest.raises(ValueError, match=".* avoid conflicting values"):
                _func_replace_params(1, **kwargs)
|
||||
|
||||
def test_wrong_call_signature(self):
    """Check that normal errors for faulty calls are unchanged."""
    # A missing required argument still raises the usual TypeError.
    with pytest.raises(
        TypeError, match=r".* required positional argument\: 'arg0'"
    ):
        _func_replace_params()

    # Passing the same parameter twice also surfaces as the usual TypeError;
    # the deprecation warning for the keyword use fires as well.
    with pytest.warns(FutureWarning, match=r".*`old[0,1]` is deprecated"), pytest.raises(
        TypeError, match=".* multiple values for argument 'old0'"
    ):
        _func_deprecated_params(1, 2, old0=2)
|
||||
|
||||
def test_wrong_param_name(self):
    """Naming a parameter absent from the signature fails at decoration time."""
    # The deprecated name itself is missing from the signature.
    with pytest.raises(ValueError, match="'old' is not in list"):

        @deprecate_parameter("old", start_version="0.10", stop_version="0.12")
        def missing_old(arg0):
            pass

    # The replacement name is missing from the signature.
    with pytest.raises(ValueError, match="'new' is not in list"):

        @deprecate_parameter(
            "old", new_name="new", start_version="0.10", stop_version="0.12"
        )
        def missing_new(arg0, old, arg1):
            pass
|
||||
|
||||
def test_warning_location(self):
    """Each deprecated keyword warns once, pointing at the caller's frame."""
    with pytest.warns(FutureWarning) as caught:
        _func_deprecated_params(1, old0=2, old1=2)
    # One warning per deprecated parameter, attributed to this test's line.
    assert len(caught) == 2
    testing.assert_stacklevel(caught)
|
||||
|
||||
def test_stacklevel(self):
    """Omitting `stacklevel` is an error; an explicit one yields a clean warning."""

    @deprecate_parameter("old", start_version="0.19", stop_version="0.21")
    def missing_level(arg0, old=DEPRECATED):
        pass

    # Without an explicit stacklevel the wrapper refuses to warn.
    with pytest.raises(RuntimeError, match="Set stacklevel manually"):
        missing_level(0, 1)

    @deprecate_parameter(
        "old", start_version="0.19", stop_version="0.21", stacklevel=2
    )
    def explicit_level(arg0, old=DEPRECATED):
        pass

    # With stacklevel given, the warning fires and points at this caller.
    with pytest.warns(FutureWarning, match="`old` is deprecated") as caught:
        explicit_level(0, 1)
    testing.assert_stacklevel(caught)
|
||||
42
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_version_requirements.py
vendored
Normal file
42
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_version_requirements.py
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
"""Tests for the version requirement functions.
|
||||
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
from numpy.testing import assert_equal
|
||||
from skimage._shared import version_requirements as version_req
|
||||
from skimage._shared import testing
|
||||
|
||||
|
||||
def test_get_module_version():
    """Installed modules report a version; unknown modules raise ImportError."""
    for installed in ('numpy', 'scipy'):
        assert version_req.get_module_version(installed)
    with testing.raises(ImportError):
        version_req.get_module_version('fakenumpy')
|
||||
|
||||
|
||||
def test_is_installed():
    """is_installed honours version specifiers."""
    # Any supported interpreter satisfies this lower bound on Python itself.
    assert version_req.is_installed('python', '>=2.7')
    # An impossible upper bound on numpy must report "not installed".
    assert not version_req.is_installed('numpy', '<1.0')
|
||||
|
||||
|
||||
def test_require():
    """@require runs the function when its pins hold, else raises ImportError."""

    # Requirements any test environment satisfies (should pass).
    @version_req.require('python', '>2.7')
    @version_req.require('numpy', '>1.5')
    def satisfiable():
        return 1

    assert_equal(satisfiable(), 1)

    # An impossible scipy pin (should fail when called).
    @version_req.require('scipy', '<0.1')
    def unsatisfiable():
        return 0

    with testing.raises(ImportError):
        unsatisfiable()
|
||||
|
||||
|
||||
def test_get_module():
    """get_module returns the already-imported module object itself."""
    module = version_req.get_module("numpy")
    assert module is np
|
||||
37
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_warnings.py
vendored
Normal file
37
.CondaPkg/env/Lib/site-packages/skimage/_shared/tests/test_warnings.py
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
import os
|
||||
from skimage._shared._warnings import expected_warnings
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
def setup():
    """Clear SKIMAGE_TEST_STRICT_WARNINGS for a test, restoring it afterwards."""
    # Stash the caller's setting (if any) and drop it from the environment.
    saved_strictness = os.environ.pop('SKIMAGE_TEST_STRICT_WARNINGS', None)
    yield
    # Teardown: reinstate the original setting.
    if saved_strictness is not None:
        os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = saved_strictness
|
||||
|
||||
|
||||
def test_strict_warnings_default(setup):
    """By default, a missing expected warning is an error.

    With SKIMAGE_TEST_STRICT_WARNINGS unset (the ``setup`` fixture removed
    it), ``expected_warnings`` must raise ValueError when the listed warning
    never fires.
    """
    # NOTE(review): renamed from the misspelled ``test_strict_warnigns_default``;
    # pytest still collects it via the ``test_`` prefix, so no caller breaks.
    with pytest.raises(ValueError):
        with expected_warnings(['some warnings']):
            pass
|
||||
|
||||
|
||||
@pytest.mark.parametrize('strictness', ['1', 'true', 'True', 'TRUE'])
def test_strict_warning_true(setup, strictness):
    """Any truthy spelling of the env var keeps strict-warning mode on."""
    os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = strictness
    # A warning that never fires must be treated as an error.
    with pytest.raises(ValueError), expected_warnings(['some warnings']):
        pass
|
||||
|
||||
|
||||
@pytest.mark.parametrize('strictness', ['0', 'false', 'False', 'FALSE'])
def test_strict_warning_false(setup, strictness):
    """Falsy spellings of the env var disable strictness about warnings."""
    os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = strictness
    # With strictness off, an unmatched expectation passes silently.
    with expected_warnings(['some warnings']):
        pass
|
||||
Reference in New Issue
Block a user