comment here
179
.CondaPkg/env/lib/python3.11/site-packages/skimage/__init__.py
vendored
Normal file
@@ -0,0 +1,179 @@
|
||||
"""Image Processing for Python
|
||||
|
||||
``scikit-image`` (a.k.a. ``skimage``) is a collection of algorithms for image
|
||||
processing and computer vision.
|
||||
|
||||
The main package of ``skimage`` only provides a few utilities for converting
|
||||
between image data types; for most features, you need to import one of the
|
||||
following subpackages:
|
||||
|
||||
Subpackages
|
||||
-----------
|
||||
color
|
||||
Color space conversion.
|
||||
data
|
||||
Test images and example data.
|
||||
draw
|
||||
Drawing primitives (lines, text, etc.) that operate on NumPy arrays.
|
||||
exposure
|
||||
Image intensity adjustment, e.g., histogram equalization, etc.
|
||||
feature
|
||||
Feature detection and extraction, e.g., texture analysis corners, etc.
|
||||
filters
|
||||
Sharpening, edge finding, rank filters, thresholding, etc.
|
||||
graph
|
||||
Graph-theoretic operations, e.g., shortest paths.
|
||||
io
|
||||
Reading, saving, and displaying images and video.
|
||||
measure
|
||||
Measurement of image properties, e.g., region properties and contours.
|
||||
metrics
|
||||
Metrics corresponding to images, e.g. distance metrics, similarity, etc.
|
||||
morphology
|
||||
Morphological operations, e.g., opening or skeletonization.
|
||||
restoration
|
||||
Restoration algorithms, e.g., deconvolution algorithms, denoising, etc.
|
||||
segmentation
|
||||
Partitioning an image into multiple regions.
|
||||
transform
|
||||
Geometric and other transforms, e.g., rotation or the Radon transform.
|
||||
util
|
||||
Generic utilities.
|
||||
|
||||
Utility Functions
|
||||
-----------------
|
||||
img_as_float
|
||||
Convert an image to floating point format, with values in [0, 1].
|
||||
Is similar to `img_as_float64`, but will not convert lower-precision
|
||||
floating point arrays to `float64`.
|
||||
img_as_float32
|
||||
Convert an image to single-precision (32-bit) floating point format,
|
||||
with values in [0, 1].
|
||||
img_as_float64
|
||||
Convert an image to double-precision (64-bit) floating point format,
|
||||
with values in [0, 1].
|
||||
img_as_uint
|
||||
Convert an image to unsigned integer format, with values in [0, 65535].
|
||||
img_as_int
|
||||
Convert an image to signed integer format, with values in [-32768, 32767].
|
||||
img_as_ubyte
|
||||
Convert an image to unsigned byte format, with values in [0, 255].
|
||||
img_as_bool
|
||||
Convert an image to boolean format, with values either True or False.
|
||||
dtype_limits
|
||||
Return intensity limits, i.e. (min, max) tuple, of the image's dtype.
|
||||
|
||||
"""
|
||||
|
||||
__version__ = '0.20.0'
|
||||
|
||||
from ._shared.version_requirements import ensure_python_version
|
||||
ensure_python_version((3, 8))
|
||||
|
||||
import lazy_loader as lazy
|
||||
__getattr__, __lazy_dir__, _ = lazy.attach_stub(__name__, __file__)
|
||||
|
||||
def __dir__():
|
||||
return __lazy_dir__() + ['__version__']
|
||||
|
||||
# Logic for checking for improper install and importing while in the source
|
||||
# tree when package has not been installed inplace.
|
||||
# Code adapted from scikit-learn's __check_build module.
|
||||
_INPLACE_MSG = """
|
||||
It appears that you are importing a local scikit-image source tree. For
|
||||
this, you need to have an inplace install. Maybe you are in the source
|
||||
directory and you need to try from another location."""
|
||||
|
||||
_STANDARD_MSG = """
|
||||
Your install of scikit-image appears to be broken.
|
||||
Try re-installing the package following the instructions at:
|
||||
https://scikit-image.org/docs/stable/install.html """
|
||||
|
||||
|
||||
def _raise_build_error(e):
|
||||
# Raise a comprehensible error
|
||||
import os.path as osp
|
||||
local_dir = osp.split(__file__)[0]
|
||||
msg = _STANDARD_MSG
|
||||
if local_dir == "skimage":
|
||||
# Picking up the local install: this will work only if the
|
||||
# install is an 'inplace build'
|
||||
msg = _INPLACE_MSG
|
||||
raise ImportError(
|
||||
f"{e}\nIt seems that scikit-image has not been built correctly.\n{msg}"
|
||||
)
|
||||
|
||||
|
||||
try:
|
||||
# This variable is injected in the __builtins__ by the build
|
||||
# process. It used to enable importing subpackages of skimage when
|
||||
# the binaries are not built
|
||||
__SKIMAGE_SETUP__
|
||||
except NameError:
|
||||
__SKIMAGE_SETUP__ = False
|
||||
|
||||
if __SKIMAGE_SETUP__:
|
||||
import sys
|
||||
sys.stderr.write('Partial import of skimage during the build process.\n')
|
||||
# We are not importing the rest of the scikit during the build
|
||||
# process, as it may not be compiled yet
|
||||
else:
|
||||
try:
|
||||
from ._shared import geometry
|
||||
del geometry
|
||||
except ImportError as e:
|
||||
_raise_build_error(e)
|
||||
|
||||
# Legacy imports into the root namespace; not advertised in __all__
|
||||
from .util.dtype import (
|
||||
dtype_limits,
|
||||
img_as_float32,
|
||||
img_as_float64,
|
||||
img_as_float,
|
||||
img_as_int,
|
||||
img_as_uint,
|
||||
img_as_ubyte,
|
||||
img_as_bool
|
||||
)
|
||||
|
||||
from .util.lookfor import lookfor
|
||||
|
||||
from .data import data_dir
|
||||
|
||||
|
||||
if 'dev' in __version__:
|
||||
# Append last commit date and hash to dev version information, if available
|
||||
|
||||
import subprocess
|
||||
import os.path
|
||||
|
||||
try:
|
||||
p = subprocess.Popen(
|
||||
['git', 'log', '-1', '--format="%h %aI"'],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
cwd=os.path.dirname(__file__),
|
||||
)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
else:
|
||||
out, err = p.communicate()
|
||||
if p.returncode == 0:
|
||||
git_hash, git_date = (
|
||||
out.decode('utf-8')
|
||||
.strip()
|
||||
.replace('"', '')
|
||||
.split('T')[0]
|
||||
.replace('-', '')
|
||||
.split()
|
||||
)
|
||||
|
||||
__version__ = '+'.join(
|
||||
[tag for tag in __version__.split('+')
|
||||
if not tag.startswith('git')]
|
||||
)
|
||||
__version__ += f'+git{git_date}.{git_hash}'
|
||||
|
||||
from skimage._shared.tester import PytestTester # noqa
|
||||
test = PytestTester(__name__)
|
||||
del PytestTester
|
||||
41
.CondaPkg/env/lib/python3.11/site-packages/skimage/__init__.pyi
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
submodules = [
|
||||
'color',
|
||||
'data',
|
||||
'draw',
|
||||
'exposure',
|
||||
'feature',
|
||||
'filters',
|
||||
'future',
|
||||
'graph',
|
||||
'io',
|
||||
'measure',
|
||||
'metrics',
|
||||
'morphology',
|
||||
'registration',
|
||||
'restoration',
|
||||
'segmentation',
|
||||
'transform',
|
||||
'util',
|
||||
]
|
||||
|
||||
__all__ = submodules + ['__version__'] # noqa: F822
|
||||
|
||||
from . import (
|
||||
color,
|
||||
data,
|
||||
draw,
|
||||
exposure,
|
||||
feature,
|
||||
filters,
|
||||
future,
|
||||
graph,
|
||||
io,
|
||||
measure,
|
||||
metrics,
|
||||
morphology,
|
||||
registration,
|
||||
restoration,
|
||||
segmentation,
|
||||
transform,
|
||||
util,
|
||||
)
|
||||
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/__pycache__/conftest.cpython-311.pyc
vendored
Normal file
0
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__init__.py
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/_geometry.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/_tempfile.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/_warnings.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/coord.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/filters.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/tester.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/testing.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/__pycache__/utils.cpython-311.pyc
vendored
Normal file
3
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/_dependency_checks.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
from .version_requirements import is_installed
|
||||
|
||||
has_mpl = is_installed("matplotlib", ">=3.3")
|
||||
54
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/_geometry.py
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
__all__ = ['polygon_clip', 'polygon_area']
|
||||
|
||||
import numpy as np
|
||||
|
||||
from .version_requirements import require
|
||||
|
||||
|
||||
@require("matplotlib", ">=3.3")
|
||||
def polygon_clip(rp, cp, r0, c0, r1, c1):
|
||||
"""Clip a polygon to the given bounding box.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
rp, cp : (N,) ndarray of double
|
||||
Row and column coordinates of the polygon.
|
||||
(r0, c0), (r1, c1) : double
|
||||
Top-left and bottom-right coordinates of the bounding box.
|
||||
|
||||
Returns
|
||||
-------
|
||||
r_clipped, c_clipped : (M,) ndarray of double
|
||||
Coordinates of clipped polygon.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This makes use of Sutherland-Hodgman clipping as implemented in
|
||||
AGG 2.4 and exposed in Matplotlib.
|
||||
|
||||
"""
|
||||
from matplotlib import path, transforms
|
||||
|
||||
poly = path.Path(np.vstack((rp, cp)).T, closed=True)
|
||||
clip_rect = transforms.Bbox([[r0, c0], [r1, c1]])
|
||||
poly_clipped = poly.clip_to_bbox(clip_rect).to_polygons()[0]
|
||||
|
||||
return poly_clipped[:, 0], poly_clipped[:, 1]
|
||||
|
||||
|
||||
def polygon_area(pr, pc):
|
||||
"""Compute the area of a polygon.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
pr, pc : (N,) array of float
|
||||
Polygon row and column coordinates.
|
||||
|
||||
Returns
|
||||
-------
|
||||
a : float
|
||||
Area of the polygon.
|
||||
"""
|
||||
pr = np.asarray(pr)
|
||||
pc = np.asarray(pc)
|
||||
return 0.5 * np.abs(np.sum((pc[:-1] * pr[1:]) - (pc[1:] * pr[:-1])))
|
||||
27
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/_tempfile.py
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
from tempfile import NamedTemporaryFile
|
||||
from contextlib import contextmanager
|
||||
import os
|
||||
|
||||
@contextmanager
|
||||
def temporary_file(suffix=''):
|
||||
"""Yield a writeable temporary filename that is deleted on context exit.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
suffix : string, optional
|
||||
The suffix for the file.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import numpy as np
|
||||
>>> from skimage import io
|
||||
>>> with temporary_file('.tif') as tempfile:
|
||||
... im = np.arange(25, dtype=np.uint8).reshape((5, 5))
|
||||
... io.imsave(tempfile, im)
|
||||
... assert np.all(io.imread(tempfile) == im)
|
||||
"""
|
||||
with NamedTemporaryFile(suffix=suffix, delete=False) as tempfile_stream:
|
||||
tempfile = tempfile_stream.name
|
||||
|
||||
yield tempfile
|
||||
os.remove(tempfile)
|
||||
147
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/_warnings.py
vendored
Normal file
@@ -0,0 +1,147 @@
|
||||
from contextlib import contextmanager
|
||||
import sys
|
||||
import warnings
|
||||
import re
|
||||
import functools
|
||||
import os
|
||||
|
||||
__all__ = ['all_warnings', 'expected_warnings', 'warn']
|
||||
|
||||
|
||||
# A version of `warnings.warn` with a default stacklevel of 2.
|
||||
# functool is used so as not to increase the call stack accidentally
|
||||
warn = functools.partial(warnings.warn, stacklevel=2)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def all_warnings():
|
||||
"""
|
||||
Context for use in testing to ensure that all warnings are raised.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import warnings
|
||||
>>> def foo():
|
||||
... warnings.warn(RuntimeWarning("bar"), stacklevel=2)
|
||||
|
||||
We raise the warning once, while the warning filter is set to "once".
|
||||
Hereafter, the warning is invisible, even with custom filters:
|
||||
|
||||
>>> with warnings.catch_warnings():
|
||||
... warnings.simplefilter('once')
|
||||
... foo() # doctest: +SKIP
|
||||
|
||||
We can now run ``foo()`` without a warning being raised:
|
||||
|
||||
>>> from numpy.testing import assert_warns
|
||||
>>> foo() # doctest: +SKIP
|
||||
|
||||
To catch the warning, we call in the help of ``all_warnings``:
|
||||
|
||||
>>> with all_warnings():
|
||||
... assert_warns(RuntimeWarning, foo)
|
||||
"""
|
||||
# _warnings.py is on the critical import path.
|
||||
# Since this is a testing only function, we lazy import inspect.
|
||||
import inspect
|
||||
# Whenever a warning is triggered, Python adds a __warningregistry__
|
||||
# member to the *calling* module. The exercise here is to find
|
||||
# and eradicate all those breadcrumbs that were left lying around.
|
||||
#
|
||||
# We proceed by first searching all parent calling frames and explicitly
|
||||
# clearing their warning registries (necessary for the doctests above to
|
||||
# pass). Then, we search for all submodules of skimage and clear theirs
|
||||
# as well (necessary for the skimage test suite to pass).
|
||||
|
||||
frame = inspect.currentframe()
|
||||
if frame:
|
||||
for f in inspect.getouterframes(frame):
|
||||
f[0].f_locals['__warningregistry__'] = {}
|
||||
del frame
|
||||
|
||||
for mod_name, mod in list(sys.modules.items()):
|
||||
try:
|
||||
mod.__warningregistry__.clear()
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
with warnings.catch_warnings(record=True) as w:
|
||||
warnings.simplefilter("always")
|
||||
yield w
|
||||
|
||||
|
||||
@contextmanager
|
||||
def expected_warnings(matching):
|
||||
r"""Context for use in testing to catch known warnings matching regexes
|
||||
|
||||
Parameters
|
||||
----------
|
||||
matching : None or a list of strings or compiled regexes
|
||||
Regexes for the desired warning to catch
|
||||
If matching is None, this behaves as a no-op.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import numpy as np
|
||||
>>> rng = np.random.default_rng()
|
||||
>>> image = rng.integers(0, 2**16, size=(100, 100), dtype=np.uint16)
|
||||
>>> # rank filters are slow when bit-depth exceeds 10 bits
|
||||
>>> from skimage import filters
|
||||
>>> with expected_warnings(['Bad rank filter performance']):
|
||||
... median_filtered = filters.rank.median(image)
|
||||
|
||||
Notes
|
||||
-----
|
||||
Uses `all_warnings` to ensure all warnings are raised.
|
||||
Upon exiting, it checks the recorded warnings for the desired matching
|
||||
pattern(s).
|
||||
Raises a ValueError if any match was not found or an unexpected
|
||||
warning was raised.
|
||||
Allows for three types of behaviors: `and`, `or`, and `optional` matches.
|
||||
This is done to accommodate different build environments or loop conditions
|
||||
that may produce different warnings. The behaviors can be combined.
|
||||
If you pass multiple patterns, you get an orderless `and`, where all of the
|
||||
warnings must be raised.
|
||||
If you use the `|` operator in a pattern, you can catch one of several
|
||||
warnings.
|
||||
Finally, you can use `|\A\Z` in a pattern to signify it as optional.
|
||||
|
||||
"""
|
||||
if isinstance(matching, str):
|
||||
raise ValueError('``matching`` should be a list of strings and not '
|
||||
'a string itself.')
|
||||
|
||||
# Special case for disabling the context manager
|
||||
if matching is None:
|
||||
yield None
|
||||
return
|
||||
|
||||
strict_warnings = os.environ.get('SKIMAGE_TEST_STRICT_WARNINGS', '1')
|
||||
if strict_warnings.lower() == 'true':
|
||||
strict_warnings = True
|
||||
elif strict_warnings.lower() == 'false':
|
||||
strict_warnings = False
|
||||
else:
|
||||
strict_warnings = bool(int(strict_warnings))
|
||||
|
||||
with all_warnings() as w:
|
||||
# enter context
|
||||
yield w
|
||||
# exited user context, check the recorded warnings
|
||||
# Allow users to provide None
|
||||
while None in matching:
|
||||
matching.remove(None)
|
||||
remaining = [m for m in matching if r'\A\Z' not in m.split('|')]
|
||||
for warn in w:
|
||||
found = False
|
||||
for match in matching:
|
||||
if re.search(match, str(warn.message)) is not None:
|
||||
found = True
|
||||
if match in remaining:
|
||||
remaining.remove(match)
|
||||
if strict_warnings and not found:
|
||||
raise ValueError(f'Unexpected warning: {str(warn.message)}')
|
||||
if strict_warnings and (len(remaining) > 0):
|
||||
newline = "\n"
|
||||
msg = f"No warning raised matching:{newline}{newline.join(remaining)}"
|
||||
raise ValueError(msg)
|
||||
121
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/coord.py
vendored
Normal file
@@ -0,0 +1,121 @@
|
||||
import numpy as np
|
||||
from scipy.spatial import cKDTree, distance
|
||||
|
||||
|
||||
def _ensure_spacing(coord, spacing, p_norm, max_out):
|
||||
"""Returns a subset of coord where a minimum spacing is guaranteed.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
coord : ndarray
|
||||
The coordinates of the considered points.
|
||||
spacing : float
|
||||
the maximum allowed spacing between the points.
|
||||
p_norm : float
|
||||
Which Minkowski p-norm to use. Should be in the range [1, inf].
|
||||
A finite large p may cause a ValueError if overflow can occur.
|
||||
``inf`` corresponds to the Chebyshev distance and 2 to the
|
||||
Euclidean distance.
|
||||
max_out: int
|
||||
If not None, at most the first ``max_out`` candidates are
|
||||
returned.
|
||||
|
||||
Returns
|
||||
-------
|
||||
output : ndarray
|
||||
A subset of coord where a minimum spacing is guaranteed.
|
||||
|
||||
"""
|
||||
|
||||
# Use KDtree to find the peaks that are too close to each other
|
||||
tree = cKDTree(coord)
|
||||
|
||||
indices = tree.query_ball_point(coord, r=spacing, p=p_norm)
|
||||
rejected_peaks_indices = set()
|
||||
naccepted = 0
|
||||
for idx, candidates in enumerate(indices):
|
||||
if idx not in rejected_peaks_indices:
|
||||
# keep current point and the points at exactly spacing from it
|
||||
candidates.remove(idx)
|
||||
dist = distance.cdist([coord[idx]],
|
||||
coord[candidates],
|
||||
distance.minkowski,
|
||||
p=p_norm).reshape(-1)
|
||||
candidates = [c for c, d in zip(candidates, dist)
|
||||
if d < spacing]
|
||||
|
||||
# candidates.remove(keep)
|
||||
rejected_peaks_indices.update(candidates)
|
||||
naccepted += 1
|
||||
if max_out is not None and naccepted >= max_out:
|
||||
break
|
||||
|
||||
# Remove the peaks that are too close to each other
|
||||
output = np.delete(coord, tuple(rejected_peaks_indices), axis=0)
|
||||
if max_out is not None:
|
||||
output = output[:max_out]
|
||||
|
||||
return output
|
||||
|
||||
|
||||
def ensure_spacing(coords, spacing=1, p_norm=np.inf, min_split_size=50,
|
||||
max_out=None, *, max_split_size=2000):
|
||||
"""Returns a subset of coord where a minimum spacing is guaranteed.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
coords : array_like
|
||||
The coordinates of the considered points.
|
||||
spacing : float
|
||||
the maximum allowed spacing between the points.
|
||||
p_norm : float
|
||||
Which Minkowski p-norm to use. Should be in the range [1, inf].
|
||||
A finite large p may cause a ValueError if overflow can occur.
|
||||
``inf`` corresponds to the Chebyshev distance and 2 to the
|
||||
Euclidean distance.
|
||||
min_split_size : int
|
||||
Minimum split size used to process ``coords`` by batch to save
|
||||
memory. If None, the memory saving strategy is not applied.
|
||||
max_out : int
|
||||
If not None, only the first ``max_out`` candidates are returned.
|
||||
max_split_size : int
|
||||
Maximum split size used to process ``coords`` by batch to save
|
||||
memory. This number was decided by profiling with a large number
|
||||
of points. Too small a number results in too much looping in
|
||||
Python instead of C, slowing down the process, while too large
|
||||
a number results in large memory allocations, slowdowns, and,
|
||||
potentially, in the process being killed -- see gh-6010. See
|
||||
benchmark results `here
|
||||
<https://github.com/scikit-image/scikit-image/pull/6035#discussion_r751518691>`_.
|
||||
|
||||
Returns
|
||||
-------
|
||||
output : array_like
|
||||
A subset of coord where a minimum spacing is guaranteed.
|
||||
|
||||
"""
|
||||
|
||||
output = coords
|
||||
if len(coords):
|
||||
|
||||
coords = np.atleast_2d(coords)
|
||||
if min_split_size is None:
|
||||
batch_list = [coords]
|
||||
else:
|
||||
coord_count = len(coords)
|
||||
split_idx = [min_split_size]
|
||||
split_size = min_split_size
|
||||
while coord_count - split_idx[-1] > max_split_size:
|
||||
split_size *= 2
|
||||
split_idx.append(split_idx[-1] + min(split_size,
|
||||
max_split_size))
|
||||
batch_list = np.array_split(coords, split_idx)
|
||||
|
||||
output = np.zeros((0, coords.shape[1]), dtype=coords.dtype)
|
||||
for batch in batch_list:
|
||||
output = _ensure_spacing(np.vstack([output, batch]),
|
||||
spacing, p_norm, max_out)
|
||||
if max_out is not None and len(output) >= max_out:
|
||||
break
|
||||
|
||||
return output
|
||||
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/fast_exp.cpython-311-x86_64-linux-gnu.so
vendored
Executable file
47
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/fast_exp.h
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
/* A fast approximation of the exponential function.
|
||||
* Reference [1]: https://schraudolph.org/pubs/Schraudolph99.pdf
|
||||
* Reference [2]: https://doi.org/10.1162/089976600300015033
|
||||
* Additional improvements by Leonid Bloch. */
|
||||
|
||||
#include <stdint.h>
|
||||
|
||||
/* use just EXP_A = 1512775 for integer version, to avoid FP calculations */
|
||||
#define EXP_A (1512775.3951951856938) /* 2^20/ln2 */
|
||||
/* For min. RMS error */
|
||||
#define EXP_BC 1072632447 /* 1023*2^20 - 60801 */
|
||||
/* For min. max. relative error */
|
||||
/* #define EXP_BC 1072647449 */ /* 1023*2^20 - 45799 */
|
||||
/* For min. mean relative error */
|
||||
/* #define EXP_BC 1072625005 */ /* 1023*2^20 - 68243 */
|
||||
|
||||
__inline double _fast_exp (double y)
|
||||
{
|
||||
union
|
||||
{
|
||||
double d;
|
||||
struct { int32_t i, j; } n;
|
||||
char t[8];
|
||||
} _eco;
|
||||
|
||||
_eco.n.i = 1;
|
||||
|
||||
switch(_eco.t[0]) {
|
||||
case 1:
|
||||
/* Little endian */
|
||||
_eco.n.j = (int32_t)(EXP_A*(y)) + EXP_BC;
|
||||
_eco.n.i = 0;
|
||||
break;
|
||||
case 0:
|
||||
/* Big endian */
|
||||
_eco.n.i = (int32_t)(EXP_A*(y)) + EXP_BC;
|
||||
_eco.n.j = 0;
|
||||
break;
|
||||
}
|
||||
|
||||
return _eco.d;
|
||||
}
|
||||
|
||||
__inline float _fast_expf (float y)
|
||||
{
|
||||
return (float)_fast_exp((double)y);
|
||||
}
|
||||
166
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/filters.py
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
"""Filters used across multiple skimage submodules.
|
||||
|
||||
These are defined here to avoid circular imports.
|
||||
|
||||
The unit tests remain under skimage/filters/tests/
|
||||
"""
|
||||
from collections.abc import Iterable
|
||||
|
||||
import numpy as np
|
||||
from scipy import ndimage as ndi
|
||||
|
||||
from .._shared.utils import _supported_float_type, convert_to_float, warn
|
||||
|
||||
|
||||
class _PatchClassRepr(type):
|
||||
"""Control class representations in rendered signatures."""
|
||||
def __repr__(cls):
|
||||
return f"<{cls.__name__}>"
|
||||
|
||||
|
||||
class ChannelAxisNotSet(metaclass=_PatchClassRepr):
|
||||
"""Signal that the `channel_axis` parameter is not set.
|
||||
|
||||
This is a proxy object, used to signal to `skimage.filters.gaussian` that
|
||||
the `channel_axis` parameter has not been set, in which case the function
|
||||
will determine whether a color channel is present. We cannot use ``None``
|
||||
for this purpose as it has its own meaning which indicates that the given
|
||||
image is grayscale.
|
||||
|
||||
This automatic behavior was broken in v0.19, recovered but deprecated in
|
||||
v0.20 and will be removed in v0.21.
|
||||
"""
|
||||
|
||||
|
||||
def gaussian(image, sigma=1, output=None, mode='nearest', cval=0,
|
||||
preserve_range=False, truncate=4.0, *,
|
||||
channel_axis=ChannelAxisNotSet):
|
||||
"""Multi-dimensional Gaussian filter.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
image : array-like
|
||||
Input image (grayscale or color) to filter.
|
||||
sigma : scalar or sequence of scalars, optional
|
||||
Standard deviation for Gaussian kernel. The standard
|
||||
deviations of the Gaussian filter are given for each axis as a
|
||||
sequence, or as a single number, in which case it is equal for
|
||||
all axes.
|
||||
output : array, optional
|
||||
The ``output`` parameter passes an array in which to store the
|
||||
filter output.
|
||||
mode : {'reflect', 'constant', 'nearest', 'mirror', 'wrap'}, optional
|
||||
The ``mode`` parameter determines how the array borders are
|
||||
handled, where ``cval`` is the value when mode is equal to
|
||||
'constant'. Default is 'nearest'.
|
||||
cval : scalar, optional
|
||||
Value to fill past edges of input if ``mode`` is 'constant'. Default
|
||||
is 0.0
|
||||
preserve_range : bool, optional
|
||||
If True, keep the original range of values. Otherwise, the input
|
||||
``image`` is converted according to the conventions of ``img_as_float``
|
||||
(Normalized first to values [-1.0 ; 1.0] or [0 ; 1.0] depending on
|
||||
dtype of input)
|
||||
|
||||
For more information, see:
|
||||
https://scikit-image.org/docs/dev/user_guide/data_types.html
|
||||
truncate : float, optional
|
||||
Truncate the filter at this many standard deviations.
|
||||
channel_axis : int or None, optional
|
||||
If None, the image is assumed to be a grayscale (single channel) image.
|
||||
Otherwise, this parameter indicates which axis of the array corresponds
|
||||
to channels.
|
||||
|
||||
.. versionadded:: 0.19
|
||||
``channel_axis`` was added in 0.19.
|
||||
|
||||
.. warning::
|
||||
|
||||
Automatic detection of the color channel based on the old deprecated
|
||||
`multichannel=None` was broken in version 0.19. In 0.20 this
|
||||
behavior is fixed. The last axis of an `image` with dimensions
|
||||
(M, N, 3) is interpreted as a color channel if `channel_axis` is not
|
||||
set by the user (signaled by the default proxy value
|
||||
`ChannelAxisNotSet`). Starting with 0.21, `channel_axis=None` will
|
||||
be used as the new default value.
|
||||
|
||||
Returns
|
||||
-------
|
||||
filtered_image : ndarray
|
||||
the filtered array
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function is a wrapper around :func:`scipy.ndi.gaussian_filter`.
|
||||
|
||||
Integer arrays are converted to float.
|
||||
|
||||
The ``output`` should be floating point data type since gaussian converts
|
||||
to float provided ``image``. If ``output`` is not provided, another array
|
||||
will be allocated and returned as the result.
|
||||
|
||||
The multi-dimensional filter is implemented as a sequence of
|
||||
one-dimensional convolution filters. The intermediate arrays are
|
||||
stored in the same data type as the output. Therefore, for output
|
||||
types with a limited precision, the results may be imprecise
|
||||
because intermediate results may be stored with insufficient
|
||||
precision.
|
||||
|
||||
Examples
|
||||
--------
|
||||
|
||||
>>> a = np.zeros((3, 3))
|
||||
>>> a[1, 1] = 1
|
||||
>>> a
|
||||
array([[0., 0., 0.],
|
||||
[0., 1., 0.],
|
||||
[0., 0., 0.]])
|
||||
>>> gaussian(a, sigma=0.4) # mild smoothing
|
||||
array([[0.00163116, 0.03712502, 0.00163116],
|
||||
[0.03712502, 0.84496158, 0.03712502],
|
||||
[0.00163116, 0.03712502, 0.00163116]])
|
||||
>>> gaussian(a, sigma=1) # more smoothing
|
||||
array([[0.05855018, 0.09653293, 0.05855018],
|
||||
[0.09653293, 0.15915589, 0.09653293],
|
||||
[0.05855018, 0.09653293, 0.05855018]])
|
||||
>>> # Several modes are possible for handling boundaries
|
||||
>>> gaussian(a, sigma=1, mode='reflect')
|
||||
array([[0.08767308, 0.12075024, 0.08767308],
|
||||
[0.12075024, 0.16630671, 0.12075024],
|
||||
[0.08767308, 0.12075024, 0.08767308]])
|
||||
>>> # For RGB images, each is filtered separately
|
||||
>>> from skimage.data import astronaut
|
||||
>>> image = astronaut()
|
||||
>>> filtered_img = gaussian(image, sigma=1, channel_axis=-1)
|
||||
|
||||
"""
|
||||
if channel_axis is ChannelAxisNotSet:
|
||||
if image.ndim == 3 and image.shape[-1] == 3:
|
||||
warn(
|
||||
"Automatic detection of the color channel was deprecated in "
|
||||
"v0.19, and `channel_axis=None` will be the new default in "
|
||||
"v0.21. Set `channel_axis=-1` explicitly to silence this "
|
||||
"warning.",
|
||||
FutureWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
channel_axis = -1
|
||||
else:
|
||||
channel_axis = None
|
||||
|
||||
if np.any(np.asarray(sigma) < 0.0):
|
||||
raise ValueError("Sigma values less than zero are not valid")
|
||||
if channel_axis is not None:
|
||||
# do not filter across channels
|
||||
if not isinstance(sigma, Iterable):
|
||||
sigma = [sigma] * (image.ndim - 1)
|
||||
if len(sigma) == image.ndim - 1:
|
||||
sigma = list(sigma)
|
||||
sigma.insert(channel_axis % image.ndim, 0)
|
||||
image = convert_to_float(image, preserve_range)
|
||||
float_dtype = _supported_float_type(image.dtype)
|
||||
image = image.astype(float_dtype, copy=False)
|
||||
if (output is not None) and (not np.issubdtype(output.dtype, np.floating)):
|
||||
raise ValueError("Provided output data type is not float")
|
||||
return ndi.gaussian_filter(image, sigma, output=output,
|
||||
mode=mode, cval=cval, truncate=truncate)
|
||||
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/geometry.cpython-311-x86_64-linux-gnu.so
vendored
Executable file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/interpolation.cpython-311-x86_64-linux-gnu.so
vendored
Executable file
120
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tester.py
vendored
Normal file
@@ -0,0 +1,120 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def _show_skimage_info():
|
||||
import skimage
|
||||
print(f"skimage version {skimage.__version__}")
|
||||
|
||||
|
||||
class PytestTester:
|
||||
"""
|
||||
Pytest test runner.
|
||||
|
||||
This class is made available in ``skimage._shared.testing``, and a test
|
||||
function is typically added to a package's __init__.py like so::
|
||||
|
||||
from skimage._shared.testing import PytestTester
|
||||
test = PytestTester(__name__)
|
||||
del PytestTester
|
||||
|
||||
Calling this test function finds and runs all tests associated with the
|
||||
module and all its sub-modules.
|
||||
|
||||
Attributes
|
||||
----------
|
||||
module_name : str
|
||||
Full path to the package to test.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
module_name : module name
|
||||
The name of the module to test.
|
||||
|
||||
"""
|
||||
def __init__(self, module_name):
|
||||
self.module_name = module_name
|
||||
|
||||
def __call__(self, label='fast', verbose=1, extra_argv=None,
|
||||
doctests=False, coverage=False, durations=-1, tests=None):
|
||||
"""
|
||||
Run tests for module using pytest.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
label : {'fast', 'full'}, optional
|
||||
Identifies the tests to run. When set to 'fast', tests decorated
|
||||
with `pytest.mark.slow` are skipped, when 'full', the slow marker
|
||||
is ignored.
|
||||
verbose : int, optional
|
||||
Verbosity value for test outputs, in the range 1-3. Default is 1.
|
||||
extra_argv : list, optional
|
||||
List with any extra arguments to pass to pytests.
|
||||
doctests : bool, optional
|
||||
.. note:: Not supported
|
||||
coverage : bool, optional
|
||||
If True, report coverage of scikit-image code. Default is False.
|
||||
Requires installation of (pip) pytest-cov.
|
||||
durations : int, optional
|
||||
If < 0, do nothing, If 0, report time of all tests, if > 0,
|
||||
report the time of the slowest `timer` tests. Default is -1.
|
||||
tests : test or list of tests
|
||||
Tests to be executed with pytest '--pyargs'
|
||||
|
||||
Returns
|
||||
-------
|
||||
result : bool
|
||||
Return True on success, false otherwise.
|
||||
"""
|
||||
import pytest
|
||||
|
||||
module = sys.modules[self.module_name]
|
||||
module_path = os.path.abspath(module.__path__[0])
|
||||
|
||||
# setup the pytest arguments
|
||||
pytest_args = ["-l"]
|
||||
|
||||
# offset verbosity. The "-q" cancels a "-v".
|
||||
pytest_args += ["-q"]
|
||||
|
||||
# Filter out annoying import messages. Want these in both develop and
|
||||
# release mode.
|
||||
pytest_args += [
|
||||
"-W ignore:Not importing directory",
|
||||
"-W ignore:numpy.dtype size changed",
|
||||
"-W ignore:numpy.ufunc size changed", ]
|
||||
|
||||
if doctests:
|
||||
raise ValueError("Doctests not supported")
|
||||
|
||||
if extra_argv:
|
||||
pytest_args += list(extra_argv)
|
||||
|
||||
if verbose > 1:
|
||||
pytest_args += ["-" + "v" * (verbose - 1)]
|
||||
|
||||
if coverage:
|
||||
pytest_args += ["--cov=" + module_path]
|
||||
|
||||
if label == "fast":
|
||||
pytest_args += ["-m", "not slow"]
|
||||
elif label != "full":
|
||||
pytest_args += ["-m", label]
|
||||
|
||||
if durations >= 0:
|
||||
pytest_args += [f"--durations={durations}"]
|
||||
|
||||
if tests is None:
|
||||
tests = [self.module_name]
|
||||
|
||||
pytest_args += ["--pyargs"] + list(tests)
|
||||
|
||||
# run tests.
|
||||
_show_skimage_info()
|
||||
|
||||
try:
|
||||
code = pytest.main(pytest_args)
|
||||
except SystemExit as exc:
|
||||
code = exc.code
|
||||
|
||||
return code == 0
|
||||
331
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/testing.py
vendored
Normal file
@@ -0,0 +1,331 @@
|
||||
"""
|
||||
Testing utilities.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
import threading
|
||||
import functools
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
import numpy as np
|
||||
from numpy import testing
|
||||
from numpy.testing import (
|
||||
TestCase, assert_, assert_warns, assert_no_warnings,
|
||||
assert_equal, assert_almost_equal,
|
||||
assert_array_equal, assert_allclose,
|
||||
assert_array_almost_equal, assert_array_almost_equal_nulp,
|
||||
assert_array_less
|
||||
)
|
||||
|
||||
import warnings
|
||||
|
||||
from .. import data, io
|
||||
from ..data._fetchers import _fetch
|
||||
from ..util import img_as_uint, img_as_float, img_as_int, img_as_ubyte
|
||||
from ._warnings import expected_warnings
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
skipif = pytest.mark.skipif
|
||||
xfail = pytest.mark.xfail
|
||||
parametrize = pytest.mark.parametrize
|
||||
raises = pytest.raises
|
||||
fixture = pytest.fixture
|
||||
|
||||
SKIP_RE = re.compile(r"(\s*>>>.*?)(\s*)#\s*skip\s+if\s+(.*)$")
|
||||
|
||||
|
||||
# true if python is running in 32bit mode
|
||||
# Calculate the size of a void * pointer in bits
|
||||
# https://docs.python.org/3/library/struct.html
|
||||
arch32 = struct.calcsize("P") * 8 == 32
|
||||
|
||||
|
||||
_error_on_warnings = os.environ.get('SKIMAGE_TEST_STRICT_WARNINGS_GLOBAL', '0')
|
||||
if _error_on_warnings.lower() == 'true':
|
||||
_error_on_warnings = True
|
||||
elif _error_on_warnings.lower() == 'false':
|
||||
_error_on_warnings = False
|
||||
else:
|
||||
try:
|
||||
_error_on_warnings = bool(int(_error_on_warnings))
|
||||
except ValueError:
|
||||
_error_on_warnings = False
|
||||
|
||||
def assert_less(a, b, msg=None):
|
||||
message = f"{a!r} is not lower than {b!r}"
|
||||
if msg is not None:
|
||||
message += ": " + msg
|
||||
assert a < b, message
|
||||
|
||||
|
||||
def assert_greater(a, b, msg=None):
|
||||
message = f"{a!r} is not greater than {b!r}"
|
||||
if msg is not None:
|
||||
message += ": " + msg
|
||||
assert a > b, message
|
||||
|
||||
|
||||
def doctest_skip_parser(func):
|
||||
""" Decorator replaces custom skip test markup in doctests
|
||||
|
||||
Say a function has a docstring::
|
||||
|
||||
>>> something, HAVE_AMODULE, HAVE_BMODULE = 0, False, False
|
||||
>>> something # skip if not HAVE_AMODULE
|
||||
0
|
||||
>>> something # skip if HAVE_BMODULE
|
||||
0
|
||||
|
||||
This decorator will evaluate the expression after ``skip if``. If this
|
||||
evaluates to True, then the comment is replaced by ``# doctest: +SKIP``. If
|
||||
False, then the comment is just removed. The expression is evaluated in the
|
||||
``globals`` scope of `func`.
|
||||
|
||||
For example, if the module global ``HAVE_AMODULE`` is False, and module
|
||||
global ``HAVE_BMODULE`` is False, the returned function will have docstring::
|
||||
|
||||
>>> something # doctest: +SKIP
|
||||
>>> something + else # doctest: +SKIP
|
||||
>>> something # doctest: +SKIP
|
||||
|
||||
"""
|
||||
lines = func.__doc__.split('\n')
|
||||
new_lines = []
|
||||
for line in lines:
|
||||
match = SKIP_RE.match(line)
|
||||
if match is None:
|
||||
new_lines.append(line)
|
||||
continue
|
||||
code, space, expr = match.groups()
|
||||
|
||||
try:
|
||||
# Works as a function decorator
|
||||
if eval(expr, func.__globals__):
|
||||
code = code + space + "# doctest: +SKIP"
|
||||
except AttributeError:
|
||||
# Works as a class decorator
|
||||
if eval(expr, func.__init__.__globals__):
|
||||
code = code + space + "# doctest: +SKIP"
|
||||
|
||||
new_lines.append(code)
|
||||
func.__doc__ = "\n".join(new_lines)
|
||||
return func
|
||||
|
||||
|
||||
def roundtrip(image, plugin, suffix):
|
||||
"""Save and read an image using a specified plugin"""
|
||||
if '.' not in suffix:
|
||||
suffix = '.' + suffix
|
||||
with NamedTemporaryFile(suffix=suffix, delete=False) as temp_file:
|
||||
fname = temp_file.name
|
||||
io.imsave(fname, image, plugin=plugin)
|
||||
new = io.imread(fname, plugin=plugin)
|
||||
try:
|
||||
os.remove(fname)
|
||||
except Exception:
|
||||
pass
|
||||
return new
|
||||
|
||||
|
||||
def color_check(plugin, fmt='png'):
|
||||
"""Check roundtrip behavior for color images.
|
||||
|
||||
All major input types should be handled as ubytes and read
|
||||
back correctly.
|
||||
"""
|
||||
img = img_as_ubyte(data.chelsea())
|
||||
r1 = roundtrip(img, plugin, fmt)
|
||||
testing.assert_allclose(img, r1)
|
||||
|
||||
img2 = img > 128
|
||||
r2 = roundtrip(img2, plugin, fmt)
|
||||
testing.assert_allclose(img2, r2.astype(bool))
|
||||
|
||||
img3 = img_as_float(img)
|
||||
r3 = roundtrip(img3, plugin, fmt)
|
||||
testing.assert_allclose(r3, img)
|
||||
|
||||
img4 = img_as_int(img)
|
||||
if fmt.lower() in (('tif', 'tiff')):
|
||||
img4 -= 100
|
||||
r4 = roundtrip(img4, plugin, fmt)
|
||||
testing.assert_allclose(r4, img4)
|
||||
else:
|
||||
r4 = roundtrip(img4, plugin, fmt)
|
||||
testing.assert_allclose(r4, img_as_ubyte(img4))
|
||||
|
||||
img5 = img_as_uint(img)
|
||||
r5 = roundtrip(img5, plugin, fmt)
|
||||
testing.assert_allclose(r5, img)
|
||||
|
||||
|
||||
def mono_check(plugin, fmt='png'):
|
||||
"""Check the roundtrip behavior for images that support most types.
|
||||
|
||||
All major input types should be handled.
|
||||
"""
|
||||
|
||||
img = img_as_ubyte(data.moon())
|
||||
r1 = roundtrip(img, plugin, fmt)
|
||||
testing.assert_allclose(img, r1)
|
||||
|
||||
img2 = img > 128
|
||||
r2 = roundtrip(img2, plugin, fmt)
|
||||
testing.assert_allclose(img2, r2.astype(bool))
|
||||
|
||||
img3 = img_as_float(img)
|
||||
r3 = roundtrip(img3, plugin, fmt)
|
||||
if r3.dtype.kind == 'f':
|
||||
testing.assert_allclose(img3, r3)
|
||||
else:
|
||||
testing.assert_allclose(r3, img_as_uint(img))
|
||||
|
||||
img4 = img_as_int(img)
|
||||
if fmt.lower() in (('tif', 'tiff')):
|
||||
img4 -= 100
|
||||
r4 = roundtrip(img4, plugin, fmt)
|
||||
testing.assert_allclose(r4, img4)
|
||||
else:
|
||||
r4 = roundtrip(img4, plugin, fmt)
|
||||
testing.assert_allclose(r4, img_as_uint(img4))
|
||||
|
||||
img5 = img_as_uint(img)
|
||||
r5 = roundtrip(img5, plugin, fmt)
|
||||
testing.assert_allclose(r5, img5)
|
||||
|
||||
|
||||
def setup_test():
|
||||
"""Default package level setup routine for skimage tests.
|
||||
|
||||
Import packages known to raise warnings, and then
|
||||
force warnings to raise errors.
|
||||
|
||||
Also set the random seed to zero.
|
||||
"""
|
||||
warnings.simplefilter('default')
|
||||
|
||||
if _error_on_warnings:
|
||||
|
||||
np.random.seed(0)
|
||||
|
||||
warnings.simplefilter('error')
|
||||
|
||||
warnings.filterwarnings(
|
||||
'default', message='unclosed file', category=ResourceWarning
|
||||
)
|
||||
|
||||
# Ignore other warnings only seen when using older versions of
|
||||
# dependencies.
|
||||
warnings.filterwarnings(
|
||||
'default',
|
||||
message='Conversion of the second argument of issubdtype',
|
||||
category=FutureWarning
|
||||
)
|
||||
|
||||
warnings.filterwarnings(
|
||||
'default',
|
||||
message='the matrix subclass is not the recommended way',
|
||||
category=PendingDeprecationWarning, module='numpy'
|
||||
)
|
||||
|
||||
warnings.filterwarnings(
|
||||
'default',
|
||||
message='Your installed pillow version',
|
||||
category=UserWarning,
|
||||
module='skimage.io'
|
||||
)
|
||||
|
||||
# ignore warning from cycle_spin about Dask not being installed
|
||||
warnings.filterwarnings(
|
||||
'default',
|
||||
message='The optional dask dependency is not installed.',
|
||||
category=UserWarning
|
||||
)
|
||||
|
||||
warnings.filterwarnings(
|
||||
'default',
|
||||
message='numpy.ufunc size changed',
|
||||
category=RuntimeWarning
|
||||
)
|
||||
|
||||
warnings.filterwarnings(
|
||||
'default',
|
||||
message='\n\nThe scipy.sparse array containers',
|
||||
category=DeprecationWarning
|
||||
)
|
||||
|
||||
# ignore dtype deprecation warning from NumPy arising from use of SciPy
|
||||
# as a reference in test_watershed09. Should be fixed in scipy>=1.9.4
|
||||
# https://github.com/scipy/scipy/commit/da3ff893b9ac161938e11f9bcd5380e09cf03150
|
||||
warnings.filterwarnings(
|
||||
'default',
|
||||
message=('`np.int0` is a deprecated alias for `np.intp`'),
|
||||
category=DeprecationWarning
|
||||
)
|
||||
|
||||
|
||||
def teardown_test():
|
||||
"""Default package level teardown routine for skimage tests.
|
||||
|
||||
Restore warnings to default behavior
|
||||
"""
|
||||
if _error_on_warnings:
|
||||
warnings.resetwarnings()
|
||||
warnings.simplefilter('default')
|
||||
|
||||
|
||||
def fetch(data_filename):
|
||||
"""Attempt to fetch data, but if unavailable, skip the tests."""
|
||||
try:
|
||||
return _fetch(data_filename)
|
||||
except (ConnectionError, ModuleNotFoundError):
|
||||
pytest.skip(f'Unable to download {data_filename}',
|
||||
allow_module_level=True)
|
||||
|
||||
|
||||
@pytest.mark.skip()
|
||||
def test_parallel(num_threads=2, warnings_matching=None):
|
||||
"""Decorator to run the same function multiple times in parallel.
|
||||
|
||||
This decorator is useful to ensure that separate threads execute
|
||||
concurrently and correctly while releasing the GIL.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
num_threads : int, optional
|
||||
The number of times the function is run in parallel.
|
||||
|
||||
warnings_matching: list or None
|
||||
This parameter is passed on to `expected_warnings` so as not to have
|
||||
race conditions with the warnings filters. A single
|
||||
`expected_warnings` context manager is used for all threads.
|
||||
If None, then no warnings are checked.
|
||||
|
||||
"""
|
||||
|
||||
assert num_threads > 0
|
||||
|
||||
def wrapper(func):
|
||||
@functools.wraps(func)
|
||||
def inner(*args, **kwargs):
|
||||
with expected_warnings(warnings_matching):
|
||||
threads = []
|
||||
for i in range(num_threads - 1):
|
||||
thread = threading.Thread(target=func, args=args,
|
||||
kwargs=kwargs)
|
||||
threads.append(thread)
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
|
||||
func(*args, **kwargs)
|
||||
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
|
||||
return inner
|
||||
|
||||
return wrapper
|
||||
0
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/__init__.py
vendored
Normal file
94
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_coord.py
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
import time
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
from scipy.spatial.distance import pdist, minkowski
|
||||
|
||||
from skimage._shared.coord import ensure_spacing
|
||||
|
||||
|
||||
@pytest.mark.parametrize("p", [1, 2, np.inf])
|
||||
@pytest.mark.parametrize("size", [30, 50, None])
|
||||
def test_ensure_spacing_trivial(p, size):
|
||||
# --- Empty input
|
||||
assert ensure_spacing([], p_norm=p) == []
|
||||
|
||||
# --- A unique point
|
||||
coord = np.random.randn(1, 2)
|
||||
assert np.array_equal(coord, ensure_spacing(coord, p_norm=p,
|
||||
min_split_size=size))
|
||||
|
||||
# --- Verified spacing
|
||||
coord = np.random.randn(100, 2)
|
||||
|
||||
# --- 0 spacing
|
||||
assert np.array_equal(coord, ensure_spacing(coord, spacing=0, p_norm=p,
|
||||
min_split_size=size))
|
||||
|
||||
# Spacing is chosen to be half the minimum distance
|
||||
spacing = pdist(coord, metric=minkowski, p=p).min() * 0.5
|
||||
|
||||
out = ensure_spacing(coord, spacing=spacing, p_norm=p,
|
||||
min_split_size=size)
|
||||
|
||||
assert np.array_equal(coord, out)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("ndim", [1, 2, 3, 4, 5])
|
||||
@pytest.mark.parametrize("size", [2, 10, None])
|
||||
def test_ensure_spacing_nD(ndim, size):
|
||||
coord = np.ones((5, ndim))
|
||||
|
||||
expected = np.ones((1, ndim))
|
||||
|
||||
assert np.array_equal(ensure_spacing(coord, min_split_size=size), expected)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("p", [1, 2, np.inf])
|
||||
@pytest.mark.parametrize("size", [50, 100, None])
|
||||
def test_ensure_spacing_batch_processing(p, size):
|
||||
coord = np.random.randn(100, 2)
|
||||
|
||||
# --- Consider the average distance btween the point as spacing
|
||||
spacing = np.median(pdist(coord, metric=minkowski, p=p))
|
||||
|
||||
expected = ensure_spacing(coord, spacing=spacing, p_norm=p)
|
||||
|
||||
assert np.array_equal(ensure_spacing(coord, spacing=spacing, p_norm=p,
|
||||
min_split_size=size),
|
||||
expected)
|
||||
|
||||
|
||||
def test_max_batch_size():
|
||||
"""Small batches are slow, large batches -> large allocations -> also slow.
|
||||
|
||||
https://github.com/scikit-image/scikit-image/pull/6035#discussion_r751518691
|
||||
"""
|
||||
coords = np.random.randint(low=0, high=1848, size=(40000, 2))
|
||||
tstart = time.time()
|
||||
ensure_spacing(coords, spacing=100, min_split_size=50,
|
||||
max_split_size=2000)
|
||||
dur1 = time.time() - tstart
|
||||
|
||||
tstart = time.time()
|
||||
ensure_spacing(coords, spacing=100, min_split_size=50,
|
||||
max_split_size=20000)
|
||||
dur2 = time.time() - tstart
|
||||
|
||||
# Originally checked dur1 < dur2 to assert that the default batch size was
|
||||
# faster than a much larger batch size. However, on rare occasion a CI test
|
||||
# case would fail with dur1 ~5% larger than dur2. To be more robust to
|
||||
# variable load or differences across architectures, we relax this here.
|
||||
assert dur1 < 1.33 * dur2
|
||||
|
||||
|
||||
@pytest.mark.parametrize("p", [1, 2, np.inf])
|
||||
@pytest.mark.parametrize("size", [30, 50, None])
|
||||
def test_ensure_spacing_p_norm(p, size):
|
||||
coord = np.random.randn(100, 2)
|
||||
|
||||
# --- Consider the average distance btween the point as spacing
|
||||
spacing = np.median(pdist(coord, metric=minkowski, p=p))
|
||||
out = ensure_spacing(coord, spacing=spacing, p_norm=p, min_split_size=size)
|
||||
|
||||
assert pdist(out, metric=minkowski, p=p).min() > spacing
|
||||
21
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_fast_exp.py
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
from ..fast_exp import fast_exp
|
||||
import numpy as np
|
||||
|
||||
|
||||
def test_fast_exp():
|
||||
|
||||
X = np.linspace(-5, 0, 5000, endpoint=True)
|
||||
|
||||
# Ground truth
|
||||
Y = np.exp(X)
|
||||
|
||||
# Approximation at double precision
|
||||
_y_f64 = np.array([fast_exp['float64_t'](x) for x in X])
|
||||
|
||||
# Approximation at single precision
|
||||
_y_f32 = np.array([fast_exp['float32_t'](x) for x in X.astype('float32')],
|
||||
dtype='float32')
|
||||
|
||||
for _y in [_y_f64, _y_f32]:
|
||||
|
||||
assert np.abs(Y - _y).mean() < 3e-3
|
||||
78
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_geometry.py
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
import pytest
|
||||
from skimage._shared._geometry import polygon_clip, polygon_area
|
||||
|
||||
import numpy as np
|
||||
from numpy.testing import assert_equal, assert_almost_equal
|
||||
|
||||
pytest.importorskip("matplotlib")
|
||||
|
||||
|
||||
hand = np.array(
|
||||
[[ 1.64516129, 1.16145833 ],
|
||||
[ 1.64516129, 1.59375 ],
|
||||
[ 1.35080645, 1.921875 ],
|
||||
[ 1.375 , 2.18229167 ],
|
||||
[ 1.68548387, 1.9375 ],
|
||||
[ 1.60887097, 2.55208333 ],
|
||||
[ 1.68548387, 2.69791667 ],
|
||||
[ 1.76209677, 2.56770833 ],
|
||||
[ 1.83064516, 1.97395833 ],
|
||||
[ 1.89516129, 2.75 ],
|
||||
[ 1.9516129 , 2.84895833 ],
|
||||
[ 2.01209677, 2.76041667 ],
|
||||
[ 1.99193548, 1.99479167 ],
|
||||
[ 2.11290323, 2.63020833 ],
|
||||
[ 2.2016129 , 2.734375 ],
|
||||
[ 2.25403226, 2.60416667 ],
|
||||
[ 2.14919355, 1.953125 ],
|
||||
[ 2.30645161, 2.36979167 ],
|
||||
[ 2.39112903, 2.36979167 ],
|
||||
[ 2.41532258, 2.1875 ],
|
||||
[ 2.1733871 , 1.703125 ],
|
||||
[ 2.07782258, 1.16666667 ]])
|
||||
|
||||
|
||||
def test_polygon_area():
|
||||
x = [0, 0, 1, 1]
|
||||
y = [0, 1, 1, 0]
|
||||
|
||||
assert_almost_equal(polygon_area(y, x), 1)
|
||||
|
||||
x = [0, 0, 1]
|
||||
y = [0, 1, 1]
|
||||
|
||||
assert_almost_equal(polygon_area(y, x), 0.5)
|
||||
|
||||
x = [0, 0, 0.5, 1, 1, 0.5]
|
||||
y = [0, 1, 0.5, 1, 0, 0.5]
|
||||
|
||||
assert_almost_equal(polygon_area(y, x), 0.5)
|
||||
|
||||
|
||||
def test_poly_clip():
|
||||
x = [0, 1, 2, 1]
|
||||
y = [0, -1, 0, 1]
|
||||
|
||||
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
|
||||
assert_equal(polygon_area(yc, xc), 0.5)
|
||||
|
||||
x = [-1, 1.5, 1.5, -1]
|
||||
y = [.5, 0.5, 1.5, 1.5]
|
||||
yc, xc = polygon_clip(y, x, 0, 0, 1, 1)
|
||||
assert_equal(polygon_area(yc, xc), 0.5)
|
||||
|
||||
|
||||
def test_hand_clip():
|
||||
(r0, c0, r1, c1) = (1.0, 1.5, 2.1, 2.5)
|
||||
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
|
||||
assert_equal(clip_r.size, 19)
|
||||
assert_equal(clip_r[0], clip_r[-1])
|
||||
assert_equal(clip_c[0], clip_c[-1])
|
||||
|
||||
(r0, c0, r1, c1) = (1.0, 1.5, 1.7, 2.5)
|
||||
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
|
||||
assert_equal(clip_r.size, 6)
|
||||
|
||||
(r0, c0, r1, c1) = (1.0, 1.5, 1.5, 2.5)
|
||||
clip_r, clip_c = polygon_clip(hand[:, 1], hand[:, 0], r0, c0, r1, c1)
|
||||
assert_equal(clip_r.size, 5)
|
||||
27
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_interpolation.py
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
from skimage._shared.interpolation import coord_map_py
|
||||
from skimage._shared.testing import assert_array_equal
|
||||
|
||||
def test_coord_map():
|
||||
symmetric = [coord_map_py(4, n, 'S') for n in range(-6, 6)]
|
||||
expected_symmetric = [2, 3, 3, 2, 1, 0, 0, 1, 2, 3, 3, 2]
|
||||
assert_array_equal(symmetric, expected_symmetric)
|
||||
|
||||
wrap = [coord_map_py(4, n, 'W') for n in range(-6, 6)]
|
||||
expected_wrap = [2, 3, 0, 1, 2, 3, 0, 1, 2, 3, 0, 1]
|
||||
assert_array_equal(wrap, expected_wrap)
|
||||
|
||||
edge = [coord_map_py(4, n, 'E') for n in range(-6, 6)]
|
||||
expected_edge = [0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 3, 3]
|
||||
assert_array_equal(edge, expected_edge)
|
||||
|
||||
reflect = [coord_map_py(4, n, 'R') for n in range(-6, 6)]
|
||||
expected_reflect = [0, 1, 2, 3, 2, 1, 0, 1, 2, 3, 2, 1]
|
||||
assert_array_equal(reflect, expected_reflect)
|
||||
|
||||
reflect = [coord_map_py(1, n, 'R') for n in range(-6, 6)]
|
||||
expected_reflect = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
|
||||
assert_array_equal(reflect, expected_reflect)
|
||||
|
||||
other = [coord_map_py(4, n, 'undefined') for n in range(-6, 6)]
|
||||
expected_other = list(range(-6, 6))
|
||||
assert_array_equal(other, expected_other)
|
||||
42
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_safe_as_int.py
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
import numpy as np
|
||||
from skimage._shared.utils import safe_as_int
|
||||
from skimage._shared import testing
|
||||
|
||||
|
||||
def test_int_cast_not_possible():
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(7.1)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int([7.1, 0.9])
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(np.r_[7.1, 0.9])
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int((7.1, 0.9))
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(((3, 4, 1),
|
||||
(2, 7.6, 289)))
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(7.1, 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int([7.1, 0.9], 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(np.r_[7.1, 0.9], 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int((7.1, 0.9), 0.09)
|
||||
with testing.raises(ValueError):
|
||||
safe_as_int(((3, 4, 1),
|
||||
(2, 7.6, 289)), 0.25)
|
||||
|
||||
|
||||
def test_int_cast_possible():
|
||||
testing.assert_equal(safe_as_int(7.1, atol=0.11), 7)
|
||||
testing.assert_equal(safe_as_int(-7.1, atol=0.11), -7)
|
||||
testing.assert_equal(safe_as_int(41.9, atol=0.11), 42)
|
||||
testing.assert_array_equal(safe_as_int([2, 42, 5789234.0, 87, 4]),
|
||||
np.r_[2, 42, 5789234, 87, 4])
|
||||
testing.assert_array_equal(safe_as_int(np.r_[[[3, 4, 1.000000001],
|
||||
[7, 2, -8.999999999],
|
||||
[6, 9, -4234918347.]]]),
|
||||
np.r_[[[3, 4, 1],
|
||||
[7, 2, -9],
|
||||
[6, 9, -4234918347]]])
|
||||
123
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_testing.py
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
""" Testing decorators module
|
||||
"""
|
||||
|
||||
from numpy.testing import assert_equal
|
||||
from skimage._shared.testing import doctest_skip_parser, test_parallel
|
||||
from skimage._shared import testing
|
||||
|
||||
from skimage._shared._warnings import expected_warnings
|
||||
from warnings import warn
|
||||
|
||||
|
||||
def test_skipper():
|
||||
def f():
|
||||
pass
|
||||
|
||||
class c():
|
||||
|
||||
def __init__(self):
|
||||
self.me = "I think, therefore..."
|
||||
|
||||
docstring = \
|
||||
""" Header
|
||||
|
||||
>>> something # skip if not HAVE_AMODULE
|
||||
>>> something + else
|
||||
>>> a = 1 # skip if not HAVE_BMODULE
|
||||
>>> something2 # skip if HAVE_AMODULE
|
||||
"""
|
||||
f.__doc__ = docstring
|
||||
c.__doc__ = docstring
|
||||
|
||||
global HAVE_AMODULE, HAVE_BMODULE
|
||||
HAVE_AMODULE = False
|
||||
HAVE_BMODULE = True
|
||||
|
||||
f2 = doctest_skip_parser(f)
|
||||
c2 = doctest_skip_parser(c)
|
||||
assert f is f2
|
||||
assert c is c2
|
||||
|
||||
expected = \
|
||||
""" Header
|
||||
|
||||
>>> something # doctest: +SKIP
|
||||
>>> something + else
|
||||
>>> a = 1
|
||||
>>> something2
|
||||
"""
|
||||
assert_equal(f2.__doc__, expected)
|
||||
assert_equal(c2.__doc__, expected)
|
||||
|
||||
HAVE_AMODULE = True
|
||||
HAVE_BMODULE = False
|
||||
f.__doc__ = docstring
|
||||
c.__doc__ = docstring
|
||||
f2 = doctest_skip_parser(f)
|
||||
c2 = doctest_skip_parser(c)
|
||||
|
||||
assert f is f2
|
||||
expected = \
|
||||
""" Header
|
||||
|
||||
>>> something
|
||||
>>> something + else
|
||||
>>> a = 1 # doctest: +SKIP
|
||||
>>> something2 # doctest: +SKIP
|
||||
"""
|
||||
assert_equal(f2.__doc__, expected)
|
||||
assert_equal(c2.__doc__, expected)
|
||||
|
||||
del HAVE_AMODULE
|
||||
f.__doc__ = docstring
|
||||
c.__doc__ = docstring
|
||||
with testing.raises(NameError):
|
||||
doctest_skip_parser(f)
|
||||
with testing.raises(NameError):
|
||||
doctest_skip_parser(c)
|
||||
|
||||
|
||||
def test_test_parallel():
|
||||
state = []
|
||||
|
||||
@test_parallel()
|
||||
def change_state1():
|
||||
state.append(None)
|
||||
change_state1()
|
||||
assert len(state) == 2
|
||||
|
||||
@test_parallel(num_threads=1)
|
||||
def change_state2():
|
||||
state.append(None)
|
||||
change_state2()
|
||||
assert len(state) == 3
|
||||
|
||||
@test_parallel(num_threads=3)
|
||||
def change_state3():
|
||||
state.append(None)
|
||||
change_state3()
|
||||
assert len(state) == 6
|
||||
|
||||
|
||||
def test_parallel_warning():
|
||||
@test_parallel()
|
||||
def change_state_warns_fails():
|
||||
warn("Test warning for test parallel", stacklevel=2)
|
||||
|
||||
with expected_warnings(['Test warning for test parallel']):
|
||||
change_state_warns_fails()
|
||||
|
||||
@test_parallel(warnings_matching=['Test warning for test parallel'])
|
||||
def change_state_warns_passes():
|
||||
warn("Test warning for test parallel", stacklevel=2)
|
||||
|
||||
change_state_warns_passes()
|
||||
|
||||
|
||||
def test_expected_warnings_noop():
|
||||
# This will ensure the line beolow it behaves like a no-op
|
||||
with expected_warnings(['Expected warnings test']):
|
||||
|
||||
# This should behave as a no-op
|
||||
with expected_warnings(None):
|
||||
warn('Expected warnings test')
|
||||
294
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_utils.py
vendored
Normal file
@@ -0,0 +1,294 @@
|
||||
import sys
|
||||
import warnings
|
||||
import inspect
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
|
||||
from skimage._shared import testing
|
||||
from skimage._shared.utils import (check_nD, deprecate_kwarg,
|
||||
_validate_interpolation_order,
|
||||
change_default_value, remove_arg,
|
||||
_supported_float_type,
|
||||
channel_as_last_axis)
|
||||
|
||||
complex_dtypes = [np.complex64, np.complex128]
|
||||
if hasattr(np, 'complex256'):
|
||||
complex_dtypes += [np.complex256]
|
||||
|
||||
have_numpydoc = False
|
||||
try:
|
||||
import numpydoc # noqa: F401
|
||||
have_numpydoc = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
def test_remove_argument():
|
||||
|
||||
@remove_arg('arg1', changed_version='0.12')
|
||||
def foo(arg0, arg1=0, arg2=1):
|
||||
"""Expected docstring"""
|
||||
return arg0, arg1, arg2
|
||||
|
||||
@remove_arg('arg1', changed_version='0.12',
|
||||
help_msg="Some indication on future behavior")
|
||||
def bar(arg0, arg1=0, arg2=1):
|
||||
"""Expected docstring"""
|
||||
return arg0, arg1, arg2
|
||||
|
||||
# Assert warning messages
|
||||
expected_msg = ("arg1 argument is deprecated and will be removed "
|
||||
"in version 0.12. To avoid this warning, "
|
||||
"please do not use the arg1 argument. Please see "
|
||||
"foo documentation for more details.")
|
||||
|
||||
with pytest.warns(FutureWarning) as record:
|
||||
assert foo(0, 1) == (0, 1, 1)
|
||||
|
||||
assert str(record[0].message) == expected_msg
|
||||
|
||||
with pytest.warns(FutureWarning) as record:
|
||||
assert foo(0, arg1=1) == (0, 1, 1)
|
||||
|
||||
assert str(record[0].message) == expected_msg
|
||||
|
||||
expected_msg = ("arg1 argument is deprecated and will be removed "
|
||||
"in version 0.12. To avoid this warning, "
|
||||
"please do not use the arg1 argument. Please see "
|
||||
"bar documentation for more details."
|
||||
" Some indication on future behavior")
|
||||
|
||||
with pytest.warns(FutureWarning) as record:
|
||||
assert bar(0, 1) == (0, 1, 1)
|
||||
|
||||
assert str(record[0].message) == expected_msg
|
||||
|
||||
with pytest.warns(FutureWarning) as record:
|
||||
assert bar(0, arg1=1) == (0, 1, 1)
|
||||
|
||||
assert str(record[0].message) == expected_msg
|
||||
with warnings.catch_warnings(record=True) as recorded:
|
||||
# No kwargs
|
||||
assert foo(0) == (0, 0, 1)
|
||||
assert foo(0, arg2=0) == (0, 0, 0)
|
||||
|
||||
# Function name and doc is preserved
|
||||
assert foo.__name__ == 'foo'
|
||||
if sys.flags.optimize < 2:
|
||||
# if PYTHONOPTIMIZE is set to 2, docstrings are stripped
|
||||
assert foo.__doc__ == 'Expected docstring'
|
||||
# Assert no warnings were raised
|
||||
assert len(recorded) == 0
|
||||
|
||||
|
||||
def test_change_default_value():
|
||||
|
||||
@change_default_value('arg1', new_value=-1, changed_version='0.12')
|
||||
def foo(arg0, arg1=0, arg2=1):
|
||||
"""Expected docstring"""
|
||||
return arg0, arg1, arg2
|
||||
|
||||
@change_default_value('arg1', new_value=-1, changed_version='0.12',
|
||||
warning_msg="Custom warning message")
|
||||
def bar(arg0, arg1=0, arg2=1):
|
||||
"""Expected docstring"""
|
||||
return arg0, arg1, arg2
|
||||
|
||||
# Assert warning messages
|
||||
with pytest.warns(FutureWarning) as record:
|
||||
assert foo(0) == (0, 0, 1)
|
||||
assert bar(0) == (0, 0, 1)
|
||||
|
||||
expected_msg = ("The new recommended value for arg1 is -1. Until "
|
||||
"version 0.12, the default arg1 value is 0. From "
|
||||
"version 0.12, the arg1 default value will be -1. "
|
||||
"To avoid this warning, please explicitly set arg1 value.")
|
||||
|
||||
assert str(record[0].message) == expected_msg
|
||||
assert str(record[1].message) == "Custom warning message"
|
||||
|
||||
# Assert that nothing happens if arg1 is set
|
||||
with warnings.catch_warnings(record=True) as recorded:
|
||||
# No kwargs
|
||||
assert foo(0, 2) == (0, 2, 1)
|
||||
assert foo(0, arg1=0) == (0, 0, 1)
|
||||
|
||||
# Function name and doc is preserved
|
||||
assert foo.__name__ == 'foo'
|
||||
if sys.flags.optimize < 2:
|
||||
# if PYTHONOPTIMIZE is set to 2, docstrings are stripped
|
||||
assert foo.__doc__ == 'Expected docstring'
|
||||
# Assert no warnings were raised
|
||||
assert len(recorded) == 0
|
||||
|
||||
|
||||
def test_deprecate_kwarg():
    """Exercise `deprecate_kwarg`: warning text, new-API silence, docstring."""

    # Generic warning message, positional `deprecated_version`.
    @deprecate_kwarg({'old_arg1': 'new_arg1'}, '0.19')
    def foo(arg0, new_arg1=1, arg2=None):
        """Expected docstring"""
        return arg0, new_arg1, arg2

    # Custom warning message.
    @deprecate_kwarg({'old_arg1': 'new_arg1'},
                     deprecated_version='0.19',
                     warning_msg="Custom warning message")
    def bar(arg0, new_arg1=1, arg2=None):
        """Expected docstring"""
        return arg0, new_arg1, arg2

    # Assert that the DeprecationWarning is raised when the deprecated
    # argument name is used and that the result is valid
    with pytest.warns(FutureWarning) as record:
        assert foo(0, old_arg1=1) == (0, 1, None)
        assert bar(0, old_arg1=1) == (0, 1, None)

    msg = ("`old_arg1` is a deprecated argument name "
           "for `foo`. Please use `new_arg1` instead.")
    assert str(record[0].message) == msg
    assert str(record[1].message) == "Custom warning message"

    # Assert that nothing happens when the function is called with the
    # new API
    with warnings.catch_warnings(record=True) as recorded:
        # No kwargs
        assert foo(0) == (0, 1, None)
        assert foo(0, 2) == (0, 2, None)
        assert foo(0, 1, 2) == (0, 1, 2)
        # Kwargs without deprecated argument
        assert foo(0, new_arg1=1, arg2=2) == (0, 1, 2)
        assert foo(0, new_arg1=2) == (0, 2, None)
        assert foo(0, arg2=2) == (0, 1, 2)
        assert foo(0, 1, arg2=2) == (0, 1, 2)
        # Function name and doc is preserved
        assert foo.__name__ == 'foo'
        if sys.flags.optimize < 2:
            # if PYTHONOPTIMIZE is set to 2, docstrings are stripped
            if not have_numpydoc:
                assert foo.__doc__ == """Expected docstring"""
            else:
                # numpydoc appends an "Other Parameters" section describing
                # the deprecated name; the exact text must match.
                assert foo.__doc__ == """Expected docstring

    Other Parameters
    ----------------
    old_arg1 : DEPRECATED
        Deprecated in favor of `new_arg1`.

        .. deprecated:: 0.19
    """

    assert len(recorded) == 0
|
||||
|
||||
|
||||
def test_check_nD():
    """check_nD must reject a zero-size array even with the right ndim."""
    image = np.random.random(200 ** 2).reshape((200, 200))
    # A slice whose start lies beyond its stop yields a zero-size array.
    empty = image[10:30, 30:10]
    with testing.raises(ValueError):
        check_nD(empty, 2)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dtype', [bool, int, np.uint8, np.uint16,
                                   float, np.float32, np.float64])
@pytest.mark.parametrize('order', [None, -1, 0, 1, 2, 3, 4, 5, 6])
def test_validate_interpolation_order(dtype, order):
    """Check defaults, range validation, and the bool-dtype restriction."""
    if order is None:
        # Default: nearest-neighbour for bool images, linear otherwise.
        expected_default = 0 if dtype == bool else 1
        assert _validate_interpolation_order(dtype, None) == expected_default
    elif not 0 <= order <= 5:
        # Orders outside the supported 0-5 range must be rejected.
        with testing.raises(ValueError):
            _validate_interpolation_order(dtype, order)
    elif dtype == bool and order != 0:
        # Interpolation is undefined for bool images.
        with pytest.raises(ValueError):
            _validate_interpolation_order(bool, order)
    else:
        # Every valid combination is returned unchanged.
        assert _validate_interpolation_order(dtype, order) == order
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'dtype',
    [bool, np.float16, np.float32, np.float64, np.uint8, np.uint16, np.uint32,
     np.uint64, np.int8, np.int16, np.int32, np.int64]
)
def test_supported_float_dtype_real(dtype):
    """float16/float32 map to float32; all other real dtypes to float64."""
    expected = np.float32 if dtype in (np.float16, np.float32) else np.float64
    assert _supported_float_type(dtype) == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dtype', complex_dtypes)
@pytest.mark.parametrize('allow_complex', [False, True])
def test_supported_float_dtype_complex(dtype, allow_complex):
    """Complex dtypes are preserved/demoted only when explicitly allowed."""
    if not allow_complex:
        # Complex input must be rejected unless opted in.
        with testing.raises(ValueError):
            _supported_float_type(dtype, allow_complex=allow_complex)
        return
    expected = np.complex64 if dtype == np.complex64 else np.complex128
    result = _supported_float_type(dtype, allow_complex=allow_complex)
    assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'dtype', ['f', 'float32', np.float32, np.dtype(np.float32)]
)
def test_supported_float_dtype_input_kinds(dtype):
    """Every spelling of float32 (char code, name, type, dtype) resolves."""
    result = _supported_float_type(dtype)
    assert result == np.float32
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    'dtypes, expected',
    [
        ((np.float16, np.float64), np.float64),
        ([np.float32, np.uint16, np.int8], np.float64),
        ({np.float32, np.float16}, np.float32),
    ]
)
def test_supported_float_dtype_sequence(dtypes, expected):
    """A collection of dtypes reduces to a single common float type."""
    result = _supported_float_type(dtypes)
    assert result == expected
|
||||
|
||||
|
||||
@channel_as_last_axis(multichannel_output=False)
def _decorated_channel_axis_size(x, *, channel_axis=None):
    # Test helper: returns the size of the channel dimension, or None when
    # no channel axis is given. Inside the decorated body the channel axis
    # is expected to have been normalized to the last position.
    if channel_axis is None:
        return None
    # The decorator must rewrite channel_axis to -1 before calling us.
    assert channel_axis == -1
    return x.shape[-1]
|
||||
|
||||
|
||||
@testing.parametrize('channel_axis', [None, 0, 1, 2, -1, -2, -3])
def test_decorated_channel_axis_shape(channel_axis):
    """channel_as_last_axis must report the size of the requested axis."""
    # Unique extent along each axis so the reported size identifies which
    # axis the decorator actually moved.
    data = np.zeros((2, 3, 4))

    reported = _decorated_channel_axis_size(data, channel_axis=channel_axis)
    if channel_axis is None:
        assert reported is None
    else:
        assert reported == data.shape[channel_axis]
|
||||
|
||||
|
||||
# Minimal decorated helper used by test_deprecate_kwarg_location to check
# where the deprecation warning is attributed.
@deprecate_kwarg({"old_kwarg": "new_kwarg"}, deprecated_version="x.y.z")
def _function_with_deprecated_kwarg(*, new_kwarg):
    pass
|
||||
|
||||
|
||||
def test_deprecate_kwarg_location():
    """Assert that warning message issued by deprecate_kwarg points to
    file and line number where decorated function is called.
    """
    with pytest.warns(FutureWarning) as record:
        _function_with_deprecated_kwarg(old_kwarg=True)
        expected_lineno = inspect.currentframe().f_lineno - 1
        # ``- 1`` points at the call on the previous line; these two
        # statements must stay adjacent for the check to be meaningful.
    assert record[0].lineno == expected_lineno
    assert record[0].filename == __file__
|
||||
41
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_version_requirements.py
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
"""Tests for the version requirement functions.
|
||||
|
||||
"""
|
||||
import numpy as np
|
||||
from numpy.testing import assert_equal
|
||||
from skimage._shared import version_requirements as version_req
|
||||
from skimage._shared import testing
|
||||
|
||||
|
||||
def test_get_module_version():
    """Installed modules report a truthy version; missing ones raise."""
    for installed in ('numpy', 'scipy'):
        assert version_req.get_module_version(installed)
    with testing.raises(ImportError):
        version_req.get_module_version('fakenumpy')
|
||||
|
||||
|
||||
def test_is_installed():
    """Sanity-check version comparisons in both directions."""
    # The running interpreter always satisfies ">=2.7".
    assert version_req.is_installed('python', '>=2.7')
    # No importable numpy is older than 1.0.
    assert not version_req.is_installed('numpy', '<1.0')
|
||||
|
||||
|
||||
def test_require():
    """`require` passes calls through when satisfied, raises otherwise."""
    # Both requirements hold for any supported environment, so the call
    # simply goes through.
    @version_req.require('python', '>2.7')
    @version_req.require('numpy', '>1.5')
    def satisfied():
        return 1

    assert_equal(satisfied(), 1)

    # An unsatisfiable requirement turns the call into an ImportError.
    @version_req.require('scipy', '<0.1')
    def unsatisfied():
        return 0

    with testing.raises(ImportError):
        unsatisfied()
|
||||
|
||||
|
||||
def test_get_module():
    # get_module must return the imported module object itself, not a copy.
    assert version_req.get_module("numpy") is np
|
||||
37
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/tests/test_warnings.py
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
import os
|
||||
from skimage._shared._warnings import expected_warnings
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
def setup():
    """Hide SKIMAGE_TEST_STRICT_WARNINGS for the duration of one test."""
    saved_strictness = os.environ.pop('SKIMAGE_TEST_STRICT_WARNINGS', None)
    yield
    # Restore the caller's original setting, if there was one.
    if saved_strictness is not None:
        os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = saved_strictness
|
||||
|
||||
|
||||
def test_strict_warnigns_default(setup):
    """With no env var set, a missing expected warning is an error."""
    # NOTE(review): "warnigns" is a typo in the test name; it is kept so
    # external test selections keep matching.
    with pytest.raises(ValueError):
        with expected_warnings(['some warnings']):
            pass
|
||||
|
||||
|
||||
@pytest.mark.parametrize('strictness', ['1', 'true', 'True', 'TRUE'])
def test_strict_warning_true(setup, strictness):
    """Every truthy spelling of the env var keeps strict checking on."""
    os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = strictness
    with pytest.raises(ValueError):
        with expected_warnings(['some warnings']):
            pass
|
||||
|
||||
|
||||
@pytest.mark.parametrize('strictness', ['0', 'false', 'False', 'FALSE'])
def test_strict_warning_false(setup, strictness):
    """Falsy spellings disable strictness, so nothing should raise here."""
    os.environ['SKIMAGE_TEST_STRICT_WARNINGS'] = strictness
    with expected_warnings(['some warnings']):
        pass
|
||||
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/transform.cpython-311-x86_64-linux-gnu.so
vendored
Executable file
759
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/utils.py
vendored
Normal file
@@ -0,0 +1,759 @@
|
||||
import inspect
|
||||
import functools
|
||||
import sys
|
||||
import warnings
|
||||
from collections.abc import Iterable
|
||||
|
||||
import numpy as np
|
||||
|
||||
from ._warnings import all_warnings, warn
|
||||
|
||||
|
||||
# Public names re-exported by this module.
__all__ = ['deprecated', 'get_bound_method_class', 'all_warnings',
           'safe_as_int', 'check_shape_equality', 'check_nD', 'warn',
           'reshape_nd', 'identity', 'slice_at_axis']
|
||||
|
||||
|
||||
class skimage_deprecation(Warning):
    """Warning category for scikit-image deprecations.

    A dedicated subclass is used because Python silences deprecation
    warnings by default.
    """
|
||||
|
||||
|
||||
def _get_stack_rank(func):
    """Return function rank in the call stack."""
    # Walk down the ``__wrapped__`` chain (set by functools.wraps),
    # counting one level per decorator layer.
    rank = 0
    while _is_wrapped(func):
        func = func.__wrapped__
        rank += 1
    return rank
|
||||
|
||||
|
||||
def _is_wrapped(func):
|
||||
return "__wrapped__" in dir(func)
|
||||
|
||||
|
||||
def _get_stack_length(func):
    """Return function call stack length."""
    # Prefer the module-level binding of ``func.__name__`` (the outermost
    # decorated version, when the function was rebound at module scope);
    # fall back to ``func`` itself.
    outermost = func.__globals__.get(func.__name__, func)
    return _get_stack_rank(outermost)
|
||||
|
||||
|
||||
class _DecoratorBaseClass:
    """Base class managing decorators' warnings stacklevel.

    The `_stack_length` class variable caches, per function name, the
    number of times a function is wrapped by a decorator.

    Let `stack_length` be the total number of times a decorated
    function is wrapped, and `stack_rank` be the rank of the decorator
    in the decorators stack. The stacklevel of a warning is then
    `stacklevel = 1 + stack_length - stack_rank`.
    """

    _stack_length = {}

    def get_stack_length(self, func):
        # EAFP: compute the length only when the name is not cached.
        try:
            return self._stack_length[func.__name__]
        except KeyError:
            return _get_stack_length(func)
|
||||
|
||||
|
||||
class change_default_value(_DecoratorBaseClass):
    """Decorator for changing the default value of an argument.

    Parameters
    ----------
    arg_name: str
        The name of the argument to be updated.
    new_value: any
        The argument new value.
    changed_version : str
        The package version in which the change will be introduced.
    warning_msg: str
        Optional warning message. If None, a generic warning message
        is used.

    """

    def __init__(self, arg_name, *, new_value, changed_version,
                 warning_msg=None):
        self.arg_name = arg_name
        self.new_value = new_value
        self.warning_msg = warning_msg
        self.changed_version = changed_version

    def __call__(self, func):
        parameters = inspect.signature(func).parameters
        arg_idx = list(parameters.keys()).index(self.arg_name)
        old_value = parameters[self.arg_name].default

        stack_rank = _get_stack_rank(func)

        # Build the message into a local instead of mutating
        # ``self.warning_msg``: the original assignment froze the first
        # decorated function's ``old_value`` into the message, so reusing
        # one decorator instance on a second function produced a stale
        # warning text.
        warning_msg = self.warning_msg
        if warning_msg is None:
            warning_msg = (
                f'The new recommended value for {self.arg_name} is '
                f'{self.new_value}. Until version {self.changed_version}, '
                f'the default {self.arg_name} value is {old_value}. '
                f'From version {self.changed_version}, the {self.arg_name} '
                f'default value will be {self.new_value}. To avoid '
                f'this warning, please explicitly set {self.arg_name} value.')

        @functools.wraps(func)
        def fixed_func(*args, **kwargs):
            stacklevel = 1 + self.get_stack_length(func) - stack_rank
            if len(args) < arg_idx + 1 and self.arg_name not in kwargs.keys():
                # The argument was left at its (soon to change) default:
                # warn that arg_name default value changed.
                warnings.warn(warning_msg, FutureWarning,
                              stacklevel=stacklevel)
            return func(*args, **kwargs)

        return fixed_func
|
||||
|
||||
|
||||
class remove_arg(_DecoratorBaseClass):
    """Decorator to remove an argument from function's signature.

    Parameters
    ----------
    arg_name: str
        The name of the argument to be removed.
    changed_version : str
        The package version in which the warning will be replaced by
        an error.
    help_msg: str
        Optional message appended to the generic warning message.

    """

    def __init__(self, arg_name, *, changed_version, help_msg=None):
        self.arg_name = arg_name
        self.help_msg = help_msg
        self.changed_version = changed_version

    def __call__(self, func):
        params = inspect.signature(func).parameters
        arg_idx = list(params).index(self.arg_name)

        warning_msg = (
            f'{self.arg_name} argument is deprecated and will be removed '
            f'in version {self.changed_version}. To avoid this warning, '
            f'please do not use the {self.arg_name} argument. Please '
            f'see {func.__name__} documentation for more details.')
        if self.help_msg is not None:
            warning_msg += f' {self.help_msg}'

        stack_rank = _get_stack_rank(func)

        @functools.wraps(func)
        def fixed_func(*args, **kwargs):
            stacklevel = 1 + self.get_stack_length(func) - stack_rank
            # Passing the argument positionally (enough positional args to
            # reach its slot) or by keyword both trigger the deprecation.
            if len(args) > arg_idx or self.arg_name in kwargs:
                warnings.warn(warning_msg, FutureWarning,
                              stacklevel=stacklevel)
            return func(*args, **kwargs)

        return fixed_func
|
||||
|
||||
|
||||
def docstring_add_deprecated(func, kwarg_mapping, deprecated_version):
    """Add deprecated kwarg(s) to the "Other Params" section of a docstring.

    Parameters
    ----------
    func : function
        The function whose docstring we wish to update.
    kwarg_mapping : dict
        A dict containing {old_arg: new_arg} key/value pairs as used by
        `deprecate_kwarg`.
    deprecated_version : str
        A major.minor version string specifying when old_arg was
        deprecated.

    Returns
    -------
    new_doc : str
        The updated docstring. Returns the original docstring if numpydoc is
        not available.
    """
    if func.__doc__ is None:
        return None
    try:
        from numpydoc.docscrape import FunctionDoc, Parameter
    except ImportError:
        # Return an unmodified docstring if numpydoc is not available.
        return func.__doc__

    Doc = FunctionDoc(func)
    for old_arg, new_arg in kwarg_mapping.items():
        # One "Other Parameters" entry per deprecated name, with a Sphinx
        # ``.. deprecated::`` directive.
        desc = [f'Deprecated in favor of `{new_arg}`.',
                '',
                f'.. deprecated:: {deprecated_version}']
        Doc['Other Parameters'].append(
            Parameter(name=old_arg,
                      type='DEPRECATED',
                      desc=desc)
        )
    new_docstring = str(Doc)

    # new_docstring will have a header starting with:
    #
    # .. function:: func.__name__
    #
    # and some additional blank lines. We strip these off below.
    split = new_docstring.split('\n')
    no_header = split[1:]
    while not no_header[0].strip():
        no_header.pop(0)

    # Store the initial description before any of the Parameters fields.
    # Usually this is a single line, but the while loop covers any case
    # where it is not.
    descr = no_header.pop(0)
    while no_header[0].strip():
        descr += '\n    ' + no_header.pop(0)
    descr += '\n\n'
    # '\n    ' rather than '\n' here to restore the original indentation.
    final_docstring = descr + '\n    '.join(no_header)
    # strip any extra spaces from ends of lines
    final_docstring = '\n'.join(
        [line.rstrip() for line in final_docstring.split('\n')]
    )
    return final_docstring
|
||||
|
||||
|
||||
class deprecate_kwarg(_DecoratorBaseClass):
    """Decorator ensuring backward compatibility when argument names are
    modified in a function definition.

    Parameters
    ----------
    kwarg_mapping: dict
        Mapping between the function's old argument names and the new
        ones.
    deprecated_version : str
        The package version in which the argument was first deprecated.
    warning_msg: str
        Optional warning message. If None, a generic warning message
        is used.
    removed_version : str
        The package version in which the deprecated argument will be
        removed.

    """

    def __init__(self, kwarg_mapping, deprecated_version, warning_msg=None,
                 removed_version=None):
        self.kwarg_mapping = kwarg_mapping
        if warning_msg is None:
            # Build a str.format template; {old_arg}, {func_name} and
            # {new_arg} are filled in per call site.
            self.warning_msg = ("`{old_arg}` is a deprecated argument name "
                                "for `{func_name}`. ")
            if removed_version is not None:
                self.warning_msg += (f'It will be removed in '
                                     f'version {removed_version}. ')
            self.warning_msg += "Please use `{new_arg}` instead."
        else:
            self.warning_msg = warning_msg

        self.deprecated_version = deprecated_version

    def __call__(self, func):

        stack_rank = _get_stack_rank(func)

        @functools.wraps(func)
        def fixed_func(*args, **kwargs):
            # Aim the warning at the caller of the outermost wrapper.
            stacklevel = 1 + self.get_stack_length(func) - stack_rank

            for old_arg, new_arg in self.kwarg_mapping.items():
                if old_arg in kwargs:
                    # warn that the function interface has changed:
                    warnings.warn(self.warning_msg.format(
                        old_arg=old_arg, func_name=func.__name__,
                        new_arg=new_arg), FutureWarning,
                        stacklevel=stacklevel)
                    # Substitute new_arg to old_arg
                    kwargs[new_arg] = kwargs.pop(old_arg)

            # Call the function with the fixed arguments
            return func(*args, **kwargs)

        if func.__doc__ is not None:
            # Document the deprecated names in an "Other Parameters"
            # section (requires numpydoc; otherwise left unchanged).
            newdoc = docstring_add_deprecated(func, self.kwarg_mapping,
                                              self.deprecated_version)
            fixed_func.__doc__ = newdoc
        return fixed_func
|
||||
|
||||
|
||||
class channel_as_last_axis:
    """Decorator for automatically making channels axis last for all arrays.

    This decorator reorders axes for compatibility with functions that only
    support channels along the last axis. After the function call is complete
    the channels axis is restored back to its original position.

    Parameters
    ----------
    channel_arg_positions : tuple of int, optional
        Positional arguments at the positions specified in this tuple are
        assumed to be multichannel arrays. The default is to assume only the
        first argument to the function is a multichannel array.
    channel_kwarg_names : tuple of str, optional
        A tuple containing the names of any keyword arguments corresponding to
        multichannel arrays.
    multichannel_output : bool, optional
        A boolean that should be True if the output of the function is a
        multichannel array and False otherwise. This decorator does not
        currently support the general case of functions with multiple outputs
        where some or all are multichannel.

    """
    def __init__(self, channel_arg_positions=(0,), channel_kwarg_names=(),
                 multichannel_output=True):
        self.arg_positions = set(channel_arg_positions)
        self.kwarg_names = set(channel_kwarg_names)
        self.multichannel_output = multichannel_output

    def __call__(self, func):

        @functools.wraps(func)
        def fixed_func(*args, **kwargs):

            channel_axis = kwargs.get('channel_axis', None)

            # No channel axis: nothing to reorder.
            if channel_axis is None:
                return func(*args, **kwargs)

            # TODO: convert scalars to a tuple in anticipation of eventually
            #       supporting a tuple of channel axes. Right now, only an
            #       integer or a single-element tuple is supported, though.
            if np.isscalar(channel_axis):
                channel_axis = (channel_axis,)
            if len(channel_axis) > 1:
                raise ValueError(
                    "only a single channel axis is currently supported")

            # Channels already last: skip the move/restore round-trip.
            if channel_axis == (-1,) or channel_axis == -1:
                return func(*args, **kwargs)

            if self.arg_positions:
                new_args = []
                for pos, arg in enumerate(args):
                    if pos in self.arg_positions:
                        new_args.append(np.moveaxis(arg, channel_axis[0], -1))
                    else:
                        new_args.append(arg)
                new_args = tuple(new_args)
            else:
                new_args = args

            for name in self.kwarg_names:
                kwargs[name] = np.moveaxis(kwargs[name], channel_axis[0], -1)

            # now that we have moved the channels axis to the last position,
            # change the channel_axis argument to -1
            kwargs["channel_axis"] = -1

            # Call the function with the fixed arguments
            out = func(*new_args, **kwargs)
            if self.multichannel_output:
                # Restore the channel axis to its original position.
                out = np.moveaxis(out, -1, channel_axis[0])
            return out

        return fixed_func
|
||||
|
||||
|
||||
class deprecated:
    """Decorator to mark deprecated functions with warning.

    Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.

    Parameters
    ----------
    alt_func : str
        If given, tell user what function to use instead.
    behavior : {'warn', 'raise'}
        Behavior during call to deprecated function: 'warn' = warn user that
        function is deprecated; 'raise' = raise error.
    removed_version : str
        The package version in which the deprecated function will be removed.
    """

    def __init__(self, alt_func=None, behavior='warn', removed_version=None):
        self.alt_func = alt_func
        self.behavior = behavior
        self.removed_version = removed_version

    def __call__(self, func):

        alt_msg = ''
        if self.alt_func is not None:
            alt_msg = f' Use ``{self.alt_func}`` instead.'
        rmv_msg = ''
        if self.removed_version is not None:
            rmv_msg = f' and will be removed in version {self.removed_version}'

        msg = f'Function ``{func.__name__}`` is deprecated{rmv_msg}.{alt_msg}'

        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            if self.behavior == 'warn':
                func_code = func.__code__
                # NOTE: mutates the process-wide warnings filters so this
                # category is always shown.
                warnings.simplefilter('always', skimage_deprecation)
                # warn_explicit attributes the warning to the deprecated
                # function's definition rather than this wrapper.
                warnings.warn_explicit(msg,
                                       category=skimage_deprecation,
                                       filename=func_code.co_filename,
                                       lineno=func_code.co_firstlineno + 1)
            elif self.behavior == 'raise':
                raise skimage_deprecation(msg)
            return func(*args, **kwargs)

        # modify doc string to display deprecation warning
        doc = '**Deprecated function**.' + alt_msg
        if wrapped.__doc__ is None:
            wrapped.__doc__ = doc
        else:
            wrapped.__doc__ = doc + '\n\n    ' + wrapped.__doc__

        return wrapped
|
||||
|
||||
|
||||
def get_bound_method_class(m):
    """Return the class for a bound method.

    Parameters
    ----------
    m : bound method
        A method bound to an instance.

    Returns
    -------
    type
        The class of the instance the method is bound to.
    """
    # The original guarded on ``sys.version < '3'`` to use the Python 2
    # ``im_class`` attribute. This module already requires Python 3
    # (f-strings are used throughout), so that branch was dead code.
    return m.__self__.__class__
|
||||
|
||||
|
||||
def safe_as_int(val, atol=1e-3):
    """
    Attempt to safely cast values to integer format.

    Parameters
    ----------
    val : scalar or iterable of scalars
        Number or container of numbers which are intended to be interpreted as
        integers, e.g., for indexing purposes, but which may not carry integer
        type.
    atol : float
        Absolute tolerance away from nearest integer to consider values in
        ``val`` functionally integers.

    Returns
    -------
    val_int : NumPy scalar or ndarray of dtype `np.int64`
        Returns the input value(s) coerced to dtype `np.int64` assuming all
        were within ``atol`` of the nearest integer.

    Notes
    -----
    For each value the distance to the nearest integer is computed (the
    fractional part, folded so values just below an integer also count as
    close). If every distance is within ``atol`` of zero, the values are
    rounded and returned as ``np.int64``; otherwise an informative error is
    raised.

    Examples
    --------
    >>> safe_as_int(7.0)
    7

    >>> safe_as_int([9, 4, 2.9999999999])
    array([9, 4, 3])

    >>> safe_as_int(53.1)
    Traceback (most recent call last):
        ...
    ValueError: Integer argument required but received 53.1, check inputs.

    >>> safe_as_int(53.01, atol=0.01)
    53

    """
    frac = np.asarray(val) % 1  # fractional part, in [0, 1)

    # Fold values just below an integer down to their distance from it.
    if frac.ndim == 0:  # scalar input, cannot be indexed
        if frac > 0.5:
            frac = 1 - frac
    else:  # iterable input, now ndarray
        np.minimum(frac, 1 - frac, out=frac)

    try:
        np.testing.assert_allclose(frac, 0, atol=atol)
    except AssertionError:
        raise ValueError(f'Integer argument required but received '
                         f'{val}, check inputs.')

    return np.round(val).astype(np.int64)
|
||||
|
||||
|
||||
def check_shape_equality(*images):
    """Check that all images have the same shape"""
    reference = images[0].shape
    for image in images[1:]:
        if image.shape != reference:
            raise ValueError('Input images must have the same dimensions.')
|
||||
|
||||
|
||||
def slice_at_axis(sl, axis):
    """
    Construct tuple of slices to slice an array in the given dimension.

    Parameters
    ----------
    sl : slice
        The slice for the given dimension.
    axis : int
        The axis to which `sl` is applied. All other dimensions are left
        "unsliced".

    Returns
    -------
    sl : tuple of slices
        A tuple with slices matching `shape` in length.

    Examples
    --------
    >>> slice_at_axis(slice(None, 3, -1), 1)
    (slice(None, None, None), slice(None, 3, -1), Ellipsis)
    """
    # Full slices for every axis before `axis`, the requested slice at
    # `axis`, then Ellipsis to leave trailing axes untouched.
    index = [slice(None)] * axis
    index.append(sl)
    index.append(Ellipsis)
    return tuple(index)
|
||||
|
||||
|
||||
def reshape_nd(arr, ndim, dim):
    """Reshape a 1D array to have n dimensions, all singletons but one.

    Parameters
    ----------
    arr : array, shape (N,)
        Input array
    ndim : int
        Number of desired dimensions of reshaped array.
    dim : int
        Which dimension/axis will not be singleton-sized.

    Returns
    -------
    arr_reshaped : array, shape ([1, ...], N, [1,...])
        View of `arr` reshaped to the desired shape.

    Examples
    --------
    >>> rng = np.random.default_rng()
    >>> arr = rng.random(7)
    >>> reshape_nd(arr, 2, 0).shape
    (7, 1)
    >>> reshape_nd(arr, 3, 1).shape
    (1, 7, 1)
    >>> reshape_nd(arr, 4, -1).shape
    (1, 1, 1, 7)
    """
    if arr.ndim != 1:
        raise ValueError("arr must be a 1D array")
    # All-singleton shape, with -1 letting reshape infer N at `dim`.
    shape = [1] * ndim
    shape[dim] = -1
    return arr.reshape(shape)
|
||||
|
||||
|
||||
def check_nD(array, ndim, arg_name='image'):
    """
    Verify an array meets the desired ndims and array isn't empty.

    Parameters
    ----------
    array : array-like
        Input array to be validated
    ndim : int or iterable of ints
        Allowable ndim or ndims for the array.
    arg_name : str, optional
        The name of the array in the original function.

    """
    array = np.asanyarray(array)
    if isinstance(ndim, int):
        ndim = [ndim]
    if array.size == 0:
        raise ValueError(
            f"The parameter `{arg_name}` cannot be an empty array")
    if array.ndim not in ndim:
        allowed = '-or-'.join(str(n) for n in ndim)
        raise ValueError(
            f"The parameter `{arg_name}` must be a {allowed}-dimensional "
            f"array")
|
||||
|
||||
|
||||
def convert_to_float(image, preserve_range):
    """Convert input image to float image with the appropriate range.

    Parameters
    ----------
    image : ndarray
        Input image.
    preserve_range : bool
        Determines if the range of the image should be kept or transformed
        using img_as_float. Also see
        https://scikit-image.org/docs/dev/user_guide/data_types.html

    Notes
    -----
    * Input images with `float32` data type are not upcast.

    Returns
    -------
    image : ndarray
        Transformed version of the input.

    """
    # float16 is never used for computation; promote it to float32
    # regardless of preserve_range.
    if image.dtype == np.float16:
        return image.astype(np.float32)
    if not preserve_range:
        from ..util.dtype import img_as_float
        return img_as_float(image)
    # Keep the numeric range: upcast to double only when the image is
    # neither single- nor double-precision float already.
    if image.dtype.char not in 'df':
        image = image.astype(float)
    return image
|
||||
|
||||
|
||||
def _validate_interpolation_order(image_dtype, order):
|
||||
"""Validate and return spline interpolation's order.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
image_dtype : dtype
|
||||
Image dtype.
|
||||
order : int, optional
|
||||
The order of the spline interpolation. The order has to be in
|
||||
the range 0-5. See `skimage.transform.warp` for detail.
|
||||
|
||||
Returns
|
||||
-------
|
||||
order : int
|
||||
if input order is None, returns 0 if image_dtype is bool and 1
|
||||
otherwise. Otherwise, image_dtype is checked and input order
|
||||
is validated accordingly (order > 0 is not supported for bool
|
||||
image dtype)
|
||||
|
||||
"""
|
||||
|
||||
if order is None:
|
||||
return 0 if image_dtype == bool else 1
|
||||
|
||||
if order < 0 or order > 5:
|
||||
raise ValueError("Spline interpolation order has to be in the "
|
||||
"range 0-5.")
|
||||
|
||||
if image_dtype == bool and order != 0:
|
||||
raise ValueError(
|
||||
"Input image dtype is bool. Interpolation is not defined "
|
||||
"with bool data type. Please set order to 0 or explicitly "
|
||||
"cast input image to another data type.")
|
||||
|
||||
return order
|
||||
|
||||
|
||||
def _to_np_mode(mode):
|
||||
"""Convert padding modes from `ndi.correlate` to `np.pad`."""
|
||||
mode_translation_dict = dict(nearest='edge', reflect='symmetric',
|
||||
mirror='reflect')
|
||||
if mode in mode_translation_dict:
|
||||
mode = mode_translation_dict[mode]
|
||||
return mode
|
||||
|
||||
|
||||
def _to_ndimage_mode(mode):
    """Convert from `numpy.pad` mode name to the corresponding ndimage mode."""
    npad_to_ndi = dict(constant='constant', edge='nearest',
                       symmetric='reflect', reflect='mirror',
                       wrap='wrap')
    try:
        ndi_mode = npad_to_ndi[mode]
    except KeyError:
        raise ValueError(
            f"Unknown mode: '{mode}', or cannot translate mode. The "
            f"mode should be one of 'constant', 'edge', 'symmetric', "
            f"'reflect', or 'wrap'. See the documentation of numpy.pad for "
            f"more info.")
    return _fix_ndimage_mode(ndi_mode)
|
||||
|
||||
|
||||
def _fix_ndimage_mode(mode):
|
||||
# SciPy 1.6.0 introduced grid variants of constant and wrap which
|
||||
# have less surprising behavior for images. Use these when available
|
||||
grid_modes = {'constant': 'grid-constant', 'wrap': 'grid-wrap'}
|
||||
return grid_modes.get(mode, mode)
|
||||
|
||||
|
||||
# Map a dtype character code to the floating-point type used for computation.
# Consumed by `_supported_float_type` below: exact single/double (and their
# complex counterparts) pass through, half precision is promoted, and the
# extended-precision codes are demoted to the nearest portable type.
new_float_type = {
    # preserved types
    np.float32().dtype.char: np.float32,
    np.float64().dtype.char: np.float64,
    np.complex64().dtype.char: np.complex64,
    np.complex128().dtype.char: np.complex128,
    # altered types
    np.float16().dtype.char: np.float32,
    'g': np.float64,  # np.float128 ; doesn't exist on windows
    'G': np.complex128,  # np.complex256 ; doesn't exist on windows
}
|
||||
|
||||
|
||||
def _supported_float_type(input_dtype, allow_complex=False):
    """Return an appropriate floating-point dtype for a given dtype.

    float32, float64, complex64 and complex128 are preserved; float16 is
    promoted to float32; extended-precision floats are demoted to float64
    (complex256 to complex128); every other dtype maps to float64.

    Parameters
    ----------
    input_dtype : np.dtype or Iterable of np.dtype
        The input dtype. When a sequence of dtypes is given, each one is
        converted individually and `np.result_type` of the conversions is
        returned.
    allow_complex : bool, optional
        If False, raise a ValueError on complex-valued inputs.

    Returns
    -------
    float_type : dtype
        Floating-point dtype for the image.
    """
    is_sequence = (isinstance(input_dtype, Iterable)
                   and not isinstance(input_dtype, str))
    if is_sequence:
        converted = (_supported_float_type(d) for d in input_dtype)
        return np.result_type(*converted)
    dtype = np.dtype(input_dtype)
    if dtype.kind == 'c' and not allow_complex:
        raise ValueError("complex valued input is not supported")
    return new_float_type.get(dtype.char, np.float64)
|
||||
|
||||
|
||||
def identity(image, *args, **kwargs):
    """Return `image` unchanged; extra arguments are accepted and ignored."""
    return image
|
||||
|
||||
|
||||
def as_binary_ndarray(array, *, variable_name):
    """Return `array` as a numpy.ndarray of dtype bool.

    Parameters
    ----------
    array : array_like
        Values to convert; must already be boolean or contain only 0s and 1s.
    variable_name : str
        Name used in the error message so callers can report which argument
        was rejected.

    Raises
    ------
    ValueError:
        An error including the given `variable_name` if `array` can not be
        safely cast to a boolean array.
    """
    array = np.asarray(array)
    # Non-bool input is accepted only when every element is exactly 0 or 1.
    if array.dtype != bool and np.any((array != 1) & (array != 0)):
        raise ValueError(
            f"{variable_name} array is not of dtype boolean or "
            f"contains values other than 0 and 1 so cannot be "
            f"safely cast to boolean array."
        )
    return np.asarray(array, dtype=bool)
|
||||
168
.CondaPkg/env/lib/python3.11/site-packages/skimage/_shared/version_requirements.py
vendored
Normal file
@@ -0,0 +1,168 @@
|
||||
import sys
|
||||
|
||||
from packaging import version as _version
|
||||
|
||||
|
||||
def ensure_python_version(min_version):
    """Raise ImportError if the running Python is older than `min_version`.

    Parameters
    ----------
    min_version : tuple of int, or int
        Minimum supported version, e.g. ``(3, 8)``. A bare int is wrapped
        in a 1-tuple so it compares correctly against ``sys.version_info``.
    """
    if not isinstance(min_version, tuple):
        min_version = (min_version, )
    if sys.version_info < min_version:
        # since ensure_python_version is in the critical import path,
        # we lazy import it.
        from platform import python_version

        raise ImportError("""

You are running scikit-image on an unsupported version of Python.

Unfortunately, scikit-image 0.15 and above no longer work with your installed
version of Python ({}). You therefore have two options: either upgrade to
Python {}, or install an older version of scikit-image.

For Python 2.7 or Python 3.4, use

$ pip install 'scikit-image<0.15'

Please also consider updating `pip` and `setuptools`:

$ pip install pip setuptools --upgrade

Newer versions of these tools avoid installing packages incompatible
with your version of Python.
""".format(python_version(), '.'.join([str(v) for v in min_version])))
|
||||
|
||||
|
||||
def _check_version(actver, version, cmp_op):
    """
    Check version string of an active module against a required version.

    If dev/prerelease tags result in TypeError for string-number comparison,
    it is assumed that the dependency is satisfied.
    Users on dev branches are responsible for keeping their own packages up to
    date.
    """
    comparators = {
        '>': lambda a, b: a > b,
        '>=': lambda a, b: a >= b,
        '=': lambda a, b: a == b,
        '<': lambda a, b: a < b,
    }
    compare = comparators.get(cmp_op)
    if compare is None:
        # Unknown comparison operator: treat the requirement as unmet.
        return False
    try:
        return compare(_version.parse(actver), _version.parse(version))
    except TypeError:
        return True
|
||||
|
||||
|
||||
def get_module_version(module_name):
    """Return module version or None if version can't be retrieved."""
    leaf = module_name.rpartition('.')[-1]
    mod = __import__(module_name, fromlist=[leaf])
    # Prefer the PEP 8 `__version__` attribute, fall back to legacy `VERSION`.
    fallback = getattr(mod, 'VERSION', None)
    return getattr(mod, '__version__', fallback)
|
||||
|
||||
|
||||
def is_installed(name, version=None):
    """Test if *name* is installed.

    Parameters
    ----------
    name : str
        Name of module or "python".
    version : str, optional
        Version string to test against. The module must expose a
        `__version__` or `VERSION` attribute. The string may start with
        =, >=, > or < to specify the exact requirement; a bare number
        means equality.

    Returns
    -------
    out : bool
        True if `name` is installed matching the optional version.
    """
    if name.lower() == 'python':
        actver = sys.version[:6]
    else:
        try:
            actver = get_module_version(name)
        except ImportError:
            return False
    if version is None:
        return True

    # since version_requirements is in the critical import path,
    # we lazy import re
    import re

    match = re.search('[0-9]', version)
    assert match is not None, "Invalid version number"
    # Everything before the first digit is the comparison operator.
    symb = version[:match.start()] or '='
    assert symb in ('>=', '>', '=', '<'), \
        f"Invalid version condition '{symb}'"
    return _check_version(actver, version[match.start():], symb)
|
||||
|
||||
|
||||
def require(name, version=None):
    """Return decorator that forces a requirement for a function or class.

    Parameters
    ----------
    name : str
        Name of module or "python".
    version : str, optional
        Version string to test against. The module must expose a
        `__version__` or `VERSION` attribute. The string may start with
        =, >=, > or < to specify the exact requirement.

    Returns
    -------
    func : function
        A decorator that raises an ImportError if a function is run
        in the absence of the input dependency.
    """
    # since version_requirements is in the critical import path, we lazy import
    # functools
    import functools

    def decorator(obj):
        @functools.wraps(obj)
        def func_wrapped(*args, **kwargs):
            if not is_installed(name, version):
                msg = f'"{obj}" in "{obj.__module__}" requires "{name}'
                if version is not None:
                    msg += f" {version}"
                raise ImportError(msg + '"')
            return obj(*args, **kwargs)
        return func_wrapped
    return decorator
|
||||
|
||||
|
||||
def get_module(module_name, version=None):
    """Return a module object of name *module_name* if installed.

    Parameters
    ----------
    module_name : str
        Name of module.
    version : str, optional
        Version string to test against. The module must expose a
        `__version__` or `VERSION` attribute. The string may start with
        =, >=, > or < to specify the exact requirement.

    Returns
    -------
    mod : module or None
        Module if *module_name* is installed matching the optional version
        or None otherwise.
    """
    if is_installed(module_name, version):
        leaf = module_name.rpartition('.')[-1]
        return __import__(module_name, fromlist=[leaf])
    return None
|
||||
127
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/__init__.py
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
from .colorconv import (convert_colorspace,
|
||||
rgba2rgb,
|
||||
rgb2hsv,
|
||||
hsv2rgb,
|
||||
rgb2xyz,
|
||||
xyz2rgb,
|
||||
rgb2rgbcie,
|
||||
rgbcie2rgb,
|
||||
rgb2gray,
|
||||
gray2rgb,
|
||||
gray2rgba,
|
||||
xyz2lab,
|
||||
lab2xyz,
|
||||
lab2rgb,
|
||||
rgb2lab,
|
||||
xyz2luv,
|
||||
luv2xyz,
|
||||
luv2rgb,
|
||||
rgb2luv,
|
||||
rgb2hed,
|
||||
hed2rgb,
|
||||
lab2lch,
|
||||
lch2lab,
|
||||
rgb2yuv,
|
||||
yuv2rgb,
|
||||
rgb2yiq,
|
||||
yiq2rgb,
|
||||
rgb2ypbpr,
|
||||
ypbpr2rgb,
|
||||
rgb2ycbcr,
|
||||
ycbcr2rgb,
|
||||
rgb2ydbdr,
|
||||
ydbdr2rgb,
|
||||
separate_stains,
|
||||
combine_stains,
|
||||
rgb_from_hed,
|
||||
hed_from_rgb,
|
||||
rgb_from_hdx,
|
||||
hdx_from_rgb,
|
||||
rgb_from_fgx,
|
||||
fgx_from_rgb,
|
||||
rgb_from_bex,
|
||||
bex_from_rgb,
|
||||
rgb_from_rbd,
|
||||
rbd_from_rgb,
|
||||
rgb_from_gdx,
|
||||
gdx_from_rgb,
|
||||
rgb_from_hax,
|
||||
hax_from_rgb,
|
||||
rgb_from_bro,
|
||||
bro_from_rgb,
|
||||
rgb_from_bpx,
|
||||
bpx_from_rgb,
|
||||
rgb_from_ahx,
|
||||
ahx_from_rgb,
|
||||
rgb_from_hpx,
|
||||
hpx_from_rgb)
|
||||
|
||||
from .colorlabel import color_dict, label2rgb
|
||||
|
||||
from .delta_e import (deltaE_cie76,
|
||||
deltaE_ciede94,
|
||||
deltaE_ciede2000,
|
||||
deltaE_cmc,
|
||||
)
|
||||
|
||||
|
||||
__all__ = ['convert_colorspace',
|
||||
'rgba2rgb',
|
||||
'rgb2hsv',
|
||||
'hsv2rgb',
|
||||
'rgb2xyz',
|
||||
'xyz2rgb',
|
||||
'rgb2rgbcie',
|
||||
'rgbcie2rgb',
|
||||
'rgb2gray',
|
||||
'gray2rgb',
|
||||
'gray2rgba',
|
||||
'xyz2lab',
|
||||
'lab2xyz',
|
||||
'lab2rgb',
|
||||
'rgb2lab',
|
||||
'rgb2hed',
|
||||
'hed2rgb',
|
||||
'lab2lch',
|
||||
'lch2lab',
|
||||
'rgb2yuv',
|
||||
'yuv2rgb',
|
||||
'rgb2yiq',
|
||||
'yiq2rgb',
|
||||
'rgb2ypbpr',
|
||||
'ypbpr2rgb',
|
||||
'rgb2ycbcr',
|
||||
'ycbcr2rgb',
|
||||
'rgb2ydbdr',
|
||||
'ydbdr2rgb',
|
||||
'separate_stains',
|
||||
'combine_stains',
|
||||
'rgb_from_hed',
|
||||
'hed_from_rgb',
|
||||
'rgb_from_hdx',
|
||||
'hdx_from_rgb',
|
||||
'rgb_from_fgx',
|
||||
'fgx_from_rgb',
|
||||
'rgb_from_bex',
|
||||
'bex_from_rgb',
|
||||
'rgb_from_rbd',
|
||||
'rbd_from_rgb',
|
||||
'rgb_from_gdx',
|
||||
'gdx_from_rgb',
|
||||
'rgb_from_hax',
|
||||
'hax_from_rgb',
|
||||
'rgb_from_bro',
|
||||
'bro_from_rgb',
|
||||
'rgb_from_bpx',
|
||||
'bpx_from_rgb',
|
||||
'rgb_from_ahx',
|
||||
'ahx_from_rgb',
|
||||
'rgb_from_hpx',
|
||||
'hpx_from_rgb',
|
||||
'color_dict',
|
||||
'label2rgb',
|
||||
'deltaE_cie76',
|
||||
'deltaE_ciede94',
|
||||
'deltaE_ciede2000',
|
||||
'deltaE_cmc',
|
||||
]
|
||||
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/__pycache__/adapt_rgb.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/__pycache__/colorconv.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/__pycache__/colorlabel.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/__pycache__/delta_e.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/__pycache__/rgb_colors.cpython-311.pyc
vendored
Normal file
79
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/adapt_rgb.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
import functools
|
||||
|
||||
import numpy as np
|
||||
|
||||
from .. import color
|
||||
from ..util.dtype import _convert
|
||||
|
||||
|
||||
__all__ = ['adapt_rgb', 'hsv_value', 'each_channel']
|
||||
|
||||
|
||||
def is_rgb_like(image, channel_axis=-1):
    """Return True if the image *looks* like it's RGB.

    An image is considered RGB-like when it is 3-D and its channel axis
    holds 3 (RGB) or 4 (RGBA) entries.

    This function should not be public because it is only intended to be used
    for functions that don't accept volumes as input, since checking an
    image's shape is fragile.
    """
    if image.ndim != 3:
        return False
    return image.shape[channel_axis] in (3, 4)
|
||||
|
||||
|
||||
def adapt_rgb(apply_to_rgb):
    """Return decorator that adapts to RGB images to a gray-scale filter.

    This function is only intended to be used for functions that don't accept
    volumes as input, since checking an image's shape is fragile.

    Parameters
    ----------
    apply_to_rgb : function
        Function that returns a filtered image from an image-filter and RGB
        image. This will only be called if the image is RGB-like.
    """
    def decorator(image_filter):
        @functools.wraps(image_filter)
        def image_filter_adapted(image, *args, **kwargs):
            # Gray-scale input goes straight through the wrapped filter.
            if not is_rgb_like(image):
                return image_filter(image, *args, **kwargs)
            return apply_to_rgb(image_filter, image, *args, **kwargs)
        return image_filter_adapted
    return decorator
|
||||
|
||||
|
||||
def hsv_value(image_filter, image, *args, **kwargs):
    """Return color image by applying `image_filter` on HSV-value of `image`.

    Note that this function is intended for use with `adapt_rgb`.

    Parameters
    ----------
    image_filter : function
        Function that filters a gray-scale image.
    image : array
        Input image. Note that RGBA images are treated as RGB.
    """
    # Slice the first three channels so that we remove any alpha channels.
    hsv = color.rgb2hsv(image[:, :, :3])
    filtered_value = image_filter(hsv[:, :, 2].copy(), *args, **kwargs)
    hsv[:, :, 2] = _convert(filtered_value, hsv.dtype)
    return color.hsv2rgb(hsv)
|
||||
|
||||
|
||||
def each_channel(image_filter, image, *args, **kwargs):
    """Return color image by applying `image_filter` on channels of `image`.

    Note that this function is intended for use with `adapt_rgb`.

    Parameters
    ----------
    image_filter : function
        Function that filters a gray-scale image.
    image : array
        Input image.
    """
    channels = np.moveaxis(image, -1, 0)
    filtered = [image_filter(channel, *args, **kwargs)
                for channel in channels]
    return np.stack(filtered, axis=-1)
|
||||
2199
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/colorconv.py
vendored
Normal file
273
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/colorlabel.py
vendored
Normal file
@@ -0,0 +1,273 @@
|
||||
import itertools
|
||||
|
||||
import numpy as np
|
||||
|
||||
from .._shared.utils import _supported_float_type, warn
|
||||
from ..util import img_as_float
|
||||
from . import rgb_colors
|
||||
from .colorconv import gray2rgb, rgb2hsv, hsv2rgb
|
||||
|
||||
|
||||
__all__ = ['color_dict', 'label2rgb', 'DEFAULT_COLORS']
|
||||
|
||||
|
||||
# Colors cycled through by the overlay path when the caller supplies none.
DEFAULT_COLORS = ('red', 'blue', 'yellow', 'magenta', 'green',
                  'indigo', 'darkorange', 'cyan', 'pink', 'yellowgreen')


# Name -> RGB-tuple mapping built from the `rgb_colors` module; the
# isinstance filter keeps only the tuple constants and drops any other
# module attributes.
color_dict = {k: v for k, v in rgb_colors.__dict__.items()
              if isinstance(v, tuple)}
|
||||
|
||||
|
||||
def _rgb_vector(color):
|
||||
"""Return RGB color as (1, 3) array.
|
||||
|
||||
This RGB array gets multiplied by masked regions of an RGB image, which are
|
||||
partially flattened by masking (i.e. dimensions 2D + RGB -> 1D + RGB).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
color : str or array
|
||||
Color name in `color_dict` or RGB float values between [0, 1].
|
||||
"""
|
||||
if isinstance(color, str):
|
||||
color = color_dict[color]
|
||||
# Slice to handle RGBA colors.
|
||||
return np.array(color[:3])
|
||||
|
||||
|
||||
def _match_label_with_color(label, colors, bg_label, bg_color):
    """Return `unique_labels` and `color_cycle` for label array and color list.

    Colors are cycled for normal labels, but the background color should only
    be used for the background.
    """
    # Temporarily set background color; it will be removed later.
    if bg_color is None:
        bg_color = (0, 0, 0)
    bg_color = _rgb_vector(bg_color)

    # map labels to their ranks among all labels from small to large
    unique_labels, mapped_labels = np.unique(label, return_inverse=True)

    # get rank of bg_label
    bg_label_rank_list = mapped_labels[label.flat == bg_label]

    # The rank of each label is the index of the color it is matched to in
    # color cycle. bg_label should always be mapped to the first color, so
    # its rank must be 0. Other labels should be ranked from small to large
    # from 1.
    if len(bg_label_rank_list) > 0:
        bg_label_rank = bg_label_rank_list[0]
        # Labels that ranked below the background shift up by one so that
        # rank 0 is freed for the background; then pin the background to 0.
        mapped_labels[mapped_labels < bg_label_rank] += 1
        mapped_labels[label.flat == bg_label] = 0
    else:
        # No background present: shift every rank up so 0 stays reserved.
        mapped_labels += 1

    # Modify labels and color cycle so background color is used only once.
    # The background color is prepended; the remaining colors repeat forever.
    color_cycle = itertools.cycle(colors)
    color_cycle = itertools.chain([bg_color], color_cycle)

    return mapped_labels, color_cycle
|
||||
|
||||
|
||||
def label2rgb(label, image=None, colors=None, alpha=0.3,
              bg_label=0, bg_color=(0, 0, 0), image_alpha=1, kind='overlay',
              *, saturation=0, channel_axis=-1):
    """Return an RGB image where color-coded labels are painted over the image.

    Parameters
    ----------
    label : ndarray
        Integer array of labels with the same shape as `image`.
    image : ndarray, optional
        Image used as underlay for labels. It should have the same shape as
        `labels`, optionally with an additional RGB (channels) axis. If
        `image` is an RGB image, it is converted to grayscale before coloring.
    colors : list, optional
        List of colors. If the number of labels exceeds the number of colors,
        then the colors are cycled.
    alpha : float [0, 1], optional
        Opacity of colorized labels. Ignored if image is `None`.
    bg_label : int, optional
        Label that's treated as the background. If `bg_label` is specified,
        `bg_color` is `None`, and `kind` is `overlay`, background is not
        painted by any colors.
    bg_color : str or array, optional
        Background color. Must be a name in `color_dict` or RGB float values
        between [0, 1].
    image_alpha : float [0, 1], optional
        Opacity of the image.
    kind : string, one of {'overlay', 'avg'}
        The kind of color image desired. 'overlay' cycles over defined colors
        and overlays the colored labels over the original image. 'avg'
        replaces each labeled segment with its average color, for a
        stained-class or pastel painting appearance.
    saturation : float [0, 1], optional
        Parameter to control the saturation applied to the original image
        between fully saturated (original RGB, `saturation=1`) and fully
        unsaturated (grayscale, `saturation=0`). Only applies when
        `kind='overlay'`.
    channel_axis : int, optional
        This parameter indicates which axis of the output array will
        correspond to channels. If `image` is provided, this must also match
        the axis of `image` that corresponds to channels.

        .. versionadded:: 0.19
            ``channel_axis`` was added in 0.19.

    Returns
    -------
    result : ndarray of float, same shape as `image`
        The result of blending a cycling colormap (`colors`) for each distinct
        value in `label` with the image, at a certain alpha value.
    """
    if kind not in ('overlay', 'avg'):
        raise ValueError("`kind` must be either 'overlay' or 'avg'.")
    # Work internally with channels last; restore the requested axis at the
    # end.
    if image is not None:
        image = np.moveaxis(image, source=channel_axis, destination=-1)
    if kind == 'overlay':
        rgb = _label2rgb_overlay(label, image, colors, alpha, bg_label,
                                 bg_color, image_alpha, saturation)
    else:
        rgb = _label2rgb_avg(label, image, bg_label, bg_color)
    return np.moveaxis(rgb, source=-1, destination=channel_axis)
|
||||
|
||||
|
||||
def _label2rgb_overlay(label, image=None, colors=None, alpha=0.3,
                       bg_label=-1, bg_color=None, image_alpha=1,
                       saturation=0):
    """Return an RGB image where color-coded labels are painted over the image.

    Parameters
    ----------
    label : ndarray
        Integer array of labels with the same shape as `image`.
    image : ndarray, optional
        Image used as underlay for labels. It should have the same shape as
        `labels`, optionally with an additional RGB (channels) axis. If `image`
        is an RGB image, it is converted to grayscale before coloring.
    colors : list, optional
        List of colors. If the number of labels exceeds the number of colors,
        then the colors are cycled.
    alpha : float [0, 1], optional
        Opacity of colorized labels. Ignored if image is `None`.
    bg_label : int, optional
        Label that's treated as the background. If `bg_label` is specified and
        `bg_color` is `None`, background is not painted by any colors.
    bg_color : str or array, optional
        Background color. Must be a name in `color_dict` or RGB float values
        between [0, 1].
    image_alpha : float [0, 1], optional
        Opacity of the image.
    saturation : float [0, 1], optional
        Parameter to control the saturation applied to the original image
        between fully saturated (original RGB, `saturation=1`) and fully
        unsaturated (grayscale, `saturation=0`).

    Returns
    -------
    result : ndarray of float, same shape as `image`
        The result of blending a cycling colormap (`colors`) for each distinct
        value in `label` with the image, at a certain alpha value.
    """
    # Out-of-range saturation only warns; the value is still applied below.
    if not 0 <= saturation <= 1:
        warn(f'saturation must be in range [0, 1], got {saturation}')

    if colors is None:
        colors = DEFAULT_COLORS
    colors = [_rgb_vector(c) for c in colors]

    if image is None:
        image = np.zeros(label.shape + (3,), dtype=np.float64)
        # Opacity doesn't make sense if no image exists.
        alpha = 1
    else:
        if (image.shape[:label.ndim] != label.shape
                or image.ndim > label.ndim + 1):
            raise ValueError("`image` and `label` must be the same shape")

        if image.ndim == label.ndim + 1 and image.shape[-1] != 3:
            raise ValueError(
                "`image` must be RGB (image.shape[-1] must be 3)."
            )

        if image.min() < 0:
            warn("Negative intensities in `image` are not supported")

        float_dtype = _supported_float_type(image.dtype)
        image = img_as_float(image).astype(float_dtype, copy=False)
        if image.ndim > label.ndim:
            # RGB input: scale its saturation via HSV before overlaying.
            hsv = rgb2hsv(image)
            hsv[..., 1] *= saturation
            image = hsv2rgb(hsv)
        elif image.ndim == label.ndim:
            # Gray input: replicate to three channels.
            image = gray2rgb(image)
        # Blend the image toward white according to image_alpha.
        image = image * image_alpha + (1 - image_alpha)

    # Ensure that all labels are non-negative so we can index into
    # `label_to_color` correctly.
    offset = min(label.min(), bg_label)
    if offset != 0:
        label = label - offset  # Make sure you don't modify the input array.
        bg_label -= offset

    # Downcast labels to the smallest integer type that can hold the max;
    # bool is widened to uint8 since it cannot index a color table usefully.
    new_type = np.min_scalar_type(int(label.max()))
    if new_type == bool:
        new_type = np.uint8
    label = label.astype(new_type)

    mapped_labels_flat, color_cycle = _match_label_with_color(label, colors,
                                                              bg_label,
                                                              bg_color)

    if len(mapped_labels_flat) == 0:
        return image

    dense_labels = range(np.max(mapped_labels_flat) + 1)

    # Materialize only as many colors from the infinite cycle as needed.
    label_to_color = np.stack([c for i, c in zip(dense_labels, color_cycle)])

    # NOTE: `mapped_labels` aliases `label` (already a private copy above)
    # and is filled in place with the rank-remapped values.
    mapped_labels = label
    mapped_labels.flat = mapped_labels_flat
    result = label_to_color[mapped_labels] * alpha + image * (1 - alpha)

    # Remove background label if its color was not specified.
    remove_background = 0 in mapped_labels_flat and bg_color is None
    if remove_background:
        result[label == bg_label] = image[label == bg_label]

    return result
|
||||
|
||||
|
||||
def _label2rgb_avg(label_field, image, bg_label=0, bg_color=(0, 0, 0)):
|
||||
"""Visualise each segment in `label_field` with its mean color in `image`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
label_field : ndarray of int
|
||||
A segmentation of an image.
|
||||
image : array, shape ``label_field.shape + (3,)``
|
||||
A color image of the same spatial shape as `label_field`.
|
||||
bg_label : int, optional
|
||||
A value in `label_field` to be treated as background.
|
||||
bg_color : 3-tuple of int, optional
|
||||
The color for the background label
|
||||
|
||||
Returns
|
||||
-------
|
||||
out : ndarray, same shape and type as `image`
|
||||
The output visualization.
|
||||
"""
|
||||
out = np.zeros(label_field.shape + (3,), dtype=image.dtype)
|
||||
labels = np.unique(label_field)
|
||||
bg = (labels == bg_label)
|
||||
if bg.any():
|
||||
labels = labels[labels != bg_label]
|
||||
mask = (label_field == bg_label).nonzero()
|
||||
out[mask] = bg_color
|
||||
for label in labels:
|
||||
mask = (label_field == label).nonzero()
|
||||
color = image[mask].mean(axis=0)
|
||||
out[mask] = color
|
||||
return out
|
||||
391
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/delta_e.py
vendored
Normal file
@@ -0,0 +1,391 @@
|
||||
"""
|
||||
Functions for calculating the "distance" between colors.
|
||||
|
||||
Implicit in these definitions of "distance" is the notion of "Just Noticeable
|
||||
Distance" (JND). This represents the distance between colors where a human can
|
||||
perceive different colors. Humans are more sensitive to certain colors than
|
||||
others, which different deltaE metrics correct for with varying degrees of
|
||||
sophistication.
|
||||
|
||||
The literature often mentions 1 as the minimum distance for visual
|
||||
differentiation, but more recent studies (Mahy 1994) peg JND at 2.3
|
||||
|
||||
The delta-E notation comes from the German word for "Sensation" (Empfindung).
|
||||
|
||||
Reference
|
||||
---------
|
||||
https://en.wikipedia.org/wiki/Color_difference
|
||||
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
|
||||
from .._shared.utils import _supported_float_type
|
||||
from .colorconv import lab2lch, _cart2polar_2pi
|
||||
|
||||
|
||||
def _float_inputs(lab1, lab2, allow_float32=True):
    """Coerce both Lab arrays to a common floating-point dtype.

    With `allow_float32`, the narrowest supported float type for the pair is
    chosen; otherwise both arrays are forced to float64.
    """
    lab1 = np.asarray(lab1)
    lab2 = np.asarray(lab2)
    if allow_float32:
        target = _supported_float_type([lab1.dtype, lab2.dtype])
    else:
        target = np.float64
    return (lab1.astype(target, copy=False),
            lab2.astype(target, copy=False))
|
||||
|
||||
|
||||
def deltaE_cie76(lab1, lab2, channel_axis=-1):
    """Euclidean distance between two points in Lab color space

    Parameters
    ----------
    lab1 : array_like
        reference color (Lab colorspace)
    lab2 : array_like
        comparison color (Lab colorspace)
    channel_axis : int, optional
        This parameter indicates which axis of the arrays corresponds to
        channels.

        .. versionadded:: 0.19
            ``channel_axis`` was added in 0.19.

    Returns
    -------
    dE : array_like
        distance between colors `lab1` and `lab2`

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Color_difference
    .. [2] A. R. Robertson, "The CIE 1976 color-difference formulae,"
           Color Res. Appl. 2, 7-11 (1977).
    """
    lab1, lab2 = _float_inputs(lab1, lab2, allow_float32=True)
    # Only the first three channels (L, a, b) enter the distance.
    channels1 = np.moveaxis(lab1, source=channel_axis, destination=0)[:3]
    channels2 = np.moveaxis(lab2, source=channel_axis, destination=0)[:3]
    dL, da, db = (c2 - c1 for c1, c2 in zip(channels1, channels2))
    return np.sqrt(dL ** 2 + da ** 2 + db ** 2)
|
||||
|
||||
|
||||
def deltaE_ciede94(lab1, lab2, kH=1, kC=1, kL=1, k1=0.045, k2=0.015, *,
                   channel_axis=-1):
    """Color difference according to CIEDE 94 standard

    Accommodates perceptual non-uniformities through the use of application
    specific scale factors (`kH`, `kC`, `kL`, `k1`, and `k2`).

    Parameters
    ----------
    lab1 : array_like
        reference color (Lab colorspace)
    lab2 : array_like
        comparison color (Lab colorspace)
    kH : float, optional
        Hue scale
    kC : float, optional
        Chroma scale
    kL : float, optional
        Lightness scale
    k1 : float, optional
        first scale parameter
    k2 : float, optional
        second scale parameter
    channel_axis : int, optional
        This parameter indicates which axis of the arrays corresponds to
        channels.

        .. versionadded:: 0.19
            ``channel_axis`` was added in 0.19.

    Returns
    -------
    dE : array_like
        color difference between `lab1` and `lab2`

    Notes
    -----
    deltaE_ciede94 is not symmetric with respect to lab1 and lab2. CIEDE94
    defines the scales for the lightness, hue, and chroma in terms of the
    first color, so `lab1` should be regarded as the "reference" color.

    `kL`, `k1`, `k2` depend on the application; the defaults are the values
    suggested for graphic arts (textiles use kL=2.000, k1=0.048, k2=0.014).

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Color_difference
    .. [2] http://www.brucelindbloom.com/index.html?Eqn_DeltaE_CIE94.html
    """
    lab1, lab2 = _float_inputs(lab1, lab2, allow_float32=True)
    lab1 = np.moveaxis(lab1, source=channel_axis, destination=0)
    lab2 = np.moveaxis(lab2, source=channel_axis, destination=0)

    L1, C1 = lab2lch(lab1, channel_axis=0)[:2]
    L2, C2 = lab2lch(lab2, channel_axis=0)[:2]

    # Weighting functions are defined in terms of the reference chroma C1
    # (lightness weighting SL is identically 1).
    scale_C = 1 + k1 * C1
    scale_H = 1 + k2 * C1

    term_L = ((L1 - L2) / kL) ** 2
    term_C = ((C1 - C2) / (kC * scale_C)) ** 2
    term_H = get_dH2(lab1, lab2, channel_axis=0) / (kH * scale_H) ** 2
    dE2 = term_L + term_C + term_H
    # Clamp tiny negative round-off before the square root.
    return np.sqrt(np.maximum(dE2, 0))
|
||||
|
||||
|
||||
def deltaE_ciede2000(lab1, lab2, kL=1, kC=1, kH=1, *, channel_axis=-1):
    """Color difference as given by the CIEDE 2000 standard.

    CIEDE 2000 is a major revision of CIEDE94. The perceptual calibration is
    largely based on experience with automotive paint on smooth surfaces.

    Parameters
    ----------
    lab1 : array_like
        reference color (Lab colorspace)
    lab2 : array_like
        comparison color (Lab colorspace)
    kL : float (range), optional
        lightness scale factor, 1 for "acceptably close"; 2 for "imperceptible"
        see deltaE_cmc
    kC : float (range), optional
        chroma scale factor, usually 1
    kH : float (range), optional
        hue scale factor, usually 1
    channel_axis : int, optional
        This parameter indicates which axis of the arrays corresponds to
        channels.

        .. versionadded:: 0.19
           ``channel_axis`` was added in 0.19.

    Returns
    -------
    deltaE : array_like
        The distance between `lab1` and `lab2`

    Notes
    -----
    CIEDE 2000 assumes parametric weighting factors for the lightness, chroma,
    and hue (`kL`, `kC`, `kH` respectively). These default to 1.

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Color_difference
    .. [2] http://www.ece.rochester.edu/~gsharma/ciede2000/ciede2000noteCRNA.pdf
           :DOI:`10.1364/AO.33.008069`
    .. [3] M. Melgosa, J. Quesada, and E. Hita, "Uniformity of some recent
           color metrics tested with an accurate color-difference tolerance
           dataset," Appl. Opt. 33, 8069-8077 (1994).
    """
    lab1, lab2 = _float_inputs(lab1, lab2, allow_float32=True)

    # Normalize channel_axis to a non-negative index.
    channel_axis = channel_axis % lab1.ndim
    unroll = False
    if lab1.ndim == 1 and lab2.ndim == 1:
        # Two single colors were passed: add a leading axis so the
        # vectorized code below applies, and strip it off before returning.
        unroll = True
        if lab1.ndim == 1:
            lab1 = lab1[None, :]
        if lab2.ndim == 1:
            lab2 = lab2[None, :]
        channel_axis += 1
    L1, a1, b1 = np.moveaxis(lab1, source=channel_axis, destination=0)[:3]
    L2, a2, b2 = np.moveaxis(lab2, source=channel_axis, destination=0)[:3]

    # distort `a` based on average chroma
    # then convert to lch coordinates from distorted `a`
    # all subsequent calculations are in the new coordinates
    # (often denoted "prime" in the literature)
    Cbar = 0.5 * (np.hypot(a1, b1) + np.hypot(a2, b2))
    c7 = Cbar ** 7
    G = 0.5 * (1 - np.sqrt(c7 / (c7 + 25 ** 7)))
    scale = 1 + G
    C1, h1 = _cart2polar_2pi(a1 * scale, b1)
    C2, h2 = _cart2polar_2pi(a2 * scale, b2)
    # recall that c, h are polar coordinates.  c==r, h==theta

    # CIEDE 2000 has four terms to delta_e:
    # 1) Luminance term
    # 2) Hue term
    # 3) Chroma term
    # 4) hue Rotation term

    # lightness term
    Lbar = 0.5 * (L1 + L2)
    tmp = (Lbar - 50) ** 2
    SL = 1 + 0.015 * tmp / np.sqrt(20 + tmp)
    L_term = (L2 - L1) / (kL * SL)

    # chroma term
    Cbar = 0.5 * (C1 + C2)  # new coordinates
    SC = 1 + 0.045 * Cbar
    C_term = (C2 - C1) / (kC * SC)

    # hue term
    h_diff = h2 - h1
    h_sum = h1 + h2
    CC = C1 * C2

    # Wrap the raw angular difference into (-pi, pi] before halving it.
    dH = h_diff.copy()
    dH[h_diff > np.pi] -= 2 * np.pi
    dH[h_diff < -np.pi] += 2 * np.pi
    dH[CC == 0.] = 0.  # if r == 0, dtheta == 0
    dH_term = 2 * np.sqrt(CC) * np.sin(dH / 2)

    # Mean hue: when the two hues straddle the 0/2*pi boundary the plain
    # average points the wrong way, so shift the sum by 2*pi first.
    Hbar = h_sum.copy()
    mask = np.logical_and(CC != 0., np.abs(h_diff) > np.pi)
    Hbar[mask * (h_sum < 2 * np.pi)] += 2 * np.pi
    Hbar[mask * (h_sum >= 2 * np.pi)] -= 2 * np.pi
    # Degenerate chroma: use the single defined hue (doubled, then halved
    # below, so it comes out unchanged).
    Hbar[CC == 0.] *= 2
    Hbar *= 0.5

    T = (1 -
         0.17 * np.cos(Hbar - np.deg2rad(30)) +
         0.24 * np.cos(2 * Hbar) +
         0.32 * np.cos(3 * Hbar + np.deg2rad(6)) -
         0.20 * np.cos(4 * Hbar - np.deg2rad(63))
         )
    SH = 1 + 0.015 * Cbar * T

    H_term = dH_term / (kH * SH)

    # hue rotation (corrects for the blue-region interaction of chroma/hue)
    c7 = Cbar ** 7
    Rc = 2 * np.sqrt(c7 / (c7 + 25 ** 7))
    dtheta = np.deg2rad(30) * np.exp(-((np.rad2deg(Hbar) - 275) / 25) ** 2)
    R_term = -np.sin(2 * dtheta) * Rc * C_term * H_term

    # put it all together
    dE2 = L_term ** 2
    dE2 += C_term ** 2
    dE2 += H_term ** 2
    dE2 += R_term
    # Clamp tiny negative values caused by floating-point roundoff.
    ans = np.sqrt(np.maximum(dE2, 0))
    if unroll:
        ans = ans[0]
    return ans
|
||||
|
||||
|
||||
def deltaE_cmc(lab1, lab2, kL=1, kC=1, *, channel_axis=-1):
    """Color difference from the CMC l:c standard.

    This color difference was developed by the Colour Measurement Committee
    (CMC) of the Society of Dyers and Colourists (United Kingdom). It is
    intended for use in the textile industry.

    The scale factors `kL`, `kC` set the weight given to differences in
    lightness and chroma relative to differences in hue. The usual values are
    ``kL=2``, ``kC=1`` for "acceptability" and ``kL=1``, ``kC=1`` for
    "imperceptibility". Colors with ``dE > 1`` are "different" for the given
    scale factors.

    Parameters
    ----------
    lab1 : array_like
        reference color (Lab colorspace)
    lab2 : array_like
        comparison color (Lab colorspace)
    kL : float, optional
        lightness scale factor; see above for typical values
    kC : float, optional
        chroma scale factor; see above for typical values
    channel_axis : int, optional
        This parameter indicates which axis of the arrays corresponds to
        channels.

        .. versionadded:: 0.19
           ``channel_axis`` was added in 0.19.

    Returns
    -------
    dE : array_like
        distance between colors `lab1` and `lab2`

    Notes
    -----
    deltaE_cmc defines the scales for the lightness, hue, and chroma
    in terms of the first color. Consequently
    ``deltaE_cmc(lab1, lab2) != deltaE_cmc(lab2, lab1)``

    References
    ----------
    .. [1] https://en.wikipedia.org/wiki/Color_difference
    .. [2] http://www.brucelindbloom.com/index.html?Eqn_DeltaE_CIE94.html
    .. [3] F. J. J. Clarke, R. McDonald, and B. Rigg, "Modification to the
           JPC79 colour-difference formula," J. Soc. Dyers Colour. 100, 128-132
           (1984).
    """
    lab1, lab2 = _float_inputs(lab1, lab2, allow_float32=True)

    # Put the Lab channels on the leading axis so they unpack directly.
    lab1 = np.moveaxis(lab1, source=channel_axis, destination=0)
    lab2 = np.moveaxis(lab2, source=channel_axis, destination=0)
    L1, C1, h1 = lab2lch(lab1, channel_axis=0)[:3]
    L2, C2, h2 = lab2lch(lab2, channel_axis=0)[:3]

    # Component differences; dH2 is the squared hue term (see get_dH2).
    dL = L1 - L2
    dC = C1 - C2
    dH2 = get_dH2(lab1, lab2, channel_axis=0)

    # Hue-dependent weighting: one formula inside the 164-345 degree band,
    # another outside it. All scales are based on the *first* color.
    h1_deg = np.rad2deg(h1)
    in_band = (h1_deg >= 164) & (h1_deg <= 345)
    T = np.where(in_band,
                 0.56 + 0.2 * np.abs(np.cos(h1 + np.deg2rad(168))),
                 0.36 + 0.4 * np.abs(np.cos(h1 + np.deg2rad(35))))

    # Chroma-dependent blending factor for the hue scale.
    chroma4 = C1 ** 4
    F = np.sqrt(chroma4 / (chroma4 + 1900))

    SL = np.where(L1 < 16, 0.511, 0.040975 * L1 / (1. + 0.01765 * L1))
    SC = 0.638 + 0.0638 * C1 / (1. + 0.0131 * C1)
    SH = SC * (F * T + 1 - F)

    # Combine the weighted squared components; clamp roundoff negatives.
    dE2 = (dL / (kL * SL)) ** 2
    dE2 += (dC / (kC * SC)) ** 2
    dE2 += dH2 / (SH ** 2)
    return np.sqrt(np.maximum(dE2, 0))
|
||||
|
||||
|
||||
def get_dH2(lab1, lab2, *, channel_axis=-1):
    """Squared hue difference term occurring in deltaE_cmc and deltaE_ciede94.

    Despite its name, "dH" is not a simple difference of hue values. We avoid
    working directly with the hue value, since differencing angles is
    troublesome. The hue term is usually written as:
        c1 = sqrt(a1**2 + b1**2)
        c2 = sqrt(a2**2 + b2**2)
        term = (a1-a2)**2 + (b1-b2)**2 - (c1-c2)**2
        dH = sqrt(term)

    However, this has poor roundoff properties when a or b is dominant.
    Instead, ab is a vector with elements a and b. The same dH term can be
    re-written as:
        |ab1-ab2|**2 - (|ab1| - |ab2|)**2
    and then simplified to:
        2*|ab1|*|ab2| - 2*dot(ab1, ab2)
    """
    # The computation needs double precision internally for accuracy; note
    # whether the caller supplied float32 so the result can be cast back.
    cast_back = _supported_float_type([lab1, lab2]) == np.float32
    lab1, lab2 = _float_inputs(lab1, lab2, allow_float32=False)

    # Only the chromatic (a, b) channels enter the hue term.
    a1, b1 = np.moveaxis(lab1, source=channel_axis, destination=0)[1:3]
    a2, b2 = np.moveaxis(lab2, source=channel_axis, destination=0)[1:3]

    # The magnitude of an (a, b) vector is the chroma.
    mag1 = np.hypot(a1, b1)
    mag2 = np.hypot(a2, b2)
    dot = a1 * a2 + b1 * b2

    result = 2 * ((mag1 * mag2) - dot)
    if cast_back:
        result = result.astype(np.float32)
    return result
|
||||
146
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/rgb_colors.py
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
aliceblue = (0.941, 0.973, 1)
|
||||
antiquewhite = (0.98, 0.922, 0.843)
|
||||
aqua = (0, 1, 1)
|
||||
aquamarine = (0.498, 1, 0.831)
|
||||
azure = (0.941, 1, 1)
|
||||
beige = (0.961, 0.961, 0.863)
|
||||
bisque = (1, 0.894, 0.769)
|
||||
black = (0, 0, 0)
|
||||
blanchedalmond = (1, 0.922, 0.804)
|
||||
blue = (0, 0, 1)
|
||||
blueviolet = (0.541, 0.169, 0.886)
|
||||
brown = (0.647, 0.165, 0.165)
|
||||
burlywood = (0.871, 0.722, 0.529)
|
||||
cadetblue = (0.373, 0.62, 0.627)
|
||||
chartreuse = (0.498, 1, 0)
|
||||
chocolate = (0.824, 0.412, 0.118)
|
||||
coral = (1, 0.498, 0.314)
|
||||
cornflowerblue = (0.392, 0.584, 0.929)
|
||||
cornsilk = (1, 0.973, 0.863)
|
||||
crimson = (0.863, 0.0784, 0.235)
|
||||
cyan = (0, 1, 1)
|
||||
darkblue = (0, 0, 0.545)
|
||||
darkcyan = (0, 0.545, 0.545)
|
||||
darkgoldenrod = (0.722, 0.525, 0.0431)
|
||||
darkgray = (0.663, 0.663, 0.663)
|
||||
darkgreen = (0, 0.392, 0)
|
||||
darkgrey = (0.663, 0.663, 0.663)
|
||||
darkkhaki = (0.741, 0.718, 0.42)
|
||||
darkmagenta = (0.545, 0, 0.545)
|
||||
darkolivegreen = (0.333, 0.42, 0.184)
|
||||
darkorange = (1, 0.549, 0)
|
||||
darkorchid = (0.6, 0.196, 0.8)
|
||||
darkred = (0.545, 0, 0)
|
||||
darksalmon = (0.914, 0.588, 0.478)
|
||||
darkseagreen = (0.561, 0.737, 0.561)
|
||||
darkslateblue = (0.282, 0.239, 0.545)
|
||||
darkslategray = (0.184, 0.31, 0.31)
|
||||
darkslategrey = (0.184, 0.31, 0.31)
|
||||
darkturquoise = (0, 0.808, 0.82)
|
||||
darkviolet = (0.58, 0, 0.827)
|
||||
deeppink = (1, 0.0784, 0.576)
|
||||
deepskyblue = (0, 0.749, 1)
|
||||
dimgray = (0.412, 0.412, 0.412)
|
||||
dimgrey = (0.412, 0.412, 0.412)
|
||||
dodgerblue = (0.118, 0.565, 1)
|
||||
firebrick = (0.698, 0.133, 0.133)
|
||||
floralwhite = (1, 0.98, 0.941)
|
||||
forestgreen = (0.133, 0.545, 0.133)
|
||||
fuchsia = (1, 0, 1)
|
||||
gainsboro = (0.863, 0.863, 0.863)
|
||||
ghostwhite = (0.973, 0.973, 1)
|
||||
gold = (1, 0.843, 0)
|
||||
goldenrod = (0.855, 0.647, 0.125)
|
||||
gray = (0.502, 0.502, 0.502)
|
||||
green = (0, 0.502, 0)
|
||||
greenyellow = (0.678, 1, 0.184)
|
||||
grey = (0.502, 0.502, 0.502)
|
||||
honeydew = (0.941, 1, 0.941)
|
||||
hotpink = (1, 0.412, 0.706)
|
||||
indianred = (0.804, 0.361, 0.361)
|
||||
indigo = (0.294, 0, 0.51)
|
||||
ivory = (1, 1, 0.941)
|
||||
khaki = (0.941, 0.902, 0.549)
|
||||
lavender = (0.902, 0.902, 0.98)
|
||||
lavenderblush = (1, 0.941, 0.961)
|
||||
lawngreen = (0.486, 0.988, 0)
|
||||
lemonchiffon = (1, 0.98, 0.804)
|
||||
lightblue = (0.678, 0.847, 0.902)
|
||||
lightcoral = (0.941, 0.502, 0.502)
|
||||
lightcyan = (0.878, 1, 1)
|
||||
lightgoldenrodyellow = (0.98, 0.98, 0.824)
|
||||
lightgray = (0.827, 0.827, 0.827)
|
||||
lightgreen = (0.565, 0.933, 0.565)
|
||||
lightgrey = (0.827, 0.827, 0.827)
|
||||
lightpink = (1, 0.714, 0.757)
|
||||
lightsalmon = (1, 0.627, 0.478)
|
||||
lightseagreen = (0.125, 0.698, 0.667)
|
||||
lightskyblue = (0.529, 0.808, 0.98)
|
||||
lightslategray = (0.467, 0.533, 0.6)
|
||||
lightslategrey = (0.467, 0.533, 0.6)
|
||||
lightsteelblue = (0.69, 0.769, 0.871)
|
||||
lightyellow = (1, 1, 0.878)
|
||||
lime = (0, 1, 0)
|
||||
limegreen = (0.196, 0.804, 0.196)
|
||||
linen = (0.98, 0.941, 0.902)
|
||||
magenta = (1, 0, 1)
|
||||
maroon = (0.502, 0, 0)
|
||||
mediumaquamarine = (0.4, 0.804, 0.667)
|
||||
mediumblue = (0, 0, 0.804)
|
||||
mediumorchid = (0.729, 0.333, 0.827)
|
||||
mediumpurple = (0.576, 0.439, 0.859)
|
||||
mediumseagreen = (0.235, 0.702, 0.443)
|
||||
mediumslateblue = (0.482, 0.408, 0.933)
|
||||
mediumspringgreen = (0, 0.98, 0.604)
|
||||
mediumturquoise = (0.282, 0.82, 0.8)
|
||||
mediumvioletred = (0.78, 0.0824, 0.522)
|
||||
midnightblue = (0.098, 0.098, 0.439)
|
||||
mintcream = (0.961, 1, 0.98)
|
||||
mistyrose = (1, 0.894, 0.882)
|
||||
moccasin = (1, 0.894, 0.71)
|
||||
navajowhite = (1, 0.871, 0.678)
|
||||
navy = (0, 0, 0.502)
|
||||
oldlace = (0.992, 0.961, 0.902)
|
||||
olive = (0.502, 0.502, 0)
|
||||
olivedrab = (0.42, 0.557, 0.137)
|
||||
orange = (1, 0.647, 0)
|
||||
orangered = (1, 0.271, 0)
|
||||
orchid = (0.855, 0.439, 0.839)
|
||||
palegoldenrod = (0.933, 0.91, 0.667)
|
||||
palegreen = (0.596, 0.984, 0.596)
# Fix: `palevioletred` previously held (0.686, 0.933, 0.933), which is the
# CSS value of PaleTurquoise (#AFEEEE); PaleTurquoise itself was missing
# from this alphabetical table. Values below are CSS3 keyword colors
# divided by 255 and rounded to 3 significant digits, matching the file.
paleturquoise = (0.686, 0.933, 0.933)  # CSS PaleTurquoise #AFEEEE
palevioletred = (0.859, 0.439, 0.576)  # CSS PaleVioletRed #DB7093
|
||||
papayawhip = (1, 0.937, 0.835)
|
||||
peachpuff = (1, 0.855, 0.725)
|
||||
peru = (0.804, 0.522, 0.247)
|
||||
pink = (1, 0.753, 0.796)
|
||||
plum = (0.867, 0.627, 0.867)
|
||||
powderblue = (0.69, 0.878, 0.902)
|
||||
purple = (0.502, 0, 0.502)
|
||||
red = (1, 0, 0)
|
||||
rosybrown = (0.737, 0.561, 0.561)
|
||||
royalblue = (0.255, 0.412, 0.882)
|
||||
saddlebrown = (0.545, 0.271, 0.0745)
|
||||
salmon = (0.98, 0.502, 0.447)
|
||||
sandybrown = (0.98, 0.643, 0.376)
|
||||
seagreen = (0.18, 0.545, 0.341)
|
||||
seashell = (1, 0.961, 0.933)
|
||||
sienna = (0.627, 0.322, 0.176)
|
||||
silver = (0.753, 0.753, 0.753)
|
||||
skyblue = (0.529, 0.808, 0.922)
|
||||
slateblue = (0.416, 0.353, 0.804)
|
||||
slategray = (0.439, 0.502, 0.565)
|
||||
slategrey = (0.439, 0.502, 0.565)
|
||||
snow = (1, 0.98, 0.98)
|
||||
springgreen = (0, 1, 0.498)
|
||||
steelblue = (0.275, 0.51, 0.706)
|
||||
tan = (0.824, 0.706, 0.549)
|
||||
teal = (0, 0.502, 0.502)
|
||||
thistle = (0.847, 0.749, 0.847)
|
||||
tomato = (1, 0.388, 0.278)
|
||||
turquoise = (0.251, 0.878, 0.816)
|
||||
violet = (0.933, 0.51, 0.933)
|
||||
wheat = (0.961, 0.871, 0.702)
|
||||
white = (1, 1, 1)
|
||||
whitesmoke = (0.961, 0.961, 0.961)
|
||||
yellow = (1, 1, 0)
|
||||
yellowgreen = (0.604, 0.804, 0.196)
|
||||
0
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/tests/__init__.py
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/tests/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
94
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/tests/test_adapt_rgb.py
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
"""Tests for :func:`skimage.color.adapt_rgb.adapt_rgb` and its strategies.

`adapt_rgb` wraps a gray-scale filter so it can be applied to RGB images,
either channel-by-channel (`each_channel`) or on the HSV value channel
(`hsv_value`). These tests check both strategies against manually applying
the underlying filter.
"""
from functools import partial

import numpy as np

from skimage import img_as_float, img_as_uint
from skimage import color, data, filters
from skimage.color.adapt_rgb import adapt_rgb, each_channel, hsv_value

# Down-sample image for quicker testing.
COLOR_IMAGE = data.astronaut()[::5, ::6]
GRAY_IMAGE = data.camera()[::5, ::5]

SIGMA = 3
smooth = partial(filters.gaussian, sigma=SIGMA)
assert_allclose = partial(np.testing.assert_allclose, atol=1e-8)


# Sobel edge filter adapted to run per-channel on RGB input.
@adapt_rgb(each_channel)
def edges_each(image):
    return filters.sobel(image)


# Gaussian smoothing adapted per-channel; forwards the extra `sigma` arg.
@adapt_rgb(each_channel)
def smooth_each(image, sigma):
    return filters.gaussian(image, sigma)


# Zero out masked pixels per-channel; exercises a non-filter extra argument.
@adapt_rgb(each_channel)
def mask_each(image, mask):
    result = image.copy()
    result[mask] = 0
    return result


# Sobel edge filter adapted to run on the HSV value channel only.
@adapt_rgb(hsv_value)
def edges_hsv(image):
    return filters.sobel(image)


# Gaussian smoothing on the HSV value channel; forwards `sigma`.
@adapt_rgb(hsv_value)
def smooth_hsv(image, sigma):
    return filters.gaussian(image, sigma)


# Like `edges_hsv` but returns a non-float (uint16) result, to test the
# dtype handling of the hsv_value strategy.
@adapt_rgb(hsv_value)
def edges_hsv_uint(image):
    return img_as_uint(filters.sobel(image))


def test_gray_scale_image():
    # We don't need to test both `hsv_value` and `each_channel` since
    # `adapt_rgb` is handling gray-scale inputs.
    assert_allclose(edges_each(GRAY_IMAGE), filters.sobel(GRAY_IMAGE))


def test_each_channel():
    # Each output channel must equal the filter applied to that input channel.
    filtered = edges_each(COLOR_IMAGE)
    for i, channel in enumerate(np.rollaxis(filtered, axis=-1)):
        expected = img_as_float(filters.sobel(COLOR_IMAGE[:, :, i]))
        assert_allclose(channel, expected)


def test_each_channel_with_filter_argument():
    # Extra positional args (sigma) must be forwarded to the wrapped filter.
    filtered = smooth_each(COLOR_IMAGE, SIGMA)
    for i, channel in enumerate(np.rollaxis(filtered, axis=-1)):
        assert_allclose(channel, smooth(COLOR_IMAGE[:, :, i]))


def test_each_channel_with_asymmetric_kernel():
    # Smoke test: a non-square/asymmetric mask argument must not raise.
    mask = np.triu(np.ones(COLOR_IMAGE.shape[:2], dtype=bool))
    mask_each(COLOR_IMAGE, mask)


def test_hsv_value():
    # Filtering through hsv_value must equal filtering the V channel alone.
    filtered = edges_hsv(COLOR_IMAGE)
    value = color.rgb2hsv(COLOR_IMAGE)[:, :, 2]
    assert_allclose(color.rgb2hsv(filtered)[:, :, 2], filters.sobel(value))


def test_hsv_value_with_filter_argument():
    # Same as above, with an extra forwarded argument (sigma).
    filtered = smooth_hsv(COLOR_IMAGE, SIGMA)
    value = color.rgb2hsv(COLOR_IMAGE)[:, :, 2]
    assert_allclose(color.rgb2hsv(filtered)[:, :, 2], smooth(value))


def test_hsv_value_with_non_float_output():
    # Since `rgb2hsv` returns a float image and the result of the filtered
    # result is inserted into the HSV image, we want to make sure there isn't
    # a dtype mismatch.
    filtered = edges_hsv_uint(COLOR_IMAGE)
    filtered_value = color.rgb2hsv(filtered)[:, :, 2]
    value = color.rgb2hsv(COLOR_IMAGE)[:, :, 2]
    # Reduce tolerance because dtype conversion.
    assert_allclose(filtered_value, filters.sobel(value), rtol=1e-5, atol=1e-5)
|
||||
985
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/tests/test_colorconv.py
vendored
Normal file
@@ -0,0 +1,985 @@
|
||||
"""Tests for color conversion functions.
|
||||
|
||||
Authors
|
||||
-------
|
||||
- the rgb2hsv test was written by Nicolas Pinto, 2009
|
||||
- other tests written by Ralf Gommers, 2009
|
||||
|
||||
:license: modified BSD
|
||||
"""
|
||||
|
||||
import colorsys
|
||||
import numpy as np
|
||||
import pytest
|
||||
from numpy.testing import (assert_almost_equal, assert_array_almost_equal,
|
||||
assert_equal)
|
||||
|
||||
from skimage import data
|
||||
from skimage._shared._warnings import expected_warnings
|
||||
from skimage._shared.testing import fetch
|
||||
from skimage._shared.utils import _supported_float_type, slice_at_axis
|
||||
from skimage.color import (rgb2hsv, hsv2rgb,
|
||||
rgb2xyz, xyz2rgb,
|
||||
rgb2hed, hed2rgb,
|
||||
separate_stains,
|
||||
combine_stains,
|
||||
rgb2rgbcie, rgbcie2rgb,
|
||||
convert_colorspace,
|
||||
rgb2gray, gray2rgb,
|
||||
xyz2lab, lab2xyz,
|
||||
lab2rgb, rgb2lab,
|
||||
xyz2luv, luv2xyz,
|
||||
luv2rgb, rgb2luv,
|
||||
lab2lch, lch2lab,
|
||||
rgb2yuv, yuv2rgb,
|
||||
rgb2yiq, yiq2rgb,
|
||||
rgb2ypbpr, ypbpr2rgb,
|
||||
rgb2ycbcr, ycbcr2rgb,
|
||||
rgb2ydbdr, ydbdr2rgb,
|
||||
rgba2rgb, gray2rgba)
|
||||
from skimage.util import img_as_float, img_as_ubyte, img_as_float32
|
||||
|
||||
|
||||
class TestColorconv():
|
||||
|
||||
img_rgb = data.colorwheel()
|
||||
img_grayscale = data.camera()
|
||||
img_rgba = np.array([[[0, 0.5, 1, 0],
|
||||
[0, 0.5, 1, 1],
|
||||
[0, 0.5, 1, 0.5]]]).astype(float)
|
||||
img_stains = img_as_float(img_rgb) * 0.3
|
||||
|
||||
colbars = np.array([[1, 1, 0, 0, 1, 1, 0, 0],
|
||||
[1, 1, 1, 1, 0, 0, 0, 0],
|
||||
[1, 0, 1, 0, 1, 0, 1, 0]]).astype(float)
|
||||
colbars_array = np.swapaxes(colbars.reshape(3, 4, 2), 0, 2)
|
||||
colbars_point75 = colbars * 0.75
|
||||
colbars_point75_array = np.swapaxes(colbars_point75.reshape(3, 4, 2), 0, 2)
|
||||
|
||||
xyz_array = np.array([[[0.4124, 0.21260, 0.01930]], # red
|
||||
[[0, 0, 0]], # black
|
||||
[[.9505, 1., 1.089]], # white
|
||||
[[.1805, .0722, .9505]], # blue
|
||||
[[.07719, .15438, .02573]], # green
|
||||
])
|
||||
lab_array = np.array([[[53.233, 80.109, 67.220]], # red
|
||||
[[0., 0., 0.]], # black
|
||||
[[100.0, 0.005, -0.010]], # white
|
||||
[[32.303, 79.197, -107.864]], # blue
|
||||
[[46.229, -51.7, 49.898]], # green
|
||||
])
|
||||
|
||||
luv_array = np.array([[[53.233, 175.053, 37.751]], # red
|
||||
[[0., 0., 0.]], # black
|
||||
[[100., 0.001, -0.017]], # white
|
||||
[[32.303, -9.400, -130.358]], # blue
|
||||
[[46.228, -43.774, 56.589]], # green
|
||||
])
|
||||
|
||||
# RGBA to RGB
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, 2, -1, -2, -3])
|
||||
def test_rgba2rgb_conversion(self, channel_axis):
|
||||
rgba = self.img_rgba
|
||||
|
||||
rgba = np.moveaxis(rgba, source=-1, destination=channel_axis)
|
||||
rgb = rgba2rgb(rgba, channel_axis=channel_axis)
|
||||
rgb = np.moveaxis(rgb, source=channel_axis, destination=-1)
|
||||
|
||||
expected = np.array([[[1, 1, 1],
|
||||
[0, 0.5, 1],
|
||||
[0.5, 0.75, 1]]]).astype(float)
|
||||
assert_equal(rgb.shape, expected.shape)
|
||||
assert_almost_equal(rgb, expected)
|
||||
|
||||
def test_rgba2rgb_error_grayscale(self):
|
||||
with pytest.raises(ValueError):
|
||||
rgba2rgb(self.img_grayscale)
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [None, 1.5])
|
||||
def test_rgba2rgb_error_channel_axis_invalid(self, channel_axis):
|
||||
with pytest.raises(TypeError):
|
||||
rgba2rgb(self.img_rgba, channel_axis=channel_axis)
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [-4, 3])
|
||||
def test_rgba2rgb_error_channel_axis_out_of_range(self, channel_axis):
|
||||
with pytest.raises(np.AxisError):
|
||||
rgba2rgb(self.img_rgba, channel_axis=channel_axis)
|
||||
|
||||
def test_rgba2rgb_error_rgb(self):
|
||||
with pytest.raises(ValueError):
|
||||
rgba2rgb(self.img_rgb)
|
||||
|
||||
def test_rgba2rgb_dtype(self):
|
||||
rgba = self.img_rgba.astype('float64')
|
||||
rgba32 = img_as_float32(rgba)
|
||||
|
||||
assert rgba2rgb(rgba).dtype == rgba.dtype
|
||||
assert rgba2rgb(rgba32).dtype == rgba32.dtype
|
||||
|
||||
# RGB to HSV
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_rgb2hsv_conversion(self, channel_axis):
|
||||
rgb = img_as_float(self.img_rgb)[::16, ::16]
|
||||
|
||||
_rgb = np.moveaxis(rgb, source=-1, destination=channel_axis)
|
||||
hsv = rgb2hsv(_rgb, channel_axis=channel_axis)
|
||||
hsv = np.moveaxis(hsv, source=channel_axis, destination=-1)
|
||||
hsv = hsv.reshape(-1, 3)
|
||||
|
||||
# ground truth from colorsys
|
||||
gt = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2])
|
||||
for pt in rgb.reshape(-1, 3)]
|
||||
)
|
||||
assert_almost_equal(hsv, gt)
|
||||
|
||||
def test_rgb2hsv_error_grayscale(self):
|
||||
with pytest.raises(ValueError):
|
||||
rgb2hsv(self.img_grayscale)
|
||||
|
||||
def test_rgb2hsv_dtype(self):
|
||||
rgb = img_as_float(self.img_rgb)
|
||||
rgb32 = img_as_float32(self.img_rgb)
|
||||
|
||||
assert rgb2hsv(rgb).dtype == rgb.dtype
|
||||
assert rgb2hsv(rgb32).dtype == rgb32.dtype
|
||||
|
||||
# HSV to RGB
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_hsv2rgb_conversion(self, channel_axis):
|
||||
rgb = self.img_rgb.astype("float32")[::16, ::16]
|
||||
# create HSV image with colorsys
|
||||
hsv = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2])
|
||||
for pt in rgb.reshape(-1, 3)]).reshape(rgb.shape)
|
||||
|
||||
hsv = np.moveaxis(hsv, source=-1, destination=channel_axis)
|
||||
_rgb = hsv2rgb(hsv, channel_axis=channel_axis)
|
||||
_rgb = np.moveaxis(_rgb, source=channel_axis, destination=-1)
|
||||
|
||||
# convert back to RGB and compare with original.
|
||||
# relative precision for RGB -> HSV roundtrip is about 1e-6
|
||||
assert_almost_equal(rgb, _rgb, decimal=4)
|
||||
|
||||
def test_hsv2rgb_error_grayscale(self):
|
||||
with pytest.raises(ValueError):
|
||||
hsv2rgb(self.img_grayscale)
|
||||
|
||||
def test_hsv2rgb_dtype(self):
|
||||
rgb = self.img_rgb.astype("float32")[::16, ::16]
|
||||
# create HSV image with colorsys
|
||||
hsv = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2])
|
||||
for pt in rgb.reshape(-1, 3)],
|
||||
dtype='float64').reshape(rgb.shape)
|
||||
hsv32 = hsv.astype('float32')
|
||||
|
||||
assert hsv2rgb(hsv).dtype == hsv.dtype
|
||||
assert hsv2rgb(hsv32).dtype == hsv32.dtype
|
||||
|
||||
# RGB to XYZ
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_rgb2xyz_conversion(self, channel_axis):
|
||||
gt = np.array([[[0.950456, 1. , 1.088754],
|
||||
[0.538003, 0.787329, 1.06942 ],
|
||||
[0.592876, 0.28484 , 0.969561],
|
||||
[0.180423, 0.072169, 0.950227]],
|
||||
[[0.770033, 0.927831, 0.138527],
|
||||
[0.35758 , 0.71516 , 0.119193],
|
||||
[0.412453, 0.212671, 0.019334],
|
||||
[0. , 0. , 0. ]]])
|
||||
|
||||
img = np.moveaxis(
|
||||
self.colbars_array, source=-1, destination=channel_axis
|
||||
)
|
||||
out = rgb2xyz(img, channel_axis=channel_axis)
|
||||
out = np.moveaxis(out, source=channel_axis, destination=-1)
|
||||
|
||||
assert_almost_equal(out, gt)
|
||||
|
||||
# stop repeating the "raises" checks for all other functions that are
|
||||
# implemented with color._convert()
|
||||
def test_rgb2xyz_error_grayscale(self):
|
||||
with pytest.raises(ValueError):
|
||||
rgb2xyz(self.img_grayscale)
|
||||
|
||||
def test_rgb2xyz_dtype(self):
|
||||
img = self.colbars_array
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert rgb2xyz(img).dtype == img.dtype
|
||||
assert rgb2xyz(img32).dtype == img32.dtype
|
||||
|
||||
# XYZ to RGB
|
||||
def test_xyz2rgb_conversion(self):
|
||||
assert_almost_equal(xyz2rgb(rgb2xyz(self.colbars_array)),
|
||||
self.colbars_array)
|
||||
|
||||
def test_xyz2rgb_dtype(self):
|
||||
img = rgb2xyz(self.colbars_array)
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert xyz2rgb(img).dtype == img.dtype
|
||||
assert xyz2rgb(img32).dtype == img32.dtype
|
||||
|
||||
# RGB<->XYZ roundtrip on another image
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_xyz_rgb_roundtrip(self, channel_axis):
|
||||
img_rgb = img_as_float(self.img_rgb)
|
||||
|
||||
img_rgb = np.moveaxis(img_rgb, source=-1, destination=channel_axis)
|
||||
round_trip = xyz2rgb(rgb2xyz(img_rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis)
|
||||
|
||||
assert_array_almost_equal(round_trip, img_rgb)
|
||||
|
||||
# HED<->RGB roundtrip with ubyte image
|
||||
def test_hed_rgb_roundtrip(self):
|
||||
img_in = img_as_ubyte(self.img_stains)
|
||||
img_out = rgb2hed(hed2rgb(img_in))
|
||||
assert_equal(img_as_ubyte(img_out), img_in)
|
||||
|
||||
# HED<->RGB roundtrip with float image
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_hed_rgb_float_roundtrip(self, channel_axis):
|
||||
img_in = self.img_stains
|
||||
img_in = np.moveaxis(img_in, source=-1, destination=channel_axis)
|
||||
img_out = rgb2hed(
|
||||
hed2rgb(img_in, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis
|
||||
)
|
||||
assert_array_almost_equal(img_out, img_in)
|
||||
|
||||
# BRO<->RGB roundtrip with ubyte image
|
||||
def test_bro_rgb_roundtrip(self):
|
||||
from skimage.color.colorconv import bro_from_rgb, rgb_from_bro
|
||||
img_in = img_as_ubyte(self.img_stains)
|
||||
img_out = combine_stains(img_in, rgb_from_bro)
|
||||
img_out = separate_stains(img_out, bro_from_rgb)
|
||||
assert_equal(img_as_ubyte(img_out), img_in)
|
||||
|
||||
# BRO<->RGB roundtrip with float image
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1])
|
||||
def test_bro_rgb_roundtrip_float(self, channel_axis):
|
||||
from skimage.color.colorconv import bro_from_rgb, rgb_from_bro
|
||||
img_in = self.img_stains
|
||||
img_in = np.moveaxis(img_in, source=-1, destination=channel_axis)
|
||||
img_out = combine_stains(
|
||||
img_in, rgb_from_bro, channel_axis=channel_axis
|
||||
)
|
||||
img_out = separate_stains(
|
||||
img_out, bro_from_rgb, channel_axis=channel_axis
|
||||
)
|
||||
assert_array_almost_equal(img_out, img_in)
|
||||
|
||||
# RGB to RGB CIE
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_rgb2rgbcie_conversion(self, channel_axis):
|
||||
gt = np.array([[[ 0.1488856 , 0.18288098, 0.19277574],
|
||||
[ 0.01163224, 0.16649536, 0.18948516],
|
||||
[ 0.12259182, 0.03308008, 0.17298223],
|
||||
[-0.01466154, 0.01669446, 0.16969164]],
|
||||
[[ 0.16354714, 0.16618652, 0.0230841 ],
|
||||
[ 0.02629378, 0.1498009 , 0.01979351],
|
||||
[ 0.13725336, 0.01638562, 0.00329059],
|
||||
[ 0. , 0. , 0. ]]])
|
||||
|
||||
img = np.moveaxis(
|
||||
self.colbars_array, source=-1, destination=channel_axis
|
||||
)
|
||||
out = rgb2rgbcie(img, channel_axis=channel_axis)
|
||||
|
||||
out = np.moveaxis(out, source=channel_axis, destination=-1)
|
||||
|
||||
assert_almost_equal(out, gt)
|
||||
|
||||
def test_rgb2rgbcie_dtype(self):
|
||||
img = self.colbars_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert rgb2rgbcie(img).dtype == img.dtype
|
||||
assert rgb2rgbcie(img32).dtype == img32.dtype
|
||||
|
||||
# RGB CIE to RGB
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_rgbcie2rgb_conversion(self, channel_axis):
|
||||
rgb = np.moveaxis(
|
||||
self.colbars_array, source=-1, destination=channel_axis
|
||||
)
|
||||
round_trip = rgbcie2rgb(rgb2rgbcie(rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis)
|
||||
# only roundtrip test, we checked rgb2rgbcie above already
|
||||
assert_almost_equal(round_trip, rgb)
|
||||
|
||||
def test_rgbcie2rgb_dtype(self):
|
||||
img = rgb2rgbcie(self.colbars_array).astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert rgbcie2rgb(img).dtype == img.dtype
|
||||
assert rgbcie2rgb(img32).dtype == img32.dtype
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, -1])
|
||||
def test_convert_colorspace(self, channel_axis):
|
||||
colspaces = ['HSV', 'RGB CIE', 'XYZ', 'YCbCr', 'YPbPr', 'YDbDr']
|
||||
colfuncs_from = [
|
||||
hsv2rgb, rgbcie2rgb, xyz2rgb,
|
||||
ycbcr2rgb, ypbpr2rgb, ydbdr2rgb
|
||||
]
|
||||
colfuncs_to = [
|
||||
rgb2hsv, rgb2rgbcie, rgb2xyz,
|
||||
rgb2ycbcr, rgb2ypbpr, rgb2ydbdr
|
||||
]
|
||||
|
||||
colbars_array = np.moveaxis(
|
||||
self.colbars_array, source=-1, destination=channel_axis
|
||||
)
|
||||
|
||||
kw = dict(channel_axis=channel_axis)
|
||||
|
||||
assert_almost_equal(
|
||||
convert_colorspace(colbars_array, 'RGB', 'RGB', **kw),
|
||||
colbars_array)
|
||||
|
||||
for i, space in enumerate(colspaces):
|
||||
gt = colfuncs_from[i](colbars_array, **kw)
|
||||
assert_almost_equal(
|
||||
convert_colorspace(colbars_array, space, 'RGB', **kw), gt)
|
||||
gt = colfuncs_to[i](colbars_array, **kw)
|
||||
assert_almost_equal(
|
||||
convert_colorspace(colbars_array, 'RGB', space, **kw), gt)
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
convert_colorspace(self.colbars_array, 'nokey', 'XYZ')
|
||||
with pytest.raises(ValueError):
|
||||
convert_colorspace(self.colbars_array, 'RGB', 'nokey')
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_rgb2gray(self, channel_axis):
|
||||
x = np.array([1, 1, 1]).reshape((1, 1, 3)).astype(float)
|
||||
x = np.moveaxis(x, source=-1, destination=channel_axis)
|
||||
g = rgb2gray(x, channel_axis=channel_axis)
|
||||
assert_array_almost_equal(g, 1)
|
||||
|
||||
assert_equal(g.shape, (1, 1))
|
||||
|
||||
def test_rgb2gray_contiguous(self):
|
||||
x = np.random.rand(10, 10, 3)
|
||||
assert rgb2gray(x).flags["C_CONTIGUOUS"]
|
||||
assert rgb2gray(x[:5, :5]).flags["C_CONTIGUOUS"]
|
||||
|
||||
def test_rgb2gray_alpha(self):
|
||||
x = np.empty((10, 10, 4))
|
||||
with pytest.raises(ValueError):
|
||||
rgb2gray(x)
|
||||
|
||||
def test_rgb2gray_on_gray(self):
|
||||
with pytest.raises(ValueError):
|
||||
rgb2gray(np.empty((5, 5)))
|
||||
|
||||
def test_rgb2gray_dtype(self):
    """rgb2gray preserves float64 and float32 input dtypes."""
    img64 = np.random.rand(10, 10, 3).astype('float64')
    img32 = img64.astype('float32')

    assert rgb2gray(img64).dtype == img64.dtype
    assert rgb2gray(img32).dtype == img32.dtype
# test matrices for xyz2lab and lab2xyz generated using
# http://www.easyrgb.com/index.php?X=CALC
# Note: easyrgb website displays xyz*100
def test_xyz2lab(self):
    """xyz2lab matches stored reference arrays for every supported illuminant."""
    assert_array_almost_equal(xyz2lab(self.xyz_array),
                              self.lab_array, decimal=3)

    # Illuminants with reference data for all three observer angles.
    for illuminant in ["A", "B", "C", "d50", "d55", "d65"]:
        illuminant = illuminant.lower()
        for observer in ["2", "10", "R"]:
            observer = observer.lower()
            fname = f'color/tests/data/lab_array_{illuminant}_{observer}.npy'
            expected = np.load(fetch(fname))
            assert_array_almost_equal(
                expected,
                xyz2lab(self.xyz_array, illuminant, observer),
                decimal=2)

    # These illuminants only ship reference data for the 2-degree observer.
    for illuminant in ["d75", "e"]:
        fname = f'color/tests/data/lab_array_{illuminant}_2.npy'
        expected = np.load(fetch(fname))
        assert_array_almost_equal(expected,
                                  xyz2lab(self.xyz_array, illuminant, "2"),
                                  decimal=2)
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_xyz2lab_channel_axis(self, channel_axis):
|
||||
# test conversion with channels along a specified axis
|
||||
xyz = np.moveaxis(self.xyz_array, source=-1, destination=channel_axis)
|
||||
lab = xyz2lab(xyz, channel_axis=channel_axis)
|
||||
lab = np.moveaxis(lab, source=channel_axis, destination=-1)
|
||||
assert_array_almost_equal(lab, self.lab_array, decimal=3)
|
||||
|
||||
def test_xyz2lab_dtype(self):
|
||||
img = self.xyz_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert xyz2lab(img).dtype == img.dtype
|
||||
assert xyz2lab(img32).dtype == img32.dtype
|
||||
|
||||
def test_lab2xyz(self):
|
||||
assert_array_almost_equal(lab2xyz(self.lab_array),
|
||||
self.xyz_array, decimal=3)
|
||||
|
||||
# Test the conversion with the rest of the illuminants.
|
||||
for I in ["A", "B", "C", "d50", "d55", "d65"]:
|
||||
I = I.lower()
|
||||
for obs in ["2", "10", "R"]:
|
||||
obs = obs.lower()
|
||||
fname = f'color/tests/data/lab_array_{I}_{obs}.npy'
|
||||
lab_array_I_obs = np.load(fetch(fname))
|
||||
assert_array_almost_equal(lab2xyz(lab_array_I_obs, I, obs),
|
||||
self.xyz_array, decimal=3)
|
||||
for I in ["d75", "e"]:
|
||||
fname = f'color/tests/data/lab_array_{I}_2.npy'
|
||||
lab_array_I_obs = np.load(fetch(fname))
|
||||
assert_array_almost_equal(lab2xyz(lab_array_I_obs, I, "2"),
|
||||
self.xyz_array, decimal=3)
|
||||
|
||||
# And we include a call to test the exception handling in the code.
|
||||
with pytest.raises(ValueError):
|
||||
lab2xyz(lab_array_I_obs, "NaI", "2") # Not an illuminant
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
lab2xyz(lab_array_I_obs, "d50", "42") # Not a degree
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_lab2xyz_channel_axis(self, channel_axis):
|
||||
# test conversion with channels along a specified axis
|
||||
lab = np.moveaxis(self.lab_array, source=-1, destination=channel_axis)
|
||||
xyz = lab2xyz(lab, channel_axis=channel_axis)
|
||||
xyz = np.moveaxis(xyz, source=channel_axis, destination=-1)
|
||||
assert_array_almost_equal(xyz, self.xyz_array, decimal=3)
|
||||
|
||||
def test_lab2xyz_dtype(self):
|
||||
img = self.lab_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert lab2xyz(img).dtype == img.dtype
|
||||
assert lab2xyz(img32).dtype == img32.dtype
|
||||
|
||||
def test_rgb2lab_brucelindbloom(self):
|
||||
"""
|
||||
Test the RGB->Lab conversion by comparing to the calculator on the
|
||||
authoritative Bruce Lindbloom
|
||||
[website](http://brucelindbloom.com/index.html?ColorCalculator.html).
|
||||
"""
|
||||
# Obtained with D65 white point, sRGB model and gamma
|
||||
gt_for_colbars = np.array([
|
||||
[100, 0, 0],
|
||||
[97.1393, -21.5537, 94.4780],
|
||||
[91.1132, -48.0875, -14.1312],
|
||||
[87.7347, -86.1827, 83.1793],
|
||||
[60.3242, 98.2343, -60.8249],
|
||||
[53.2408, 80.0925, 67.2032],
|
||||
[32.2970, 79.1875, -107.8602],
|
||||
[0, 0, 0]]).T
|
||||
gt_array = np.swapaxes(gt_for_colbars.reshape(3, 4, 2), 0, 2)
|
||||
assert_array_almost_equal(
|
||||
rgb2lab(self.colbars_array), gt_array, decimal=2
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_lab_rgb_roundtrip(self, channel_axis):
|
||||
img_rgb = img_as_float(self.img_rgb)
|
||||
img_rgb = np.moveaxis(img_rgb, source=-1, destination=channel_axis)
|
||||
assert_array_almost_equal(
|
||||
lab2rgb(
|
||||
rgb2lab(img_rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis
|
||||
),
|
||||
img_rgb,
|
||||
)
|
||||
|
||||
def test_rgb2lab_dtype(self):
|
||||
img = self.colbars_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert rgb2lab(img).dtype == img.dtype
|
||||
assert rgb2lab(img32).dtype == img32.dtype
|
||||
|
||||
def test_lab2rgb_dtype(self):
|
||||
img = self.lab_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert lab2rgb(img).dtype == img.dtype
|
||||
assert lab2rgb(img32).dtype == img32.dtype
|
||||
|
||||
# test matrices for xyz2luv and luv2xyz generated using
|
||||
# http://www.easyrgb.com/index.php?X=CALC
|
||||
# Note: easyrgb website displays xyz*100
|
||||
def test_xyz2luv(self):
|
||||
assert_array_almost_equal(xyz2luv(self.xyz_array),
|
||||
self.luv_array, decimal=3)
|
||||
|
||||
# Test the conversion with the rest of the illuminants.
|
||||
for I in ["A", "B", "C", "d50", "d55", "d65"]:
|
||||
I = I.lower()
|
||||
for obs in ["2", "10", "R"]:
|
||||
obs = obs.lower()
|
||||
fname = f'color/tests/data/luv_array_{I}_{obs}.npy'
|
||||
luv_array_I_obs = np.load(fetch(fname))
|
||||
assert_array_almost_equal(luv_array_I_obs,
|
||||
xyz2luv(self.xyz_array, I, obs),
|
||||
decimal=2)
|
||||
for I in ["d75", "e"]:
|
||||
fname = f'color/tests/data/luv_array_{I}_2.npy'
|
||||
luv_array_I_obs = np.load(fetch(fname))
|
||||
assert_array_almost_equal(luv_array_I_obs,
|
||||
xyz2luv(self.xyz_array, I, "2"),
|
||||
decimal=2)
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_xyz2luv_channel_axis(self, channel_axis):
|
||||
# test conversion with channels along a specified axis
|
||||
xyz = np.moveaxis(self.xyz_array, source=-1, destination=channel_axis)
|
||||
luv = xyz2luv(xyz, channel_axis=channel_axis)
|
||||
luv = np.moveaxis(luv, source=channel_axis, destination=-1)
|
||||
assert_array_almost_equal(luv, self.luv_array, decimal=3)
|
||||
|
||||
def test_xyz2luv_dtype(self):
|
||||
img = self.xyz_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert xyz2luv(img).dtype == img.dtype
|
||||
assert xyz2luv(img32).dtype == img32.dtype
|
||||
|
||||
def test_luv2xyz(self):
|
||||
assert_array_almost_equal(luv2xyz(self.luv_array),
|
||||
self.xyz_array, decimal=3)
|
||||
|
||||
# Test the conversion with the rest of the illuminants.
|
||||
for I in ["A", "B", "C", "d50", "d55", "d65"]:
|
||||
I = I.lower()
|
||||
for obs in ["2", "10", "R"]:
|
||||
obs = obs.lower()
|
||||
fname = f'color/tests/data/luv_array_{I}_{obs}.npy'
|
||||
luv_array_I_obs = np.load(fetch(fname))
|
||||
assert_array_almost_equal(luv2xyz(luv_array_I_obs, I, obs),
|
||||
self.xyz_array, decimal=3)
|
||||
for I in ["d75", "e"]:
|
||||
fname = f'color/tests/data/luv_array_{I}_2.npy'
|
||||
luv_array_I_obs = np.load(fetch(fname))
|
||||
assert_array_almost_equal(luv2xyz(luv_array_I_obs, I, "2"),
|
||||
self.xyz_array, decimal=3)
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_luv2xyz_channel_axis(self, channel_axis):
|
||||
# test conversion with channels along a specified axis
|
||||
luv = np.moveaxis(self.luv_array, source=-1, destination=channel_axis)
|
||||
xyz = luv2xyz(luv, channel_axis=channel_axis)
|
||||
xyz = np.moveaxis(xyz, source=channel_axis, destination=-1)
|
||||
assert_array_almost_equal(xyz, self.xyz_array, decimal=3)
|
||||
|
||||
def test_luv2xyz_dtype(self):
|
||||
img = self.luv_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert luv2xyz(img).dtype == img.dtype
|
||||
assert luv2xyz(img32).dtype == img32.dtype
|
||||
|
||||
def test_rgb2luv_brucelindbloom(self):
|
||||
"""
|
||||
Test the RGB->Lab conversion by comparing to the calculator on the
|
||||
authoritative Bruce Lindbloom
|
||||
[website](http://brucelindbloom.com/index.html?ColorCalculator.html).
|
||||
"""
|
||||
# Obtained with D65 white point, sRGB model and gamma
|
||||
gt_for_colbars = np.array([
|
||||
[100, 0, 0],
|
||||
[97.1393, 7.7056, 106.7866],
|
||||
[91.1132, -70.4773, -15.2042],
|
||||
[87.7347, -83.0776, 107.3985],
|
||||
[60.3242, 84.0714, -108.6834],
|
||||
[53.2408, 175.0151, 37.7564],
|
||||
[32.2970, -9.4054, -130.3423],
|
||||
[0, 0, 0]]).T
|
||||
gt_array = np.swapaxes(gt_for_colbars.reshape(3, 4, 2), 0, 2)
|
||||
assert_array_almost_equal(rgb2luv(self.colbars_array),
|
||||
gt_array, decimal=2)
|
||||
|
||||
def test_rgb2luv_dtype(self):
|
||||
img = self.colbars_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert rgb2luv(img).dtype == img.dtype
|
||||
assert rgb2luv(img32).dtype == img32.dtype
|
||||
|
||||
def test_luv2rgb_dtype(self):
|
||||
img = self.luv_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert luv2rgb(img).dtype == img.dtype
|
||||
assert luv2rgb(img32).dtype == img32.dtype
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
def test_luv_rgb_roundtrip(self, channel_axis):
    """RGB -> Luv -> RGB round-trips losslessly for any channel-axis placement.

    Bug fix: the parametrize list previously read ``[0, 1, -1 -2]``; the
    missing comma made the final entry the arithmetic expression
    ``-1 - 2 == -3``, so axes -1 and -2 were never exercised (and -3 is
    equivalent to axis 0 for a 3D image). Every sibling roundtrip test in
    this class uses ``[0, 1, -1, -2]``.
    """
    img_rgb = img_as_float(self.img_rgb)
    img_rgb = np.moveaxis(img_rgb, source=-1, destination=channel_axis)
    assert_array_almost_equal(
        luv2rgb(
            rgb2luv(img_rgb, channel_axis=channel_axis),
            channel_axis=channel_axis
        ),
        img_rgb,
    )
def test_lab_rgb_outlier(self):
|
||||
lab_array = np.ones((3, 1, 3))
|
||||
lab_array[0] = [50, -12, 85]
|
||||
lab_array[1] = [50, 12, -85]
|
||||
lab_array[2] = [90, -4, -47]
|
||||
rgb_array = np.array([[[0.501, 0.481, 0]],
|
||||
[[0, 0.482, 1.]],
|
||||
[[0.578, 0.914, 1.]],
|
||||
])
|
||||
assert_almost_equal(lab2rgb(lab_array), rgb_array, decimal=3)
|
||||
|
||||
def test_lab_full_gamut(self):
    """Out-of-gamut Lab inputs trigger the negative-Z clipping warning."""
    a, b = np.meshgrid(np.arange(-100, 100), np.arange(-100, 100))
    lightness = np.ones(a.shape)
    lab = np.dstack((lightness, a, b))
    regex = (
        "Conversion from CIE-LAB to XYZ color space resulted in "
        "\\d+ negative Z values that have been clipped to zero"
    )
    for value in [0, 10, 20]:
        # Sweep the L channel; each setting produces clipped Z values.
        lab[:, :, 0] = value
        with pytest.warns(UserWarning, match=regex):
            lab2xyz(lab)
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_lab_lch_roundtrip(self, channel_axis):
|
||||
rgb = img_as_float(self.img_rgb)
|
||||
rgb = np.moveaxis(rgb, source=-1, destination=channel_axis)
|
||||
lab = rgb2lab(rgb, channel_axis=channel_axis)
|
||||
lab2 = lch2lab(
|
||||
lab2lch(lab, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis,
|
||||
)
|
||||
assert_array_almost_equal(lab2, lab)
|
||||
|
||||
def test_rgb_lch_roundtrip(self):
|
||||
rgb = img_as_float(self.img_rgb)
|
||||
lab = rgb2lab(rgb)
|
||||
lch = lab2lch(lab)
|
||||
lab2 = lch2lab(lch)
|
||||
rgb2 = lab2rgb(lab2)
|
||||
assert_array_almost_equal(rgb, rgb2)
|
||||
|
||||
def test_lab_lch_0d(self):
|
||||
lab0 = self._get_lab0()
|
||||
lch0 = lab2lch(lab0)
|
||||
lch2 = lab2lch(lab0[None, None, :])
|
||||
assert_array_almost_equal(lch0, lch2[0, 0, :])
|
||||
|
||||
def test_lab_lch_1d(self):
|
||||
lab0 = self._get_lab0()
|
||||
lch0 = lab2lch(lab0)
|
||||
lch1 = lab2lch(lab0[None, :])
|
||||
assert_array_almost_equal(lch0, lch1[0, :])
|
||||
|
||||
def test_lab_lch_3d(self):
|
||||
lab0 = self._get_lab0()
|
||||
lch0 = lab2lch(lab0)
|
||||
lch3 = lab2lch(lab0[None, None, None, :])
|
||||
assert_array_almost_equal(lch0, lch3[0, 0, 0, :])
|
||||
|
||||
def _get_lab0(self):
    """Return the Lab value of the top-left pixel of the RGB test image."""
    top_left = img_as_float(self.img_rgb[:1, :1, :])
    return rgb2lab(top_left)[0, 0, :]
def test_yuv(self):
|
||||
rgb = np.array([[[1.0, 1.0, 1.0]]])
|
||||
assert_array_almost_equal(rgb2yuv(rgb), np.array([[[1, 0, 0]]]))
|
||||
assert_array_almost_equal(rgb2yiq(rgb), np.array([[[1, 0, 0]]]))
|
||||
assert_array_almost_equal(rgb2ypbpr(rgb), np.array([[[1, 0, 0]]]))
|
||||
assert_array_almost_equal(
|
||||
rgb2ycbcr(rgb), np.array([[[235, 128, 128]]])
|
||||
)
|
||||
assert_array_almost_equal(rgb2ydbdr(rgb), np.array([[[1, 0, 0]]]))
|
||||
rgb = np.array([[[0.0, 1.0, 0.0]]])
|
||||
assert_array_almost_equal(
|
||||
rgb2yuv(rgb), np.array([[[0.587, -0.28886916, -0.51496512]]])
|
||||
)
|
||||
assert_array_almost_equal(
|
||||
rgb2yiq(rgb), np.array([[[0.587, -0.27455667, -0.52273617]]])
|
||||
)
|
||||
assert_array_almost_equal(
|
||||
rgb2ypbpr(rgb), np.array([[[0.587, -0.331264, -0.418688]]])
|
||||
)
|
||||
assert_array_almost_equal(
|
||||
rgb2ycbcr(rgb), np.array([[[144.553, 53.797, 34.214]]])
|
||||
)
|
||||
assert_array_almost_equal(
|
||||
rgb2ydbdr(rgb), np.array([[[0.587, -0.883, 1.116]]])
|
||||
)
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_yuv_roundtrip(self, channel_axis):
|
||||
img_rgb = img_as_float(self.img_rgb)[::16, ::16]
|
||||
img_rgb = np.moveaxis(img_rgb, source=-1, destination=channel_axis)
|
||||
assert_array_almost_equal(
|
||||
yuv2rgb(rgb2yuv(img_rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis),
|
||||
img_rgb)
|
||||
assert_array_almost_equal(
|
||||
yiq2rgb(rgb2yiq(img_rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis),
|
||||
img_rgb)
|
||||
assert_array_almost_equal(
|
||||
ypbpr2rgb(rgb2ypbpr(img_rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis),
|
||||
img_rgb)
|
||||
assert_array_almost_equal(
|
||||
ycbcr2rgb(rgb2ycbcr(img_rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis),
|
||||
img_rgb)
|
||||
assert_array_almost_equal(
|
||||
ydbdr2rgb(rgb2ydbdr(img_rgb, channel_axis=channel_axis),
|
||||
channel_axis=channel_axis),
|
||||
img_rgb)
|
||||
|
||||
def test_rgb2yuv_dtype(self):
|
||||
img = self.colbars_array.astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert rgb2yuv(img).dtype == img.dtype
|
||||
assert rgb2yuv(img32).dtype == img32.dtype
|
||||
|
||||
def test_yuv2rgb_dtype(self):
|
||||
img = rgb2yuv(self.colbars_array).astype('float64')
|
||||
img32 = img.astype('float32')
|
||||
|
||||
assert yuv2rgb(img).dtype == img.dtype
|
||||
assert yuv2rgb(img32).dtype == img32.dtype
|
||||
|
||||
def test_rgb2yiq_conversion(self):
|
||||
rgb = img_as_float(self.img_rgb)[::16, ::16]
|
||||
yiq = rgb2yiq(rgb).reshape(-1, 3)
|
||||
gt = np.array([colorsys.rgb_to_yiq(pt[0], pt[1], pt[2])
|
||||
for pt in rgb.reshape(-1, 3)]
|
||||
)
|
||||
assert_almost_equal(yiq, gt, decimal=2)
|
||||
|
||||
@pytest.mark.parametrize("func", [lab2rgb, lab2xyz])
|
||||
def test_warning_stacklevel(self, func):
|
||||
regex = (
|
||||
"Conversion from CIE-LAB.* XYZ.*color space resulted in "
|
||||
"1 negative Z values that have been clipped to zero"
|
||||
)
|
||||
with pytest.warns(UserWarning, match=regex) as messages:
|
||||
func(lab=[[[0, 0, 300.]]])
|
||||
assert len(messages) == 1
|
||||
assert messages[0].filename == __file__, "warning points at wrong file"
|
||||
|
||||
|
||||
def test_gray2rgb():
    """gray2rgb replicates each gray value across three new channels."""
    x = np.array([0, 0.5, 1])
    assert_equal(
        gray2rgb(x),
        np.array([[0, 0, 0],
                  [0.5, 0.5, 0.5],
                  [1, 1, 1]]),
    )

    # 2D input gains a trailing channel axis; dtype is preserved.
    x = x.reshape((3, 1))
    y = gray2rgb(x)
    assert_equal(y.shape, (3, 1, 3))
    assert_equal(y.dtype, x.dtype)
    assert_equal(y[..., 0], x)
    assert_equal(y[0, 0, :], [0, 0, 0])

    # Integer images keep their values unchanged in every channel.
    x = np.array([[0, 128, 255]], dtype=np.uint8)
    z = gray2rgb(x)
    assert_equal(z.shape, (1, 3, 3))
    assert_equal(z[..., 0], x)
    assert_equal(z[0, 1, :], [128, 128, 128])
def test_gray2rgb_rgb():
    """A trailing size-4 axis is treated as data: gray2rgb appends a new axis."""
    img = np.random.rand(5, 5, 4)
    rgb = gray2rgb(img)
    assert rgb.shape == img.shape + (3,)
    # Each of the three new channels is a copy of the input.
    for channel in range(3):
        assert_equal(img, rgb[..., channel])
@pytest.mark.parametrize("shape", [(5, 5), (5, 5, 4), (5, 4, 5, 4)])
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_gray2rgba(shape, channel_axis):
|
||||
# nD case
|
||||
img = np.random.random(shape)
|
||||
rgba = gray2rgba(img, channel_axis=channel_axis)
|
||||
assert rgba.ndim == img.ndim + 1
|
||||
|
||||
# Shape check
|
||||
new_axis_loc = channel_axis % rgba.ndim
|
||||
assert_equal(rgba.shape,
|
||||
shape[:new_axis_loc] + (4, ) + shape[new_axis_loc:])
|
||||
|
||||
# dtype check
|
||||
assert rgba.dtype == img.dtype
|
||||
|
||||
# RGB channels check
|
||||
for channel in range(3):
|
||||
assert_equal(rgba[slice_at_axis(channel, axis=new_axis_loc)], img)
|
||||
|
||||
# Alpha channel check
|
||||
assert_equal(rgba[slice_at_axis(3, axis=new_axis_loc)], 1.0)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("shape", [(5, 5), (5, 5, 4), (5, 4, 5, 4)])
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1, -2])
|
||||
def test_gray2rgb_channel_axis(shape, channel_axis):
|
||||
# nD case
|
||||
img = np.random.random(shape)
|
||||
rgb = gray2rgb(img, channel_axis=channel_axis)
|
||||
assert rgb.ndim == img.ndim + 1
|
||||
|
||||
# Shape check
|
||||
new_axis_loc = channel_axis % rgb.ndim
|
||||
assert_equal(rgb.shape,
|
||||
shape[:new_axis_loc] + (3, ) + shape[new_axis_loc:])
|
||||
|
||||
# dtype check
|
||||
assert rgb.dtype == img.dtype
|
||||
|
||||
|
||||
def test_gray2rgba_dtype():
    """gray2rgba preserves the dtype for float64, float32, uint8 and int input."""
    img_f64 = np.random.random((5, 5))
    img_u8 = img_as_ubyte(img_f64)
    candidates = (img_f64, img_f64.astype('float32'),
                  img_u8, img_u8.astype(int))

    for img in candidates:
        assert gray2rgba(img).dtype == img.dtype
def test_gray2rgba_alpha():
|
||||
img = np.random.random((5, 5))
|
||||
img_u8 = img_as_ubyte(img)
|
||||
|
||||
# Default
|
||||
alpha = None
|
||||
rgba = gray2rgba(img, alpha)
|
||||
|
||||
assert_equal(rgba[..., :3], gray2rgb(img))
|
||||
assert_equal(rgba[..., 3], 1.0)
|
||||
|
||||
# Scalar
|
||||
alpha = 0.5
|
||||
rgba = gray2rgba(img, alpha)
|
||||
|
||||
assert_equal(rgba[..., :3], gray2rgb(img))
|
||||
assert_equal(rgba[..., 3], alpha)
|
||||
|
||||
# Array
|
||||
alpha = np.random.random((5, 5))
|
||||
rgba = gray2rgba(img, alpha)
|
||||
|
||||
assert_equal(rgba[..., :3], gray2rgb(img))
|
||||
assert_equal(rgba[..., 3], alpha)
|
||||
|
||||
# Warning about alpha cast
|
||||
alpha = 0.5
|
||||
with expected_warnings(["alpha cannot be safely cast to image dtype"]):
|
||||
rgba = gray2rgba(img_u8, alpha)
|
||||
assert_equal(rgba[..., :3], gray2rgb(img_u8))
|
||||
|
||||
# Invalid shape
|
||||
alpha = np.random.random((5, 5, 1))
|
||||
expected_err_msg = ("alpha.shape must match image.shape")
|
||||
|
||||
with pytest.raises(ValueError) as err:
|
||||
rgba = gray2rgba(img, alpha)
|
||||
assert expected_err_msg == str(err.value)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("func", [rgb2gray, gray2rgb, gray2rgba])
@pytest.mark.parametrize("shape", ([(3, ), (2, 3), (4, 5, 3), (5, 4, 5, 3),
                                    (4, 5, 4, 5, 3)]))
def test_nD_gray_conversion(func, shape):
    """Gray conversions keep the leading dimensions of nD inputs intact."""
    img = np.random.rand(*shape)
    converted = func(img)
    # Compare only the dimensions both arrays share: rgb2gray drops the
    # channel axis while gray2rgb/gray2rgba append one.
    shared_ndim = min(converted.ndim, len(shape))
    assert converted.shape[:shared_ndim] == shape[:shared_ndim]
@pytest.mark.parametrize("func", [rgb2hsv, hsv2rgb,
|
||||
rgb2xyz, xyz2rgb,
|
||||
rgb2hed, hed2rgb,
|
||||
rgb2rgbcie, rgbcie2rgb,
|
||||
xyz2lab, lab2xyz,
|
||||
lab2rgb, rgb2lab,
|
||||
xyz2luv, luv2xyz,
|
||||
luv2rgb, rgb2luv,
|
||||
lab2lch, lch2lab,
|
||||
rgb2yuv, yuv2rgb,
|
||||
rgb2yiq, yiq2rgb,
|
||||
rgb2ypbpr, ypbpr2rgb,
|
||||
rgb2ycbcr, ycbcr2rgb,
|
||||
rgb2ydbdr, ydbdr2rgb])
|
||||
@pytest.mark.parametrize("shape", ([(3, ), (2, 3), (4, 5, 3), (5, 4, 5, 3),
|
||||
(4, 5, 4, 5, 3)]))
|
||||
def test_nD_color_conversion(func, shape):
|
||||
img = np.random.rand(*shape)
|
||||
out = func(img)
|
||||
|
||||
assert out.shape == img.shape
|
||||
|
||||
|
||||
@pytest.mark.parametrize("shape", ([(4, ), (2, 4), (4, 5, 4), (5, 4, 5, 4),
|
||||
(4, 5, 4, 5, 4)]))
|
||||
def test_rgba2rgb_nD(shape):
|
||||
img = np.random.rand(*shape)
|
||||
out = rgba2rgb(img)
|
||||
|
||||
expected_shape = shape[:-1] + (3, )
|
||||
|
||||
assert out.shape == expected_shape
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
|
||||
def test_rgba2rgb_dtypes(dtype):
|
||||
rgba = np.array([[[0, 0.5, 1, 0],
|
||||
[0, 0.5, 1, 1],
|
||||
[0, 0.5, 1, 0.5]]]).astype(dtype=dtype)
|
||||
rgb = rgba2rgb(rgba)
|
||||
float_dtype = _supported_float_type(rgba.dtype)
|
||||
assert rgb.dtype == float_dtype
|
||||
expected = np.array([[[1, 1, 1],
|
||||
[0, 0.5, 1],
|
||||
[0.5, 0.75, 1]]]).astype(float)
|
||||
assert rgb.shape == expected.shape
|
||||
assert_almost_equal(rgb, expected)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
|
||||
def test_lab_lch_roundtrip_dtypes(dtype):
|
||||
rgb = img_as_float(data.colorwheel()).astype(dtype=dtype, copy=False)
|
||||
lab = rgb2lab(rgb)
|
||||
float_dtype = _supported_float_type(dtype)
|
||||
assert lab.dtype == float_dtype
|
||||
lab2 = lch2lab(lab2lch(lab))
|
||||
decimal = 4 if float_dtype == np.float32 else 7
|
||||
assert_array_almost_equal(lab2, lab, decimal=decimal)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
|
||||
def test_rgb2hsv_dtypes(dtype):
|
||||
rgb = img_as_float(data.colorwheel())[::16, ::16]
|
||||
rgb = rgb.astype(dtype=dtype, copy=False)
|
||||
hsv = rgb2hsv(rgb).reshape(-1, 3)
|
||||
float_dtype = _supported_float_type(dtype)
|
||||
assert hsv.dtype == float_dtype
|
||||
# ground truth from colorsys
|
||||
gt = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2])
|
||||
for pt in rgb.reshape(-1, 3)]
|
||||
)
|
||||
decimal = 3 if float_dtype == np.float32 else 7
|
||||
assert_array_almost_equal(hsv, gt, decimal=decimal)
|
||||
314
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/tests/test_colorlabel.py
vendored
Normal file
@@ -0,0 +1,314 @@
|
||||
import itertools
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
from numpy.testing import (assert_array_almost_equal,
|
||||
assert_array_equal, assert_no_warnings,
|
||||
assert_warns)
|
||||
|
||||
from skimage._shared.testing import expected_warnings
|
||||
from skimage.color.colorconv import hsv2rgb, rgb2hsv
|
||||
from skimage.color.colorlabel import label2rgb
|
||||
|
||||
|
||||
def test_shape_mismatch():
    """label2rgb rejects a label array whose shape differs from the image."""
    image = np.ones((3, 3))
    label = np.ones((2, 2))
    with pytest.raises(ValueError):
        label2rgb(image, label, bg_label=-1)
def test_wrong_kind():
    """Only supported ``kind`` values are accepted by label2rgb."""
    label = np.ones((3, 3))
    # The default kind must succeed without raising.
    label2rgb(label, bg_label=-1)
    # An unrecognized kind raises.
    with pytest.raises(ValueError):
        label2rgb(label, kind='foo', bg_label=-1)
@pytest.mark.parametrize("channel_axis", [0, 1, -1])
|
||||
def test_uint_image(channel_axis):
|
||||
img = np.random.randint(0, 255, (10, 10), dtype=np.uint8)
|
||||
labels = np.zeros((10, 10), dtype=np.int64)
|
||||
labels[1:3, 1:3] = 1
|
||||
labels[6:9, 6:9] = 2
|
||||
output = label2rgb(labels, image=img, bg_label=0,
|
||||
channel_axis=channel_axis)
|
||||
# Make sure that the output is made of floats and in the correct range
|
||||
assert np.issubdtype(output.dtype, np.floating)
|
||||
assert output.max() <= 1
|
||||
|
||||
# size 3 (RGB) along the specified channel_axis
|
||||
new_axis = channel_axis % output.ndim
|
||||
assert output.shape[new_axis] == 3
|
||||
|
||||
|
||||
def test_rgb():
|
||||
image = np.ones((1, 3))
|
||||
label = np.arange(3).reshape(1, -1)
|
||||
colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
|
||||
# Set alphas just in case the defaults change
|
||||
rgb = label2rgb(label, image=image, colors=colors, alpha=1,
|
||||
image_alpha=1, bg_label=-1)
|
||||
assert_array_almost_equal(rgb, [colors])
|
||||
|
||||
|
||||
def test_alpha():
|
||||
image = np.random.uniform(size=(3, 3))
|
||||
label = np.random.randint(0, 9, size=(3, 3))
|
||||
# If we set `alpha = 0`, then rgb should match image exactly.
|
||||
rgb = label2rgb(label, image=image, alpha=0, image_alpha=1,
|
||||
bg_label=-1)
|
||||
assert_array_almost_equal(rgb[..., 0], image)
|
||||
assert_array_almost_equal(rgb[..., 1], image)
|
||||
assert_array_almost_equal(rgb[..., 2], image)
|
||||
|
||||
|
||||
def test_no_input_image():
|
||||
label = np.arange(3).reshape(1, -1)
|
||||
colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
|
||||
rgb = label2rgb(label, colors=colors, bg_label=-1)
|
||||
assert_array_almost_equal(rgb, [colors])
|
||||
|
||||
|
||||
def test_image_alpha():
|
||||
image = np.random.uniform(size=(1, 3))
|
||||
label = np.arange(3).reshape(1, -1)
|
||||
colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
|
||||
# If we set `image_alpha = 0`, then rgb should match label colors exactly.
|
||||
rgb = label2rgb(label, image=image, colors=colors, alpha=1,
|
||||
image_alpha=0, bg_label=-1)
|
||||
assert_array_almost_equal(rgb, [colors])
|
||||
|
||||
|
||||
def test_color_names():
|
||||
image = np.ones((1, 3))
|
||||
label = np.arange(3).reshape(1, -1)
|
||||
cnames = ['red', 'lime', 'blue']
|
||||
colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
|
||||
# Set alphas just in case the defaults change
|
||||
rgb = label2rgb(label, image=image, colors=cnames, alpha=1,
|
||||
image_alpha=1, bg_label=-1)
|
||||
assert_array_almost_equal(rgb, [colors])
|
||||
|
||||
|
||||
def test_bg_and_color_cycle():
    """Foreground labels cycle through the color list; background keeps bg_color."""
    image = np.zeros((1, 10))  # dummy image
    label = np.arange(10).reshape(1, -1)
    colors = [(1, 0, 0), (0, 0, 1)]
    bg_color = (0, 0, 0)

    rgb = label2rgb(label, image=image, bg_label=0, bg_color=bg_color,
                    colors=colors, alpha=1)

    assert_array_almost_equal(rgb[0, 0], bg_color)
    for pixel, expected in zip(rgb[0, 1:], itertools.cycle(colors)):
        assert_array_almost_equal(pixel, expected)
def test_negative_labels():
    """Negative label values are colored like any other foreground label."""
    labels = np.array([0, -1, -2, 0])
    expected = np.array([(0., 0., 0.), (0., 0., 1.),
                         (1., 0., 0.), (0., 0., 0.)])
    assert_array_almost_equal(
        expected, label2rgb(labels, bg_label=0, alpha=1, image_alpha=1))
def test_nonconsecutive():
    """Non-consecutive label values still cycle through the color list in order."""
    labels = np.array([0, 2, 4, 0])
    colors = [(1, 0, 0), (0, 0, 1)]
    expected = np.array([(1., 0., 0.), (0., 0., 1.),
                         (1., 0., 0.), (1., 0., 0.)])
    assert_array_almost_equal(
        expected, label2rgb(labels, colors=colors, alpha=1,
                            image_alpha=1, bg_label=-1))
def test_label_consistency():
    """The same label value maps to the same color across different label images."""
    label_1 = np.arange(5).reshape(1, -1)
    label_2 = np.array([0, 1])
    colors = [(1, 0, 0), (0, 1, 0), (0, 0, 1), (1, 1, 0), (1, 0, 1)]

    # Set alphas just in case the defaults change.
    rgb_1 = label2rgb(label_1, colors=colors, bg_label=-1)
    rgb_2 = label2rgb(label_2, colors=colors, bg_label=-1)

    for label_id in label_2.flat:
        assert_array_almost_equal(rgb_1[label_1 == label_id],
                                  rgb_2[label_2 == label_id])
def test_leave_labels_alone():
    """label2rgb must not mutate the label array it receives."""
    labels = np.array([-1, 0, 1])
    snapshot = labels.copy()

    # Exercise both a matching and a non-matching bg_label.
    label2rgb(labels, bg_label=-1)
    label2rgb(labels, bg_label=1)

    assert_array_equal(labels, snapshot)
@pytest.mark.parametrize("channel_axis", [0, 1, -1])
|
||||
def test_avg(channel_axis):
|
||||
# label image
|
||||
label_field = np.array([[1, 1, 1, 2],
|
||||
[1, 2, 2, 2],
|
||||
[3, 3, 4, 4]], dtype=np.uint8)
|
||||
|
||||
# color image
|
||||
r = np.array([[1., 1., 0., 0.],
|
||||
[0., 0., 1., 1.],
|
||||
[0., 0., 0., 0.]])
|
||||
g = np.array([[0., 0., 0., 1.],
|
||||
[1., 1., 1., 0.],
|
||||
[0., 0., 0., 0.]])
|
||||
b = np.array([[0., 0., 0., 1.],
|
||||
[0., 1., 1., 1.],
|
||||
[0., 0., 1., 1.]])
|
||||
image = np.dstack((r, g, b))
|
||||
|
||||
# reference label-colored image
|
||||
rout = np.array([[0.5, 0.5, 0.5, 0.5],
|
||||
[0.5, 0.5, 0.5, 0.5],
|
||||
[0., 0., 0., 0.]])
|
||||
gout = np.array([[0.25, 0.25, 0.25, 0.75],
|
||||
[0.25, 0.75, 0.75, 0.75],
|
||||
[0., 0., 0., 0.]])
|
||||
bout = np.array([[0., 0., 0., 1.],
|
||||
[0., 1., 1., 1.],
|
||||
[0.0, 0.0, 1.0, 1.0]])
|
||||
expected_out = np.dstack((rout, gout, bout))
|
||||
|
||||
# test standard averaging
|
||||
_image = np.moveaxis(image, source=-1, destination=channel_axis)
|
||||
out = label2rgb(label_field, _image, kind='avg', bg_label=-1,
|
||||
channel_axis=channel_axis)
|
||||
out = np.moveaxis(out, source=channel_axis, destination=-1)
|
||||
assert_array_equal(out, expected_out)
|
||||
|
||||
# test averaging with custom background value
|
||||
out_bg = label2rgb(label_field, _image, bg_label=2, bg_color=(0, 0, 0),
|
||||
kind='avg', channel_axis=channel_axis)
|
||||
out_bg = np.moveaxis(out_bg, source=channel_axis, destination=-1)
|
||||
expected_out_bg = expected_out.copy()
|
||||
expected_out_bg[label_field == 2] = 0
|
||||
assert_array_equal(out_bg, expected_out_bg)
|
||||
|
||||
# test default background color
|
||||
out_bg = label2rgb(label_field, _image, bg_label=2, kind='avg',
|
||||
channel_axis=channel_axis)
|
||||
out_bg = np.moveaxis(out_bg, source=channel_axis, destination=-1)
|
||||
assert_array_equal(out_bg, expected_out_bg)
|
||||
|
||||
|
||||
def test_negative_intensity():
|
||||
labels = np.arange(100).reshape(10, 10)
|
||||
image = np.full((10, 10), -1, dtype='float64')
|
||||
assert_warns(UserWarning, label2rgb, labels, image, bg_label=-1)
|
||||
|
||||
|
||||
def test_bg_color_rgb_string():
|
||||
img = np.random.randint(0, 255, (10, 10), dtype=np.uint8)
|
||||
labels = np.zeros((10, 10), dtype=np.int64)
|
||||
labels[1:3, 1:3] = 1
|
||||
labels[6:9, 6:9] = 2
|
||||
output = label2rgb(labels, image=img, alpha=0.9,
|
||||
bg_label=0, bg_color='red')
|
||||
assert output[0, 0, 0] > 0.9 # red channel
|
||||
|
||||
|
||||
def test_avg_with_2d_image():
|
||||
img = np.random.randint(0, 255, (10, 10), dtype=np.uint8)
|
||||
labels = np.zeros((10, 10), dtype=np.int64)
|
||||
labels[1:3, 1:3] = 1
|
||||
labels[6:9, 6:9] = 2
|
||||
assert_no_warnings(label2rgb, labels, image=img, bg_label=0, kind='avg')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('image_type', ['rgb', 'gray', None])
|
||||
def test_label2rgb_nd(image_type):
|
||||
# validate 1D and 3D cases by testing their output relative to the 2D case
|
||||
shape = (10, 10)
|
||||
if image_type == 'rgb':
|
||||
img = np.random.randint(0, 255, shape + (3,), dtype=np.uint8)
|
||||
elif image_type == 'gray':
|
||||
img = np.random.randint(0, 255, shape, dtype=np.uint8)
|
||||
else:
|
||||
img = None
|
||||
|
||||
# add a couple of rectangular labels
|
||||
labels = np.zeros(shape, dtype=np.int64)
|
||||
# Note: Have to choose labels here so that the 1D slice below also contains
|
||||
# both label values. Otherwise the labeled colors will not match.
|
||||
labels[2:-2, 1:3] = 1
|
||||
labels[3:-3, 6:9] = 2
|
||||
|
||||
# label in the 2D case (correct 2D output is tested in other functions)
|
||||
labeled_2d = label2rgb(labels, image=img, bg_label=0)
|
||||
|
||||
# labeling a single line gives an equivalent result
|
||||
image_1d = img[5] if image_type is not None else None
|
||||
labeled_1d = label2rgb(labels[5], image=image_1d, bg_label=0)
|
||||
expected = labeled_2d[5]
|
||||
assert_array_equal(labeled_1d, expected)
|
||||
|
||||
# Labeling a 3D stack of duplicates gives the same result in each plane
|
||||
image_3d = np.stack((img, ) * 4) if image_type is not None else None
|
||||
labels_3d = np.stack((labels,) * 4)
|
||||
labeled_3d = label2rgb(labels_3d, image=image_3d, bg_label=0)
|
||||
for labeled_plane in labeled_3d:
|
||||
assert_array_equal(labeled_plane, labeled_2d)
|
||||
|
||||
|
||||
def test_label2rgb_shape_errors():
    """Incompatible label/image shapes must raise ValueError."""
    img = np.random.randint(0, 255, (10, 10, 3), dtype=np.uint8)
    labels = np.zeros((10, 10), dtype=np.int64)
    labels[2:5, 2:5] = 1

    # Spatial shapes differ.
    with pytest.raises(ValueError):
        label2rgb(labels, img[1:])

    # Image has one axis too many.
    with pytest.raises(ValueError):
        label2rgb(labels, img[..., np.newaxis])

    # Too many channels along the last axis.
    with pytest.raises(ValueError):
        label2rgb(labels, np.concatenate((img, img), axis=-1))
|
||||
|
||||
|
||||
def test_overlay_full_saturation():
    """With saturation=1, background pixels keep their original colors."""
    image = np.random.uniform(size=(10, 10, 3))
    labels = np.ones((10, 10), dtype=np.int64)
    labels[5:, 5:] = 2
    labels[:3, :3] = 0  # background corner
    alpha = 0.3
    overlay = label2rgb(labels, image=image, alpha=alpha,
                        bg_label=0, saturation=1)
    # Where labels == 0 the input colors survive, scaled only by (1 - alpha).
    assert_array_almost_equal(image[:3, :3] * (1 - alpha), overlay[:3, :3])
|
||||
|
||||
|
||||
def test_overlay_custom_saturation():
    """A saturation < 1 desaturates the background part of the overlay."""
    image = np.random.uniform(size=(10, 10, 3))
    labels = np.ones((10, 10), dtype=np.int64)
    labels[5:, 5:] = 2
    labels[:3, :3] = 0  # background corner
    alpha = 0.3
    saturation = 0.3
    overlay = label2rgb(labels, image=image, alpha=alpha,
                        bg_label=0, saturation=saturation)

    # Expected background: the input with its S channel scaled in HSV space.
    hsv = rgb2hsv(image)
    hsv[..., 1] *= saturation
    saturated_img = hsv2rgb(hsv)

    # Where labels == 0 the overlay shows the desaturated input colors.
    assert_array_almost_equal(saturated_img[:3, :3] * (1 - alpha),
                              overlay[:3, :3])
|
||||
|
||||
|
||||
def test_saturation_warning():
    """Saturation values outside [0, 1] must trigger a warning."""
    image = np.random.uniform(size=(10, 10, 3))
    labels = np.ones((10, 10), dtype=np.int64)
    for bad_saturation in (2, -1):
        with expected_warnings(["saturation must be in range"]):
            label2rgb(labels, image=image,
                      bg_label=0, saturation=bad_saturation)
|
||||
207
.CondaPkg/env/lib/python3.11/site-packages/skimage/color/tests/test_delta_e.py
vendored
Normal file
@@ -0,0 +1,207 @@
|
||||
"""Test for correctness of color distance functions"""
|
||||
|
||||
import numpy as np
|
||||
import pytest
|
||||
from numpy.testing import assert_allclose, assert_almost_equal, assert_equal
|
||||
|
||||
from skimage._shared.testing import fetch
|
||||
from skimage._shared.utils import _supported_float_type
|
||||
from skimage.color.delta_e import (deltaE_cie76, deltaE_ciede94,
|
||||
deltaE_ciede2000, deltaE_cmc)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1])
@pytest.mark.parametrize('dtype', [np.float32, np.float64])
def test_ciede2000_dE(dtype, channel_axis):
    """deltaE_ciede2000 reproduces the dE column of the reference table."""
    data = load_ciede2000_data()
    lab1 = np.stack([data['L1'], data['a1'], data['b1']], axis=-1)
    lab2 = np.stack([data['L2'], data['a2'], data['b2']], axis=-1)
    lab1 = np.moveaxis(lab1.astype(dtype), -1, channel_axis)
    lab2 = np.moveaxis(lab2.astype(dtype), -1, channel_axis)

    dE2 = deltaE_ciede2000(lab1, lab2, channel_axis=channel_axis)
    assert dE2.dtype == _supported_float_type(dtype)
    assert_allclose(dE2, data['dE'], rtol=1e-2)
|
||||
|
||||
|
||||
def load_ciede2000_data():
    """Load the CIEDE2000 reference table as a NumPy structured array.

    The text file also carries several intermediate quantities of the
    CIEDE2000 computation (primed values, G, T, S*, RT) besides the
    final dE column.
    """
    # Column layout of ciede2000_test_data.txt, in file order.
    pair1_floats = ['L1', 'a1', 'b1', 'a1_prime', 'C1_prime', 'h1_prime',
                    'hbar_prime', 'G', 'T', 'SL', 'SC', 'SH', 'RT', 'dE']
    pair2_floats = ['L2', 'a2', 'b2', 'a2_prime', 'C2_prime', 'h2_prime']
    dtype = [('pair', int), ('1', int)]
    dtype += [(name, float) for name in pair1_floats]
    dtype += [('2', int)]
    dtype += [(name, float) for name in pair2_floats]

    path = fetch('color/tests/ciede2000_test_data.txt')
    return np.loadtxt(path, dtype=dtype)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1])
@pytest.mark.parametrize('dtype', [np.float32, np.float64])
def test_cie76(dtype, channel_axis):
    """deltaE_cie76 matches precomputed distances on the reference table."""
    data = load_ciede2000_data()
    lab1 = np.stack([data['L1'], data['a1'], data['b1']], axis=-1)
    lab2 = np.stack([data['L2'], data['a2'], data['b2']], axis=-1)
    lab1 = np.moveaxis(lab1.astype(dtype), -1, channel_axis)
    lab2 = np.moveaxis(lab2.astype(dtype), -1, channel_axis)

    dE2 = deltaE_cie76(lab1, lab2, channel_axis=channel_axis)
    assert dE2.dtype == _supported_float_type(dtype)

    # Precomputed reference distances for this table.
    oracle = np.array([
        4.00106328, 6.31415011, 9.1776999, 2.06270077, 2.36957073,
        2.91529271, 2.23606798, 2.23606798, 4.98000036, 4.9800004,
        4.98000044, 4.98000049, 4.98000036, 4.9800004, 4.98000044,
        3.53553391, 36.86800781, 31.91002977, 30.25309901, 27.40894015,
        0.89242934, 0.7972, 0.8583065, 0.82982507, 3.1819238,
        2.21334297, 1.53890382, 4.60630929, 6.58467989, 3.88641412,
        1.50514845, 2.3237848, 0.94413208, 1.31910843
    ])
    rtol = 1e-5 if dtype == np.float32 else 1e-8
    assert_allclose(dE2, oracle, rtol=rtol)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1])
@pytest.mark.parametrize('dtype', [np.float32, np.float64])
def test_ciede94(dtype, channel_axis):
    """deltaE_ciede94 matches precomputed distances on the reference table."""
    data = load_ciede2000_data()
    lab1 = np.stack([data['L1'], data['a1'], data['b1']], axis=-1)
    lab2 = np.stack([data['L2'], data['a2'], data['b2']], axis=-1)
    lab1 = np.moveaxis(lab1.astype(dtype), -1, channel_axis)
    lab2 = np.moveaxis(lab2.astype(dtype), -1, channel_axis)

    dE2 = deltaE_ciede94(lab1, lab2, channel_axis=channel_axis)
    assert dE2.dtype == _supported_float_type(dtype)

    # Precomputed reference distances for this table.
    oracle = np.array([
        1.39503887, 1.93410055, 2.45433566, 0.68449187, 0.6695627,
        0.69194527, 2.23606798, 2.03163832, 4.80069441, 4.80069445,
        4.80069449, 4.80069453, 4.80069441, 4.80069445, 4.80069449,
        3.40774352, 34.6891632, 29.44137328, 27.91408781, 24.93766082,
        0.82213163, 0.71658427, 0.8048753, 0.75284394, 1.39099471,
        1.24808929, 1.29795787, 1.82045088, 2.55613309, 1.42491303,
        1.41945261, 2.3225685, 0.93853308, 1.30654464
    ])
    rtol = 1e-5 if dtype == np.float32 else 1e-8
    assert_allclose(dE2, oracle, rtol=rtol)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("channel_axis", [0, 1, -1])
@pytest.mark.parametrize('dtype', [np.float32, np.float64])
def test_cmc(dtype, channel_axis):
    """deltaE_cmc matches precomputed distances; zero for equal colors."""
    data = load_ciede2000_data()
    lab1 = np.stack([data['L1'], data['a1'], data['b1']], axis=-1)
    lab2 = np.stack([data['L2'], data['a2'], data['b2']], axis=-1)
    lab1 = np.moveaxis(lab1.astype(dtype), -1, channel_axis)
    lab2 = np.moveaxis(lab2.astype(dtype), -1, channel_axis)

    dE2 = deltaE_cmc(lab1, lab2, channel_axis=channel_axis)
    assert dE2.dtype == _supported_float_type(dtype)

    # Precomputed reference distances for this table.
    oracle = np.array([
        1.73873611, 2.49660844, 3.30494501, 0.85735576, 0.88332927,
        0.97822692, 3.50480874, 2.87930032, 6.5783807, 6.57838075,
        6.5783808, 6.57838086, 6.67492321, 6.67492326, 6.67492331,
        4.66852997, 42.10875485, 39.45889064, 38.36005919, 33.93663807,
        1.14400168, 1.00600419, 1.11302547, 1.05335328, 1.42822951,
        1.2548143, 1.76838061, 2.02583367, 3.08695508, 1.74893533,
        1.90095165, 1.70258148, 1.80317207, 2.44934417
    ])
    rtol = 1e-5 if dtype == np.float32 else 1e-8
    assert_allclose(dE2, oracle, rtol=rtol)

    # Equal or nearly-equal colors used to make delta_e.get_dH2 return
    # negative values, giving NaNs under sqrt (gh-1908). Note lab1 and
    # lab2 intentionally alias the same array below.
    lab1 = lab2
    expected = np.zeros_like(oracle)
    assert_almost_equal(
        deltaE_cmc(lab1, lab2, channel_axis=channel_axis), expected, decimal=6
    )

    # An eps-sized perturbation of one element must still round to zero.
    lab2[0, 0] += np.finfo(float).eps
    assert_almost_equal(
        deltaE_cmc(lab1, lab2, channel_axis=channel_axis), expected, decimal=6
    )
|
||||
|
||||
|
||||
def test_cmc_single_item():
    """deltaE_cmc of a single color with itself must be exactly 0."""
    color = np.array([0., 1.59607713, 0.87755709])
    assert_equal(deltaE_cmc(color, color), 0)

    # Perturbing one channel by machine epsilon must still give 0.
    color[0] += np.finfo(float).eps
    assert_equal(deltaE_cmc(color, color), 0)
|
||||
|
||||
|
||||
def test_single_color_cie76():
    """deltaE_cie76 accepts plain 3-tuples (single colors) without error."""
    deltaE_cie76((0.5, 0.5, 0.5), (0.4, 0.4, 0.4))
|
||||
|
||||
|
||||
def test_single_color_ciede94():
    """deltaE_ciede94 accepts plain 3-tuples (single colors) without error."""
    deltaE_ciede94((0.5, 0.5, 0.5), (0.4, 0.4, 0.4))
|
||||
|
||||
|
||||
def test_single_color_ciede2000():
    """deltaE_ciede2000 accepts plain 3-tuples (single colors) without error."""
    deltaE_ciede2000((0.5, 0.5, 0.5), (0.4, 0.4, 0.4))
|
||||
|
||||
|
||||
def test_single_color_cmc():
    """deltaE_cmc accepts plain 3-tuples (single colors) without error."""
    deltaE_cmc((0.5, 0.5, 0.5), (0.4, 0.4, 0.4))
|
||||
15
.CondaPkg/env/lib/python3.11/site-packages/skimage/conftest.py
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
from skimage._shared.testing import setup_test, teardown_test
|
||||
|
||||
# List of files that pytest should ignore (pytest's `collect_ignore`
# hook variable; entries are paths relative to this conftest's directory).
collect_ignore = [
    "io/_plugins",
    "future/graph",  # Remove after v0.20 release
]
|
||||
|
||||
|
||||
def pytest_runtest_setup(item):
    """Pytest hook: run skimage's shared test setup before every test."""
    setup_test()
|
||||
|
||||
|
||||
def pytest_runtest_teardown(item):
    """Pytest hook: run skimage's shared test teardown after every test."""
    teardown_test()
|
||||
9
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/README.txt
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
This directory contains sample data from scikit-image.
|
||||
|
||||
By default, it only contains a small subset of the entire dataset.
|
||||
|
||||
The full dataset can be downloaded by using the following commands from
a Python console.
|
||||
|
||||
>>> from skimage.data import download_all
|
||||
>>> download_all()
|
||||
3
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/__init__.py
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import lazy_loader as lazy

# Delegate attribute access/dir/__all__ to the type-stub file
# (__init__.pyi) so submodule and function imports are deferred until
# first use.
__getattr__, __dir__, __all__ = lazy.attach_stub(__name__, __file__)
|
||||
91
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/__init__.pyi
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
# Public API of skimage.data; lazy_loader resolves each of these names
# through the imports declared below in this stub.
__all__ = [
    'astronaut',
    'binary_blobs',
    'brain',
    'brick',
    'camera',
    'cat',
    'cell',
    'cells3d',
    'checkerboard',
    'chelsea',
    'clock',
    'coffee',
    'coins',
    'colorwheel',
    'create_image_fetcher',
    'data_dir',
    'download_all',
    'eagle',
    'file_hash',
    'grass',
    'gravel',
    'horse',
    'hubble_deep_field',
    'human_mitosis',
    'image_fetcher',
    'immunohistochemistry',
    'kidney',
    'lbp_frontal_face_cascade_filename',
    'lfw_subset',
    'lily',
    'logo',
    'microaneurysms',
    'moon',
    'nickel_solidification',
    'page',
    'protein_transport',
    'retina',
    'rocket',
    'shepp_logan_phantom',
    'skin',
    'stereo_motorcycle',
    'text',
    'vortex',
]
|
||||
|
||||
from ._binary_blobs import binary_blobs
|
||||
from ._fetchers import (
|
||||
astronaut,
|
||||
brain,
|
||||
brick,
|
||||
camera,
|
||||
cat,
|
||||
cell,
|
||||
cells3d,
|
||||
checkerboard,
|
||||
chelsea,
|
||||
clock,
|
||||
coffee,
|
||||
coins,
|
||||
colorwheel,
|
||||
create_image_fetcher,
|
||||
data_dir,
|
||||
download_all,
|
||||
eagle,
|
||||
file_hash,
|
||||
grass,
|
||||
gravel,
|
||||
horse,
|
||||
hubble_deep_field,
|
||||
human_mitosis,
|
||||
image_fetcher,
|
||||
immunohistochemistry,
|
||||
kidney,
|
||||
lbp_frontal_face_cascade_filename,
|
||||
lfw_subset,
|
||||
lily,
|
||||
logo,
|
||||
microaneurysms,
|
||||
moon,
|
||||
nickel_solidification,
|
||||
page,
|
||||
protein_transport,
|
||||
retina,
|
||||
rocket,
|
||||
shepp_logan_phantom,
|
||||
skin,
|
||||
stereo_motorcycle,
|
||||
text,
|
||||
vortex,
|
||||
)
|
||||
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/__pycache__/__init__.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/__pycache__/_binary_blobs.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/__pycache__/_fetchers.cpython-311.pyc
vendored
Normal file
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/__pycache__/_registry.cpython-311.pyc
vendored
Normal file
63
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/_binary_blobs.py
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
import numpy as np
|
||||
|
||||
from .._shared.filters import gaussian
|
||||
|
||||
|
||||
def binary_blobs(length=512, blob_size_fraction=0.1, n_dim=2,
                 volume_fraction=0.5, seed=None):
    """
    Generate a synthetic binary image with several rounded blob-like objects.

    Parameters
    ----------
    length : int, optional
        Linear size of the output image.
    blob_size_fraction : float, optional
        Typical linear size of a blob as a fraction of ``length``; should
        be smaller than 1.
    n_dim : int, optional
        Number of dimensions of the output image.
    volume_fraction : float, default 0.5
        Fraction of image pixels covered by the blobs (where the output
        is 1). Should be in [0, 1].
    seed : {None, int, `numpy.random.Generator`}, optional
        If `seed` is None the `numpy.random.Generator` singleton is used.
        If `seed` is an int, a new ``Generator`` instance is used,
        seeded with `seed`. If `seed` is already a ``Generator`` instance
        then that instance is used.

    Returns
    -------
    blobs : ndarray of bools
        Output binary image.

    Examples
    --------
    >>> from skimage import data
    >>> blobs = data.binary_blobs(length=256, blob_size_fraction=0.1)
    >>> # Finer structures
    >>> blobs = data.binary_blobs(length=256, blob_size_fraction=0.05)
    >>> # Blobs cover a smaller volume fraction of the image
    >>> blobs = data.binary_blobs(length=256, volume_fraction=0.3)
    """
    rng = np.random.default_rng(seed)
    shape = (length,) * n_dim

    # Seed the volume with randomly placed points; the point count scales
    # inversely with the requested blob size.
    mask = np.zeros(shape)
    n_pts = max(int(1. / blob_size_fraction) ** n_dim, 1)
    points = (length * rng.random((n_dim, n_pts))).astype(int)
    mask[tuple(points)] = 1

    # Smooth the points into blobs, then threshold at the percentile that
    # yields the requested volume fraction of foreground pixels.
    mask = gaussian(mask, sigma=0.25 * length * blob_size_fraction,
                    preserve_range=False)
    threshold = np.percentile(mask, 100 * (1 - volume_fraction))
    return np.logical_not(mask < threshold)
|
||||
1248
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/_fetchers.py
vendored
Normal file
186
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/_registry.py
vendored
Normal file
@@ -0,0 +1,186 @@
|
||||
# flake8: noqa
|
||||
|
||||
# This minimal dataset was available as part of
# scikit-image 0.15 and will be retained until
# further notice.
# Testing data and additional datasets should only
# be made available by pooch.
# File names below are relative to the package's data/ directory.
legacy_datasets = [
    'astronaut.png',
    'brick.png',
    'camera.png',
    'chessboard_GRAY.png',
    'chessboard_RGB.png',
    'chelsea.png',
    'clock_motion.png',
    'coffee.png',
    'coins.png',
    'color.png',
    'cell.png',
    'grass.png',
    'gravel.png',
    'horse.png',
    'hubble_deep_field.jpg',
    'ihc.png',
    'lbpcascade_frontalface_opencv.xml',
    'lfw_subset.npy',
    'logo.png',
    'microaneurysms.png',
    'moon.png',
    'page.png',
    'text.png',
    'retina.jpg',
    'rocket.jpg',
    'phantom.png',
    'motorcycle_disp.npz',
    'motorcycle_left.png',
    'motorcycle_right.png',
]
|
||||
|
||||
# Registry of datafiles that can be downloaded along with their SHA256 hashes
# To generate the SHA256 hash, use the command
# openssl sha256 filename
#
# Fix: the entry "data/mssim_matlab_output.npz" appeared twice with the
# same hash; the duplicate dict key has been removed.
registry = {
    "color/tests/data/lab_array_a_10.npy": "a3ef76f1530e374f9121020f1f220bc89767dc866f4bbd1b1f47e5b84891a38c",
    "color/tests/data/lab_array_a_2.npy": "793d5981cbffceb14b5fb589f998a2b1acdb5ff9c14d364c8e9e8bd45a80b275",
    "color/tests/data/lab_array_a_r.npy": "3d3613da109d0c87827525fc49b58111aefc12438fa6426654979f66807b9227",
    "color/tests/data/lab_array_b_10.npy": "e8d648b28077c1bfcef55ec6dc8679819612b56a01647f8c0a78625bb06f99b6",
    "color/tests/data/lab_array_b_2.npy": "da9c6aa99e4ab3af8ec3107bbf11647cc483a0760285dd5c9fb66988be393ca1",
    "color/tests/data/lab_array_b_r.npy": "d9eee96f4d65a2fbba82039508aac8c18304752ee8e33233e2a013e65bb91464",
    "color/tests/data/lab_array_c_10.npy": "88b4ff2a2d2c4f48e7bb265609221d4b9ef439a4e2d8a86989696bfdb47790e6",
    "color/tests/data/lab_array_c_2.npy": "e1b8acfdc7284ab9cd339de66948134304073b6f734ecf9ad42f8297b83d3405",
    "color/tests/data/lab_array_c_r.npy": "09ffba2ed69e467864fea883493cd2d2706da028433464e3e858a8086842867e",
    "color/tests/data/lab_array_d50_10.npy": "42e2ff26cb10e2a98fcf1bc06c2483302ff4fabf971fe8d49b530f490b5d24c7",
    "color/tests/data/lab_array_d50_2.npy": "4aa03b7018ff7276643d3c082123cf07304f9d8d898ae92a5756a86955de4faf",
    "color/tests/data/lab_array_d50_r.npy": "57db02009f9a68dade33ce1ecffead0418d8ac8113b2a589fc02a20e6bf7e799",
    "color/tests/data/lab_array_d55_10.npy": "ab4f21368b6d8351578ab093381c44b49ae87a6b7f25c11aa094b07f215eed7d",
    "color/tests/data/lab_array_d55_2.npy": "0319723de4632a252bae828b7c96d038fb075a7df05beadfbad653da05efe372",
    "color/tests/data/lab_array_d55_r.npy": "060ebc446f7b4da4df58a60f0006133dbca735da87ba61854f4a75d28db67a3a",
    "color/tests/data/lab_array_d65_10.npy": "5cb9e9c384d2577aaf8b7d2d21ff5b505708b80605a2f59d10e89d22c3d308d2",
    "color/tests/data/lab_array_d65_2.npy": "16e847160f7ba4f19806d8194ed44a6654c9367e5a2cb240aa6e7eece44a6649",
    "color/tests/data/lab_array_d65_r.npy": "82d0dd7a46741f627b8868793e64cdc2f9944fe1e049b573f752a93760a1577c",
    "color/tests/data/lab_array_d75_10.npy": "c2d3de5422c785c925926b0c6223aeaf50b9393619d1c30830190d433606cbe1",
    "color/tests/data/lab_array_d75_2.npy": "c94d53da398d36e076471ff7e0dafcaffc64ce4ba33b4d04849c32d19c87494a",
    "color/tests/data/lab_array_e_2.npy": "ac05f17a83961b020ceccbdd46bddc86943d43e678dabcc898caf4a1e4be6165",
    "color/tests/data/luv_array_a_10.npy": "c8af67f9fd64a6e9c610ac0c12c5315a49ca229363f048e5d851409d4a3ae5b6",
    "color/tests/data/luv_array_a_2.npy": "eaf05dc61f4a70ece367d5e751a14d42b7c397c7b1c2df4cfecec9ddf26e1c1a",
    "color/tests/data/luv_array_a_r.npy": "2c0891add787ec757601f9c61ad14dd9621dd969af4e32753f2e64df437081b7",
    "color/tests/data/luv_array_b_10.npy": "a5407736b8a43071139ca178d12cdf930f32f52a0644f0b13f89d8895c8b43db",
    "color/tests/data/luv_array_b_2.npy": "8e74173d54dc549b6c0ebd1f1d70489d2905cad87744e41ed74384f21f22986d",
    "color/tests/data/luv_array_b_r.npy": "0a74c41df369cbb5fc0a00c16d60dc6f946ebf144bc5e506545b0d160fa53dfa",
    "color/tests/data/luv_array_c_10.npy": "3a5f975ffa57f69a1be9e02b153e8161f83040ce3002ea1b0a05b9fbdd0d8ec4",
    "color/tests/data/luv_array_c_2.npy": "32506cd50ea2181997cb88d3511e275740e8151d6c693cd178f5eafd8b0c6e47",
    "color/tests/data/luv_array_c_r.npy": "c0fbf98cc0e62ed426ab4d228986d6660089444a7bbfcc64cbb1c632644067bb",
    "color/tests/data/luv_array_d50_10.npy": "fe223db556222ce3a59198bed3a3324c2c719b8083fb84dc5b00f214b4773b16",
    "color/tests/data/luv_array_d50_2.npy": "48e8989048904bdf2c3c1ada265c1c29c5eff60f02f848a25cde622982c84901",
    "color/tests/data/luv_array_d50_r.npy": "f93f0def9c93f872dd10ce4a91fdb3f06eea61ddb6e72387b7669909827d4f9c",
    "color/tests/data/luv_array_d55_10.npy": "d88d53d2bad230c2331442187712ec52ffdee62bf0f60b200c33411bfed76c60",
    "color/tests/data/luv_array_d55_2.npy": "c761b40475df591ae9c0475d54ef712d067190ca4652efc6308b69080a652061",
    "color/tests/data/luv_array_d55_r.npy": "05fbd57e3602ee4d5202b9f18f9b5fc05b545891a9b4456d2a88aa798a5a774a",
    "color/tests/data/luv_array_d65_10.npy": "41a5452ffac4d31dd579d9528e725432c60d77b5f505d801898d9401429c89bf",
    "color/tests/data/luv_array_d65_2.npy": "962ce180132c6c11798cbc423b2b204d1d10187670f6eb5dec1058eaad301e0e",
    "color/tests/data/luv_array_d65_r.npy": "78db8c19af26dd802ce98b039a33855f7c8d6a103a2721d094b1d9c619717449",
    "color/tests/data/luv_array_d75_10.npy": "e1cc70d56eb6789633d4c2a4059b9533f616a7c8592c9bd342403e41d72f45e4",
    "color/tests/data/luv_array_d75_2.npy": "07db3bd59bd89de8e5ff62dad786fe5f4b299133495ba9bea30495b375133a98",
    "color/tests/data/luv_array_e_2.npy": "41b1037d81b267305ffe9e8e97e0affa9fa54b18e60413b01b8f11861cb32213",
    "color/tests/ciede2000_test_data.txt": "2e005c6f76ddfb7bbcc8f68490f1f7b4b4a2a4b06b36a80c985677a2799c0e40",
    "data/astronaut.png": "88431cd9653ccd539741b555fb0a46b61558b301d4110412b5bc28b5e3ea6cb5",
    "data/brick.png": "7966caf324f6ba843118d98f7a07746d22f6a343430add0233eca5f6eaaa8fcf",
    "data/cell.png": "8d23a7fb81f7cc877cd09f330357fc7f595651306e84e17252f6e0a1b3f61515",
    "data/camera.png": "b0793d2adda0fa6ae899c03989482bff9a42d3d5690fc7e3648f2795d730c23a",
    "data/chessboard_GRAY.png": "3e51870774515af4d07d820bd8827364c70839bf9b573c746e485095e893df90",
    "data/chessboard_RGB.png": "1ac01eff2d4e50f4eda55a2ddecdc28a6576623a58d7a7ef84513c5cc19a0331",
    "data/chelsea.png": "596aa1e7cb875eb79f437e310381d26b338a81c2da23439704a73c4651e8c4bb",
    "data/clock_motion.png": "f029226b28b642e80113d86622e9b215ee067a0966feaf5e60604a1e05733955",
    "data/coffee.png": "cc02f8ca188b167c775a7101b5d767d1e71792cf762c33d6fa15a4599b5a8de7",
    "data/coins.png": "f8d773fc9cfa6f4d8e5942dc34d0a0788fcaed2a4fefbbed0aef5398d7ef4cba",
    "data/color.png": "7d2df993de2b4fa2a78e04e5df8050f49a9c511aa75e59ab3bd56ac9c98aef7e",
    "data/eagle.png": "baba13e43e566c711023c0646ec614924a1ad854b57e70807f7e89d7ba86a9cf",
    "data/horse.png": "c7fb60789fe394c485f842291ea3b21e50d140f39d6dcb5fb9917cc178225455",
    "data/grass.png": "b6b6022426b38936c43a4ac09635cd78af074e90f42ffa8227ac8b7452d39f89",
    "data/hubble_deep_field.jpg": "3a19c5dd8a927a9334bb1229a6d63711b1c0c767fb27e2286e7c84a3e2c2f5f4",
    "data/ihc.png": "f8dd1aa387ddd1f49d8ad13b50921b237df8e9b262606d258770687b0ef93cef",
    "data/logo.png": "f2c57fe8af089f08b5ba523d95573c26e62904ac5967f4c8851b27d033690168",
    "data/lfw_subset.npy": "9560ec2f5edfac01973f63a8a99d00053fecd11e21877e18038fbe500f8e872c",
    "data/microaneurysms.png": "a1e1be59aa447f8ce082f7fa809997ab369a2b137cb6c4202abc647c7ccf6456",
    "data/moon.png": "78739619d11f7eb9c165bb5d2efd4772cee557812ec847532dbb1d92ef71f577",
    "data/motorcycle_left.png": "db18e9c4157617403c3537a6ba355dfeafe9a7eabb6b9b94cb33f6525dd49179",
    "data/motorcycle_right.png": "5fc913ae870e42a4b662314bc904d1786bcad8e2f0b9b67dba5a229406357797",
    "data/motorcycle_disp.npz": "2e49c8cebff3fa20359a0cc6880c82e1c03bbb106da81a177218281bc2f113d7",
    "data/mssim_matlab_output.npz": "cc11a14bfa040c75b02db32282439f2e2e3e96779196c171498afaa70528ed7a",
    "data/page.png": "341a6f0a61557662b02734a9b6e56ec33a915b2c41886b97509dedf2a43b47a3",
    "data/phantom.png": "552ff698167aa402cceb17981130607a228a0a0aa7c519299eaa4d5f301ba36c",
    "data/retina.jpg": "38a07f36f27f095e818aea7b96d34202c05176d30253c66733f2e00379e9e0e6",
    "data/rocket.jpg": "c2dd0de7c538df8d111e479619b129464d0269d0ae5fd18ca91d33a7fdfea95c",
    "data/gravel.png": "c48615b451bf1e606fbd72c0aa9f8cc0f068ab7111ef7d93bb9b0f2586440c12",
    "data/text.png": "bd84aa3a6e3c9887850d45d606c96b2e59433fbef50338570b63c319e668e6d1",
    "data/chessboard_GRAY_U16.tif": "9fd3392c5b6cbc5f686d8ff83eb57ef91d038ee0852ac26817e5ac99df4c7f45",
    "data/chessboard_GRAY_U16B.tif": "b0a9270751f0fc340c90b8b615b62b88187b9ab5995942717566735d523cddb2",
    "data/chessboard_GRAY_U8.npy": "71f394694b721e8a33760a355b3666c9b7d7fc1188ff96b3cd23c2a1d73a38d8",
    "data/lbpcascade_frontalface_opencv.xml": "03097789a3dcbb0e40d20b9ef82537dbc3b670b6a7f2268d735470f22e003a91",
    "data/astronaut_GRAY_hog_L1.npy": "5d8ab22b166d1dd49c12caeff9d178ed28132efea3852b952e9d75f7f7f94954",
    "data/astronaut_GRAY_hog_L2-Hys.npy": "c4dd6e50d1129aada358311cf8880ce8c775f31e0e550fc322c16e43a96d56fe",
    "data/rank_filter_tests.npz": "efaf5699630f4a53255e91681dc72a965acd4a8aa1f84671c686fb93e7df046d",
    "data/rank_filters_tests_3d.npz": "1741c2b978424e93558a07d345b2a0d9bfbb33c095c123da147fca066714ab16",
    "data/multi.fits": "5c71a83436762a52b1925f2f0d83881af7765ed50aede155af2800e54bbd5040",
    "data/simple.fits": "cd36087fdbb909b6ba506bbff6bcd4c5f4da3a41862608fbac5e8555ef53d40f",
    "data/palette_color.png": "c4e817035fb9f7730fe95cff1da3866dea01728efc72b6e703d78f7ab9717bdd",
    "data/palette_gray.png": "bace7f73783bf3ab3b7fdaf701707e4fa09f0dbd0ea72cf5b12ddc73d50b02a9",
    "data/green_palette.png": "42d49d94be8f9bc76e50639d3701ed0484258721f6b0bd7f50bb1b9274a010f0",
    "data/truncated.jpg": "4c226038acc78012d335efba29c6119a24444a886842182b7e18db378f4a557d",
    "data/multipage.tif": "4da0ad0d3df4807a9847247d1b5e565b50d46481f643afb5c37c14802c78130f",
    "data/multipage_rgb.tif": "1d23b844fd38dce0e2d06f30432817cdb85e52070d8f5460a2ba58aebf34a0de",
    "data/no_time_for_that_tiny.gif": "20abe94ba9e45f18de416c5fbef8d1f57a499600be40f9a200fae246010eefce",
    "data/foo3x5x4indexed.png": "48a64c25c6da000ffdb5fcc34ebafe9ba3b1c9b61d7984ea7ca6dc54f9312dfa",
    "data/gray_morph_output.npz": "3012eb994e864e1dca1f66fada6b4375f84eac63658d049886b710488c2394d1",
    "data/disk-matlab-output.npz": "8a39d5c866f6216d6a9c9166312aa4bbf4d18fab3d0dcd963c024985bde5856b",
    "data/diamond-matlab-output.npz": "02fca68907e2b252b501dfe977eef71ae39fadaaa3702ebdc855195422ae1cc2",
    "data/bw_text.png": "308c2b09f8975a69b212e103b18520e8cbb7a4eccfce0f757836cd371f1b9094",
    "data/bw_text_skeleton.npy": "9ff4fc23c6a01497d7987f14e3a97cbcc39cce54b2b3b7ee33b84c1b661d0ae1",
    "data/_blobs_3d_fiji_skeleton.tif": "e3449ad9819425959952050c147278555e5ffe1c2c4a30df29f6a1f9023e10c3",
    "data/checker_bilevel.png": "2e207e486545874a2a3e69ba653b28fdef923157be9017559540e65d1bcb8e28",
    "restoration/tests/astronaut_rl.npy": "3f8373e2c6182a89366e51cef6624e3625deac75fdda1079cbdad2a33322152c",
    "restoration/tests/camera_rl.npy": "fd4f59af84dd471fbbe79ee70c1b7e68a69864c461f0db5ac587e7975363f78f",
    "restoration/tests/camera_unsup.npy": "3de10a0b97267352b18886b25d66a967f9e1d78ada61050577d78586cab82baa",
    "restoration/tests/camera_unsup2.npy": "29cdc60605eb528c5f014baa8564d7d1ba0bd4b3170a66522058cbe5aed0960b",
    "restoration/tests/camera_wiener.npy": "4505ea8b0d63d03250c6d756560d615751b76dd6ffc4a95972fa260c0c84633e",
    "registration/tests/data/OriginalX-130Y130.png": "bf24a06d99ae131c97e582ef5e1cd0c648a8dad0caab31281f3564045492811f",
    "registration/tests/data/OriginalX130Y130.png": "7fdd4c06d504fec35ee0703bd7ed2c08830b075a74c8506bae4a70d682f5a2db",
    "registration/tests/data/OriginalX75Y75.png": "c5cd58893c93140df02896df80b13ecf432f5c86eeaaf8fb311aec52a65c7016",
    "registration/tests/data/TransformedX-130Y130.png": "1cda90ed69c921eb7605b73b76d141cf4ea03fb8ce3336445ca08080e40d7375",
    "registration/tests/data/TransformedX130Y130.png": "bb10c6ae3f91a313b0ac543efdb7ca69c4b95e55674c65a88472a6c4f4692a25",
    "registration/tests/data/TransformedX75Y75.png": "a1e9ead5f8e4a0f604271e1f9c50e89baf53f068f1d19fab2876af4938e695ea",
    "data/brain.tiff": "bcdbaf424fbad7b1fb0f855f608c68e5a838f35affc323ff04ea17f678eef5c6",
    "data/cells3d.tif": "afc7c7d80d38bfde09788b4064ac1e64ec14e88454ab785ebdc8dbba5ca3b222",
    "data/kidney.tif": "80c0799bc58b08cf6eaa53ecd202305eb42fd7bc73746cb6c5064dbeae7e8476",
    "data/lily.tif": "395c2f0194c25b9824a8cd79266920362a0816bc9e906dd392adce2d8309af03",
    "data/mitosis.tif": "2751ba667c4067c5d30817cff004aa06f6f6287f1cdbb5b8c9c6a500308cb456",
    "data/skin.jpg": "8759fe080509712163453f4b17106582b8513e73b0788d80160abf840e272075",
    "data/pivchallenge-B-B001_1.tif": "e95e09abbcecba723df283ac7d361766328abd943701a2ec2f345d4a2014da2a",
    "data/pivchallenge-B-B001_2.tif": "4ceb5407e4e333476a0f264c14b7a3f6c0e753fcdc99ee1c4b8196e5f823805e",
    "data/protein_transport.tif": "a8e24e8d187f33e92ee28508d5615286c850ca75374af7e74e527d290e8b06ea",
    "data/solidification.tif": "50ef9a52c621b7c0c506ad1fe1b8ee8a158a4d7c8e50ddfce1e273a422dca3f9",
}
|
||||
|
||||
# Download URLs for datafiles not shipped with the package; keys match
# the corresponding entries in `registry` above.
registry_urls = {
    "data/brain.tiff": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/brain.tiff",
    "data/cells3d.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/cells3d.tif",
    "data/eagle.png": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/eagle.png",
    "data/kidney.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/kidney-tissue-fluorescence.tif",
    "data/lily.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/lily-of-the-valley-fluorescence.tif",
    "data/mitosis.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/AS_09125_050116030001_D03f00d0.tif",
    "data/rank_filters_tests_3d.npz": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/Tests_besides_Equalize_Otsu/add18_entropy/rank_filters_tests_3d.npz",
    "data/skin.jpg": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/Normal_Epidermis_and_Dermis_with_Intradermal_Nevus_10x.JPG",
    "data/pivchallenge-B-B001_1.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/pivchallenge/B/B001_1.tif",
    "data/pivchallenge-B-B001_2.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/pivchallenge/B/B001_2.tif",
    "data/protein_transport.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/NPCsingleNucleus.tif",
    "data/solidification.tif": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/nickel_solidification.tif",
    "restoration/tests/astronaut_rl.npy": "https://gitlab.com/scikit-image/data/-/raw/2cdc5ce89b334d28f06a58c9f0ca21aa6992a5ba/astronaut_rl.npy",
}
|
||||
|
||||
legacy_registry = {
|
||||
('data/' + filename): registry['data/' + filename]
|
||||
for filename in legacy_datasets
|
||||
}
|
||||
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/astronaut.png
vendored
Normal file
|
After Width: | Height: | Size: 773 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/brick.png
vendored
Normal file
|
After Width: | Height: | Size: 104 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/camera.png
vendored
Normal file
|
After Width: | Height: | Size: 136 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/cell.png
vendored
Normal file
|
After Width: | Height: | Size: 72 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/chelsea.png
vendored
Normal file
|
After Width: | Height: | Size: 235 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/chessboard_GRAY.png
vendored
Normal file
|
After Width: | Height: | Size: 418 B |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/chessboard_RGB.png
vendored
Normal file
|
After Width: | Height: | Size: 1.1 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/clock_motion.png
vendored
Normal file
|
After Width: | Height: | Size: 57 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/coffee.png
vendored
Normal file
|
After Width: | Height: | Size: 456 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/coins.png
vendored
Normal file
|
After Width: | Height: | Size: 74 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/color.png
vendored
Normal file
|
After Width: | Height: | Size: 84 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/grass.png
vendored
Normal file
|
After Width: | Height: | Size: 213 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/gravel.png
vendored
Normal file
|
After Width: | Height: | Size: 190 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/horse.png
vendored
Normal file
|
After Width: | Height: | Size: 16 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/hubble_deep_field.jpg
vendored
Normal file
|
After Width: | Height: | Size: 516 KiB |
BIN
.CondaPkg/env/lib/python3.11/site-packages/skimage/data/ihc.png
vendored
Normal file
|
After Width: | Height: | Size: 467 KiB |