From 38a1f9c20ee71638c16ef2f4afa3b6b9590e50dc Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Fri, 2 Sep 2022 16:11:17 +0300 Subject: [PATCH 01/16] Use `imops` --- dpipe/im/shape_ops.py | 267 ++++--------------------------- dpipe/im/tests/test_shape_ops.py | 161 ------------------- requirements.txt | 1 + 3 files changed, 33 insertions(+), 396 deletions(-) delete mode 100644 dpipe/im/tests/test_shape_ops.py diff --git a/dpipe/im/shape_ops.py b/dpipe/im/shape_ops.py index 514ce16..fa0c7c7 100644 --- a/dpipe/im/shape_ops.py +++ b/dpipe/im/shape_ops.py @@ -1,12 +1,12 @@ -import warnings -from typing import Callable, Union, Sequence +from typing import Callable, Union import numpy as np -from scipy import ndimage +from imops.crop import crop_to_box +from imops.pad import pad, pad_to_divisible, pad_to_shape, restore_crop +from imops.zoom import zoom, zoom_to_shape -from .box import Box from ..itertools import extract -from .axes import fill_by_indices, AxesLike, AxesParams, resolve_deprecation, broadcast_to_axis +from .axes import AxesLike, AxesParams, broadcast_to_axis, fill_by_indices, resolve_deprecation from .utils import build_slices __all__ = [ @@ -16,192 +16,9 @@ ] -def zoom(x: np.ndarray, scale_factor: AxesParams, axis: AxesLike = None, order: int = 1, - fill_value: Union[float, Callable] = 0) -> np.ndarray: - """ - Rescale ``x`` according to ``scale_factor`` along the ``axes``. - - Parameters - ---------- - x - scale_factor - axis - axis along which the tensor will be scaled. If None - the last ``len(scale_factor)`` axes are used. - order - order of interpolation. - fill_value - value to fill past edges. If Callable (e.g. `numpy.min`) - ``fill_value(x)`` will be used. - """ - x = _to_array(x) - axis = resolve_deprecation(axis, x.ndim, scale_factor) - scale_factor = fill_by_indices(np.ones(x.ndim, 'float64'), scale_factor, axis) - if callable(fill_value): - fill_value = fill_value(x) - - # remove an annoying warning - with warnings.catch_warnings(): - warnings.simplefilter('ignore', UserWarning) - return ndimage.zoom(x, scale_factor, order=order, cval=fill_value) - - -def zoom_to_shape(x: np.ndarray, shape: AxesLike, axis: AxesLike = None, order: int = 1, - fill_value: Union[float, Callable] = 0) -> np.ndarray: - """ - Rescale ``x`` to match ``shape`` along the ``axes``. - - Parameters - ---------- - x - shape - final shape. - axis - axes along which the tensor will be scaled. If None - the last ``len(shape)`` axes are used. - order - order of interpolation. - fill_value - value to fill past edges. If Callable (e.g. `numpy.min`) - ``fill_value(x)`` will be used. - """ - x = _to_array(x) - axis = resolve_deprecation(axis, x.ndim, shape) - old_shape = np.array(x.shape, 'float64') - new_shape = np.array(fill_by_indices(x.shape, shape, axis), 'float64') - return zoom(x, new_shape / old_shape, range(x.ndim), order=order, fill_value=fill_value) - - -def proportional_zoom_to_shape(x: np.ndarray, shape: AxesLike, axis: AxesLike = None, - padding_values: Union[AxesParams, Callable] = 0, order: int = 1) -> np.ndarray: - """ - Proportionally rescale ``x`` to fit ``shape`` along ``axes`` then pad it to that shape. - - Parameters - ---------- - x - shape - final shape. - axis - axes along which ``x`` will be padded. If None - the last ``len(shape)`` axes are used. - padding_values - values to pad with. - order - order of interpolation. - """ - x = _to_array(x) - axis = resolve_deprecation(axis, x.ndim, shape, padding_values) - scale_factor = (np.array(shape, 'float64') / extract(x.shape, axis)).min() - return pad_to_shape(zoom(x, scale_factor, axis, order), shape, axis, padding_values) - - -def pad(x: np.ndarray, padding: Union[AxesLike, Sequence[Sequence[int]]], axis: AxesLike = None, - padding_values: Union[AxesParams, Callable] = 0) -> np.ndarray: - """ - Pad ``x`` according to ``padding`` along the ``axes``. - - Parameters - ---------- - x - tensor to pad. - padding - if 2D array [[start_1, stop_1], ..., [start_n, stop_n]] - specifies individual padding - for each axis from ``axes``. The length of the array must either be equal to 1 or match the length of ``axes``. - If 1D array [val_1, ..., val_n] - same as [[val_1, val_1], ..., [val_n, val_n]]. - If scalar (val) - same as [[val, val]]. - padding_values - values to pad with, must be broadcastable to the resulting array. - If Callable (e.g. `numpy.min`) - ``padding_values(x)`` will be used. - axis - axes along which ``x`` will be padded. If None - the last ``len(padding)`` axes are used. - """ - x = _to_array(x) - padding = np.asarray(padding) - if padding.ndim < 2: - padding = padding.reshape(-1, 1) - axis = resolve_deprecation(axis, x.ndim, padding) - padding = np.asarray(fill_by_indices(np.zeros((x.ndim, 2), dtype=int), np.atleast_2d(padding), axis)) - if (padding < 0).any(): - raise ValueError(f'Padding must be non-negative: {padding.tolist()}.') - if callable(padding_values): - padding_values = padding_values(x) - - new_shape = np.array(x.shape) + np.sum(padding, axis=1) - new_x = np.array(padding_values, dtype=x.dtype) - new_x = np.broadcast_to(new_x, new_shape).copy() - - start = padding[:, 0] - end = np.where(padding[:, 1] != 0, -padding[:, 1], None) - new_x[build_slices(start, end)] = x - return new_x - - -def pad_to_shape(x: np.ndarray, shape: AxesLike, axis: AxesLike = None, padding_values: Union[AxesParams, Callable] = 0, - ratio: AxesParams = 0.5) -> np.ndarray: - """ - Pad ``x`` to match ``shape`` along the ``axes``. - - Parameters - ---------- - x - shape - final shape. - padding_values - values to pad with. If Callable (e.g. `numpy.min`) - ``padding_values(x)`` will be used. - axis - axes along which ``x`` will be padded. If None - the last ``len(shape)`` axes are used. - ratio - the fraction of the padding that will be applied to the left, ``1.0 - ratio`` will be applied to the right. - By default ``0.5 - ratio``, it is applied uniformly to the left and right. - """ - x = _to_array(x) - axis = resolve_deprecation(axis, x.ndim, shape) - shape, ratio = broadcast_to_axis(axis, shape, ratio) - - old_shape = np.array(x.shape)[list(axis)] - if (old_shape > shape).any(): - shape = fill_by_indices(x.shape, shape, axis) - raise ValueError(f'The resulting shape cannot be smaller than the original: {x.shape} vs {shape}') - - delta = shape - old_shape - start = (delta * ratio).astype(int) - padding = np.array((start, delta - start)).T.astype(int) - return pad(x, padding, axis, padding_values=padding_values) - - -def pad_to_divisible(x: np.ndarray, divisor: AxesLike, axis: AxesLike = None, - padding_values: Union[AxesParams, Callable] = 0, ratio: AxesParams = 0.5, - remainder: AxesLike = 0): - """ - Pads ``x`` to be divisible by ``divisor`` along the ``axes``. - - Parameters - ---------- - x - divisor - a value an incoming array should be divisible by. - remainder - ``x`` will be padded such that its shape gives the remainder ``remainder`` when divided by ``divisor``. - axis - axes along which the array will be padded. If None - the last ``len(divisor)`` axes are used. - padding_values - values to pad with. If Callable (e.g. `numpy.min`) - ``padding_values(x)`` will be used. - ratio - the fraction of the padding that will be applied to the left, ``1 - ratio`` will be applied to the right. - - References - ---------- - `pad_to_shape` - """ - x = _to_array(x) - axis = resolve_deprecation(axis, x.ndim, divisor, remainder, ratio) - divisor, remainder, ratio = broadcast_to_axis(axis, divisor, remainder, ratio) - - assert np.all(remainder >= 0) - shape = np.maximum(np.array(x.shape)[list(axis)], remainder) - return pad_to_shape(x, shape + (remainder - shape) % divisor, axis, padding_values, ratio) - - def crop_to_shape(x: np.ndarray, shape: AxesLike, axis: AxesLike = None, ratio: AxesParams = 0.5) -> np.ndarray: """ Crop ``x`` to match ``shape`` along ``axes``. - Parameters ---------- x @@ -212,7 +29,9 @@ def crop_to_shape(x: np.ndarray, shape: AxesLike, axis: AxesLike = None, ratio: ratio the fraction of the crop that will be applied to the left, ``1 - ratio`` will be applied to the right. """ - x = _to_array(x) + if not hasattr(x, 'ndim') or not hasattr(x, 'shape'): + x = np.asarray(x) + axis = resolve_deprecation(axis, x.ndim, shape) shape, ratio = broadcast_to_axis(axis, shape, ratio) @@ -223,55 +42,33 @@ def crop_to_shape(x: np.ndarray, shape: AxesLike, axis: AxesLike = None, ratio: ndim = len(x.shape) ratio = fill_by_indices(np.zeros(ndim), ratio, axis) start = ((old_shape - new_shape) * ratio).astype(int) - return x[build_slices(start, start + new_shape)] - - -def crop_to_box(x: np.ndarray, box: Box, axis: AxesLike = None, padding_values: AxesParams = None) -> np.ndarray: - """ - Crop ``x`` according to ``box`` along ``axes``. - - If axes is None - the last ``box.shape[-1]`` axes are used. - """ - x = _to_array(x) - start, stop = box - axis = resolve_deprecation(axis, x.ndim, start) - - slice_start = np.maximum(start, 0) - slice_stop = np.minimum(stop, extract(x.shape, axis)) - padding = np.array([slice_start - start, stop - slice_stop], dtype=int).T - if padding_values is None and padding.any(): - raise ValueError(f"The box {box} exceeds the input's limits {x.shape}.") - slice_start = fill_by_indices(np.zeros(x.ndim, int), slice_start, axis) - slice_stop = fill_by_indices(x.shape, slice_stop, axis) - x = x[build_slices(slice_start, slice_stop)] - - if padding_values is not None and padding.any(): - x = pad(x, padding, axis, padding_values) - return x + return x[build_slices(start, start + new_shape)] -def restore_crop(x: np.ndarray, box: Box, shape: AxesLike, padding_values: AxesParams = 0) -> np.ndarray: +def proportional_zoom_to_shape( + x: np.ndarray, + shape: AxesLike, + axis: AxesLike = None, + padding_values: Union[AxesParams, Callable] = 0, + order: int = 1, +) -> np.ndarray: """ - Pad ``x`` to match ``shape``. The left padding is taken equal to ``box``'s start. + Proportionally rescale ``x`` to fit ``shape`` along ``axes`` then pad it to that shape. + Parameters + ---------- + x + shape + final shape. + axis + axes along which ``x`` will be padded. If None - the last ``len(shape)`` axes are used. + padding_values + values to pad with. + order + order of interpolation. """ - x = _to_array(x) - assert len(shape) == x.ndim - start, stop = box - - if (stop > shape).any() or (stop - start != x.shape).any(): - raise ValueError(f"The input array (of shape {x.shape}) was not obtained by cropping a " - f"box {start, stop} from the shape {shape}.") - - padding = np.array([start, shape - stop], dtype=int).T - x = pad(x, padding, padding_values=padding_values) - assert all(np.array(x.shape) == shape) - return x - + x = np.asarray(x) + axis = resolve_deprecation(axis, x.ndim, shape, padding_values) + scale_factor = (np.array(shape, 'float64') / extract(x.shape, axis)).min() -def _to_array(x): - # TODO: smarter check - # we want to handle torch when possible - if not hasattr(x, 'ndim') or not hasattr(x, 'shape'): - x = np.asarray(x) - return x + return pad_to_shape(zoom(x, scale_factor, axis, order), shape, axis, padding_values) diff --git a/dpipe/im/tests/test_shape_ops.py b/dpipe/im/tests/test_shape_ops.py deleted file mode 100644 index a207ce0..0000000 --- a/dpipe/im/tests/test_shape_ops.py +++ /dev/null @@ -1,161 +0,0 @@ -import unittest -from functools import partial - -import pytest - -import numpy as np -from dpipe.im.shape_ops import * - -assert_eq = np.testing.assert_array_equal - - -class TestPad(unittest.TestCase): - def test_broadcasting(self): - x = np.random.randint(0, 100, (3, 20, 23)) - main = pad(x, [[3, 3], [3, 3], [3, 3]]) - - assert_eq(x, main[3:-3, 3:-3, 3:-3]) - assert_eq(main, pad(x, [3, 3, 3])) - assert_eq(main, pad(x, 3, axis=[0, 1, 2])) - assert_eq(main, pad(x, [3], axis=[0, 1, 2])) - assert_eq(main, pad(x, [[3]], axis=[0, 1, 2])) - assert_eq(main, pad(x, [[3, 3]], axis=[0, 1, 2])) - assert_eq(main, pad(x, [[3], [3], [3]], axis=[0, 1, 2])) - - assert_eq( - pad(x, 3, axis=[0, 1]), - pad(x, [[3, 3], [3, 3], [0, 0]]) - ) - assert_eq( - pad(x, [2, 4, 3]), - pad(x, [[2, 2], [4, 4], [3, 3]]) - ) - p = pad(x, [[1, 2], [3, 4], [5, 6]]) - assert_eq(x, p[1:-2, 3:-4, 5:-6]) - - p = pad(x, [[1, 2], [3, 4]], axis=[0, 2]) - assert_eq(x, p[1:-2, :, 3:-4]) - - p = pad(x, [[1, 2], [3, 4]], axis=[2, 0]) - assert_eq(x, p[3:-4:, :, 1:-2]) - - with pytest.raises(ValueError): - pad(x, [1, 2], axis=-1) - - def test_padding_values(self): - x = np.array([ - [0, 0, 0, 0], - [0, 0, 0, 0], - [0, 0, 0, 0], - ], dtype=int) - - p = pad(x, [1, 1], padding_values=1) - assert_eq(p, [ - [1, 1, 1, 1, 1, 1], - [1, 0, 0, 0, 0, 1], - [1, 0, 0, 0, 0, 1], - [1, 0, 0, 0, 0, 1], - [1, 1, 1, 1, 1, 1], - ]) - - x = np.random.randint(0, 100, (3, 20, 23)) - assert_eq( - pad(x, [1, 1], padding_values=x.min(), axis=(1, 2)), - pad(x, [1, 1], padding_values=np.min, axis=(1, 2)), - ) - assert_eq( - pad(x, [1, 1], padding_values=x.min(axis=(1, 2), keepdims=True), axis=(1, 2)), - pad(x, [1, 1], padding_values=partial(np.min, axis=(1, 2), keepdims=True), axis=(1, 2)), - ) - - def test_pad(self): - x = np.arange(12).reshape((3, 2, 2)) - padding = np.array(((0, 0), (1, 2), (2, 1))) - padding_values = np.min(x, axis=(1, 2), keepdims=True) - - y = pad(x, padding, padding_values=padding_values) - np.testing.assert_array_equal(y, np.array([ - [ - [0, 0, 0, 0, 0], - [0, 0, 0, 1, 0], - [0, 0, 2, 3, 0], - [0, 0, 0, 0, 0], - [0, 0, 0, 0, 0], - ], - [ - [4, 4, 4, 4, 4], - [4, 4, 4, 5, 4], - [4, 4, 6, 7, 4], - [4, 4, 4, 4, 4], - [4, 4, 4, 4, 4], - ], - [ - [8, 8, 8, 8, 8], - [8, 8, 8, 9, 8], - [8, 8, 10, 11, 8], - [8, 8, 8, 8, 8], - [8, 8, 8, 8, 8], - ], - ])) - - -class TestCropToBox(unittest.TestCase): - def test_shape(self): - for _ in range(100): - shape = np.random.randint(10, 50, size=2) - box_shape = np.random.randint(1, 50, size=2) - box_center = [np.random.randint(s) for s in shape] - start = box_center - box_shape // 2 - - x = np.empty(shape) - box = np.stack([start, start + box_shape]) - - assert (crop_to_box(x, box, padding_values=0).shape == box_shape).all() - - def test_axes(self): - x = np.random.randint(0, 100, (3, 20, 23)) - - assert_eq(x[:, 1:15, 2:14], crop_to_box(x, np.array([[1, 2], [15, 14]]), axis=[1, 2])) - - assert_eq( - x[:, 1:, 2:], - crop_to_box(x, np.array([[1, 2], [40, 33]]), padding_values=0, axis=(1, 2))[:, :19, :21] - ) - - assert_eq( - x[:, :15, :14], - crop_to_box(x, np.array([[-10, -5], [15, 14]]), padding_values=0, axis=(1, 2))[:, 10:, 5:] - ) - - def test_raises(self): - x = np.empty((3, 20, 23)) - with pytest.raises(ValueError): - crop_to_box(x, np.array([[1], [40]]), axis=(1, 2)) - - with pytest.raises(ValueError): - crop_to_box(x, np.array([[-1], [1]]), axis=(1, 2)) - - -class TestShapeOps(unittest.TestCase): - def setUp(self): - self.x = np.random.rand(3, 10, 10) * 2 + 3 - - def _test_to_shape(self, func, shape, bad_shape): - assert func(self.x, shape).shape == shape - with pytest.raises(ValueError): - func(self.x, bad_shape) - - def test_scale_to_shape(self): - shape = (3, 4, 15) - assert zoom_to_shape(self.x, shape).shape == shape - assert zoom_to_shape(self.x, shape[::-1]).shape == shape[::-1] - - def test_pad_to_shape(self): - self._test_to_shape(pad_to_shape, (3, 15, 16), (3, 4, 10)) - - def test_slice_to_shape(self): - self._test_to_shape(crop_to_shape, (3, 4, 8), (3, 15, 10)) - - def test_scale(self): - assert zoom(self.x, (3, 4, 15)).shape == (9, 40, 150) - assert zoom(self.x, (4, 3), axis=(1, 2)).shape == (3, 40, 30) diff --git a/requirements.txt b/requirements.txt index 5a4f5c9..9a5b5d8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,3 +11,4 @@ lazycon>=0.1.0<1.0 loky~=2.8.0 wandb nibabel +imops>=0.3.0<1.0 From af11e257d3a1f7ce8802dd7d6fee9433a90790ea Mon Sep 17 00:00:00 2001 From: Max Date: Thu, 8 Sep 2022 11:15:43 +0300 Subject: [PATCH 02/16] moving tests to ci --- .github/workflows/tests.yml | 63 +++++++++++++++++++ MANIFEST.in | 2 +- dpipe/__init__.py | 2 +- dpipe/__version__.py | 1 + dpipe/prototypes/strategy/tests/conftest.py | 3 - release.sh | 5 -- run_tests.sh | 3 - setup.py | 13 +++- tests/conftest.py | 1 + {dpipe/tests => tests}/mnist/network.config | 0 {dpipe/tests => tests}/mnist/setup.config | 0 pytest.ini => tests/pytest.ini | 2 + .../test_gradient_accumulation.py | 1 + {dpipe/tests => tests}/test_itertools.py | 0 {dpipe/tests => tests}/test_mnist.py | 2 +- 15 files changed, 81 insertions(+), 17 deletions(-) create mode 100644 .github/workflows/tests.yml create mode 100644 dpipe/__version__.py delete mode 100644 dpipe/prototypes/strategy/tests/conftest.py delete mode 100755 release.sh delete mode 100755 run_tests.sh create mode 100644 tests/conftest.py rename {dpipe/tests => tests}/mnist/network.config (100%) rename {dpipe/tests => tests}/mnist/setup.config (100%) rename pytest.ini => tests/pytest.ini (60%) rename {dpipe/tests => tests}/test_gradient_accumulation.py (99%) rename {dpipe/tests => tests}/test_itertools.py (100%) rename {dpipe/tests => tests}/test_mnist.py (95%) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..8285d35 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,63 @@ +name: Test + +on: [ pull_request ] + +env: + MODULE_NAME: dpipe + +jobs: + test: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [ 3.6, 3.7, 3.8, 3.9, '3.10' ] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Check the version and build the package + run: | + VERSION=$(python -c "from pathlib import Path; import runpy; folder, = {d.parent for d in Path().resolve().glob('*/__init__.py') if d.parent.is_dir() and (d.parent / '__version__.py').exists()}; print(runpy.run_path(folder / '__version__.py')['__version__'])") + MATCH=$(pip index versions deep-pipe | grep "Available versions:" | grep $VERSION) || echo + echo $MATCH + if [ "$GITHUB_BASE_REF" = "master" ] && [ "$MATCH" != "" ]; then exit 1; fi + python setup.py sdist + + - name: Install + run: | + pip install dist/* + pip install -r tests/requirements.txt + + cd tests + export MODULE_PARENT=$(python -c "import $MODULE_NAME, os; print(os.path.dirname($MODULE_NAME.__path__[0]))") + export MODULE_PARENT=${MODULE_PARENT%"/"} + cd .. + echo $MODULE_PARENT + echo "MODULE_PARENT=$(echo $MODULE_PARENT)" >> $GITHUB_ENV + + - name: Test with pytest + run: | + pytest tests -m "not integration and not cuda" --junitxml=reports/junit-${{ matrix.python-version }}.xml --cov="$MODULE_PARENT/$MODULE_NAME" --cov-report=xml --cov-branch + - name: Generate coverage report + run: | + coverage xml -o reports/coverage-${{ matrix.python-version }}.xml + sed -i -e "s|$MODULE_PARENT/||g" reports/coverage-${{ matrix.python-version }}.xml + sed -i -e "s|$(echo $MODULE_PARENT/ | tr "/" .)||g" reports/coverage-${{ matrix.python-version }}.xml + + - name: Upload artifacts + uses: actions/upload-artifact@v2 + with: + name: reports-${{ matrix.python-version }} + path: reports/*-${{ matrix.python-version }}.xml + if: ${{ always() }} + + - name: Upload coverage results + uses: codecov/codecov-action@v3 + with: + fail_ci_if_error: true + files: reports/coverage-${{ matrix.python-version }}.xml + verbose: true diff --git a/MANIFEST.in b/MANIFEST.in index fb3bdc5..dd1e741 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,4 @@ -include *.md +include README.md include requirements.txt include LICENSE recursive-include dpipe *.py diff --git a/dpipe/__init__.py b/dpipe/__init__.py index 10939f0..70cd492 100644 --- a/dpipe/__init__.py +++ b/dpipe/__init__.py @@ -1 +1 @@ -__version__ = '0.1.2' +from __version__ import __version__ diff --git a/dpipe/__version__.py b/dpipe/__version__.py new file mode 100644 index 0000000..8ce9b36 --- /dev/null +++ b/dpipe/__version__.py @@ -0,0 +1 @@ +__version__ = '0.1.3' diff --git a/dpipe/prototypes/strategy/tests/conftest.py b/dpipe/prototypes/strategy/tests/conftest.py deleted file mode 100644 index 970a90e..0000000 --- a/dpipe/prototypes/strategy/tests/conftest.py +++ /dev/null @@ -1,3 +0,0 @@ -import pytest - -pytest_plugins = ['optimization_fixtures'] diff --git a/release.sh b/release.sh deleted file mode 100755 index f7a0fca..0000000 --- a/release.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env bash - -python setup.py sdist -twine upload dist/* -rm -r dist diff --git a/run_tests.sh b/run_tests.sh deleted file mode 100755 index ed62e0f..0000000 --- a/run_tests.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -pytest -m "not integration" diff --git a/setup.py b/setup.py index 728644a..29b2842 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,13 @@ -from setuptools import setup, find_packages +from pathlib import Path -from dpipe import __version__ +from setuptools import setup, find_packages classifiers = '''Development Status :: 4 - Beta Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 -Programming Language :: Python :: 3.8''' +Programming Language :: Python :: 3.8 +Programming Language :: Python :: 3.9 +Programming Language :: Python :: 3.10''' with open('README.md', encoding='utf-8') as file: long_description = file.read() @@ -13,6 +15,11 @@ with open('requirements.txt', encoding='utf-8') as file: requirements = file.read().splitlines() +with open(Path(__file__).resolve().parent / 'dpipe/__version__.py', encoding='utf-8') as file: + scope = {} + exec(file.read(), scope) + __version__ = scope['__version__'] + setup( name='deep_pipe', packages=find_packages(include=('dpipe',)), diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..b983a1b --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1 @@ +# pytest_plugins = ['optimization_fixtures'] diff --git a/dpipe/tests/mnist/network.config b/tests/mnist/network.config similarity index 100% rename from dpipe/tests/mnist/network.config rename to tests/mnist/network.config diff --git a/dpipe/tests/mnist/setup.config b/tests/mnist/setup.config similarity index 100% rename from dpipe/tests/mnist/setup.config rename to tests/mnist/setup.config diff --git a/pytest.ini b/tests/pytest.ini similarity index 60% rename from pytest.ini rename to tests/pytest.ini index abb5331..3261e6b 100644 --- a/pytest.ini +++ b/tests/pytest.ini @@ -1,3 +1,5 @@ [pytest] markers = integration: tests for entire pipelines + cuda: requires a cuda-compatible gpu + diff --git a/dpipe/tests/test_gradient_accumulation.py b/tests/test_gradient_accumulation.py similarity index 99% rename from dpipe/tests/test_gradient_accumulation.py rename to tests/test_gradient_accumulation.py index 63de95f..08a45dd 100644 --- a/dpipe/tests/test_gradient_accumulation.py +++ b/tests/test_gradient_accumulation.py @@ -7,6 +7,7 @@ from dpipe.train import train +@pytest.mark.cuda @pytest.mark.parametrize('batch_size', [4, 16, 64]) def test_train(batch_size): net1 = nn.Sequential( diff --git a/dpipe/tests/test_itertools.py b/tests/test_itertools.py similarity index 100% rename from dpipe/tests/test_itertools.py rename to tests/test_itertools.py diff --git a/dpipe/tests/test_mnist.py b/tests/test_mnist.py similarity index 95% rename from dpipe/tests/test_mnist.py rename to tests/test_mnist.py index 118f33c..966602f 100644 --- a/dpipe/tests/test_mnist.py +++ b/tests/test_mnist.py @@ -13,7 +13,7 @@ class TestMNIST(unittest.TestCase): # TODO: use a temp dir base_path = Path('~/tests/MNIST').expanduser() experiment_path = base_path / 'exp' - config_path = 'dpipe/tests/mnist/setup.config' + config_path = Path(__file__).resolve().parent / 'mnist/setup.config' config = load(config_path) @classmethod From e065f5f06b833f9e23e8114ff40f10e59f3a4c31 Mon Sep 17 00:00:00 2001 From: Max Date: Thu, 8 Sep 2022 11:18:42 +0300 Subject: [PATCH 03/16] test reqs --- tests/requirements.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 tests/requirements.txt diff --git a/tests/requirements.txt b/tests/requirements.txt new file mode 100644 index 0000000..9955dec --- /dev/null +++ b/tests/requirements.txt @@ -0,0 +1,2 @@ +pytest +pytest-cov From 0cb82419534754a473ec04a5a8456c054928e38b Mon Sep 17 00:00:00 2001 From: Max Date: Thu, 8 Sep 2022 11:21:09 +0300 Subject: [PATCH 04/16] fixed a typo and some import issues --- dpipe/__init__.py | 2 +- tests/test_gradient_accumulation.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/dpipe/__init__.py b/dpipe/__init__.py index 70cd492..9226fe7 100644 --- a/dpipe/__init__.py +++ b/dpipe/__init__.py @@ -1 +1 @@ -from __version__ import __version__ +from .__version__ import __version__ diff --git a/tests/test_gradient_accumulation.py b/tests/test_gradient_accumulation.py index 08a45dd..6212602 100644 --- a/tests/test_gradient_accumulation.py +++ b/tests/test_gradient_accumulation.py @@ -1,8 +1,6 @@ import pytest -import torch import numpy as np -from torch import nn from dpipe.torch import train_step from dpipe.train import train @@ -10,6 +8,9 @@ @pytest.mark.cuda @pytest.mark.parametrize('batch_size', [4, 16, 64]) def test_train(batch_size): + import torch + from torch import nn + net1 = nn.Sequential( nn.Conv2d(3, 4, kernel_size=3, padding=1), nn.LayerNorm([28, 28]), From 2365ed63a021f8614c30938d46f8adb7e5754257 Mon Sep 17 00:00:00 2001 From: Max Date: Thu, 8 Sep 2022 16:26:08 +0300 Subject: [PATCH 05/16] added torch to reqs --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 9a5b5d8..331a41a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,4 +11,5 @@ lazycon>=0.1.0<1.0 loky~=2.8.0 wandb nibabel +torch imops>=0.3.0<1.0 From e1e666f702c8f579ffdd8f35a7d38e3491a754ac Mon Sep 17 00:00:00 2001 From: Valentin Date: Mon, 19 Sep 2022 18:32:00 +0300 Subject: [PATCH 06/16] move tests --- .../batch_iter/tests => tests/batch_iter}/test_expiration_pool.py | 0 {dpipe/batch_iter/tests => tests/batch_iter}/test_pipeline.py | 0 {dpipe/batch_iter/tests => tests/batch_iter}/test_sources.py | 0 {dpipe/im/tests => tests/im}/test_axes.py | 0 {dpipe/im/tests => tests/im}/test_box.py | 0 {dpipe/im/tests => tests/im}/test_dist.py | 0 {dpipe/im/tests => tests/im}/test_grid.py | 0 {dpipe/im/tests => tests/im}/test_metrics.py | 0 {dpipe/im/tests => tests/im}/test_patch.py | 0 {dpipe/im/tests => tests/im}/test_preprocessing.py | 0 {dpipe/im/tests => tests/im}/test_shape_utils.py | 0 {dpipe/im/tests => tests/im}/test_utils.py | 0 {dpipe/predict/tests => tests/predict}/test_functional.py | 0 {dpipe/predict/tests => tests/predict}/test_shape.py | 0 14 files changed, 0 insertions(+), 0 deletions(-) rename {dpipe/batch_iter/tests => tests/batch_iter}/test_expiration_pool.py (100%) rename {dpipe/batch_iter/tests => tests/batch_iter}/test_pipeline.py (100%) rename {dpipe/batch_iter/tests => tests/batch_iter}/test_sources.py (100%) rename {dpipe/im/tests => tests/im}/test_axes.py (100%) rename {dpipe/im/tests => tests/im}/test_box.py (100%) rename {dpipe/im/tests => tests/im}/test_dist.py (100%) rename {dpipe/im/tests => tests/im}/test_grid.py (100%) rename {dpipe/im/tests => tests/im}/test_metrics.py (100%) rename {dpipe/im/tests => tests/im}/test_patch.py (100%) rename {dpipe/im/tests => tests/im}/test_preprocessing.py (100%) rename {dpipe/im/tests => tests/im}/test_shape_utils.py (100%) rename {dpipe/im/tests => tests/im}/test_utils.py (100%) rename {dpipe/predict/tests => tests/predict}/test_functional.py (100%) rename {dpipe/predict/tests => tests/predict}/test_shape.py (100%) diff --git a/dpipe/batch_iter/tests/test_expiration_pool.py b/tests/batch_iter/test_expiration_pool.py similarity index 100% rename from dpipe/batch_iter/tests/test_expiration_pool.py rename to tests/batch_iter/test_expiration_pool.py diff --git a/dpipe/batch_iter/tests/test_pipeline.py b/tests/batch_iter/test_pipeline.py similarity index 100% rename from dpipe/batch_iter/tests/test_pipeline.py rename to tests/batch_iter/test_pipeline.py diff --git a/dpipe/batch_iter/tests/test_sources.py b/tests/batch_iter/test_sources.py similarity index 100% rename from dpipe/batch_iter/tests/test_sources.py rename to tests/batch_iter/test_sources.py diff --git a/dpipe/im/tests/test_axes.py b/tests/im/test_axes.py similarity index 100% rename from dpipe/im/tests/test_axes.py rename to tests/im/test_axes.py diff --git a/dpipe/im/tests/test_box.py b/tests/im/test_box.py similarity index 100% rename from dpipe/im/tests/test_box.py rename to tests/im/test_box.py diff --git a/dpipe/im/tests/test_dist.py b/tests/im/test_dist.py similarity index 100% rename from dpipe/im/tests/test_dist.py rename to tests/im/test_dist.py diff --git a/dpipe/im/tests/test_grid.py b/tests/im/test_grid.py similarity index 100% rename from dpipe/im/tests/test_grid.py rename to tests/im/test_grid.py diff --git a/dpipe/im/tests/test_metrics.py b/tests/im/test_metrics.py similarity index 100% rename from dpipe/im/tests/test_metrics.py rename to tests/im/test_metrics.py diff --git a/dpipe/im/tests/test_patch.py b/tests/im/test_patch.py similarity index 100% rename from dpipe/im/tests/test_patch.py rename to tests/im/test_patch.py diff --git a/dpipe/im/tests/test_preprocessing.py b/tests/im/test_preprocessing.py similarity index 100% rename from dpipe/im/tests/test_preprocessing.py rename to tests/im/test_preprocessing.py diff --git a/dpipe/im/tests/test_shape_utils.py b/tests/im/test_shape_utils.py similarity index 100% rename from dpipe/im/tests/test_shape_utils.py rename to tests/im/test_shape_utils.py diff --git a/dpipe/im/tests/test_utils.py b/tests/im/test_utils.py similarity index 100% rename from dpipe/im/tests/test_utils.py rename to tests/im/test_utils.py diff --git a/dpipe/predict/tests/test_functional.py b/tests/predict/test_functional.py similarity index 100% rename from dpipe/predict/tests/test_functional.py rename to tests/predict/test_functional.py diff --git a/dpipe/predict/tests/test_shape.py b/tests/predict/test_shape.py similarity index 100% rename from dpipe/predict/tests/test_shape.py rename to tests/predict/test_shape.py From a081267f8c80b4b4f190cbad477a2f6cc56012e1 Mon Sep 17 00:00:00 2001 From: Max Date: Mon, 19 Sep 2022 22:20:03 +0300 Subject: [PATCH 07/16] updated loky in reqs --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 331a41a..037a6ff 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ tqdm pdp==0.3.* tensorboard-easy lazycon>=0.1.0<1.0 -loky~=2.8.0 +loky>=3.0.0,<4.0.0 wandb nibabel torch From d44b7620da3996ce8e09d1684f309d2a9ebebae4 Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Wed, 18 Jan 2023 12:39:33 +0300 Subject: [PATCH 08/16] Use `imops.measure.label` --- dpipe/im/preprocessing.py | 16 +++++++++------- requirements.txt | 2 +- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/dpipe/im/preprocessing.py b/dpipe/im/preprocessing.py index af85119..439d99e 100644 --- a/dpipe/im/preprocessing.py +++ b/dpipe/im/preprocessing.py @@ -1,5 +1,5 @@ import numpy as np -from skimage.measure import label +from imops.measure import label from dpipe.itertools import negate_indices from .axes import AxesLike, check_axes, AxesParams @@ -99,14 +99,16 @@ def describe_connected_components(mask: np.ndarray, background: int = 0, drop_ba volumes a list of corresponding labels' volumes. """ - label_map = label(mask, background=background) - labels, volumes = np.unique(label_map, return_counts=True) + label_map, labels, volumes = label(mask, background=background, return_labels=True, return_sizes=True) + + if not drop_background: + # background's label is always 0 + labels = np.append(labels, 0) + volumes = np.append(volumes, label_map.size - volumes.sum(dtype=int)) + idx = volumes.argsort()[::-1] labels, volumes = labels[idx], volumes[idx] - if drop_background: - # background's label is always 0 - foreground = labels != 0 - labels, volumes = labels[foreground], volumes[foreground] + return label_map, labels, volumes diff --git a/requirements.txt b/requirements.txt index 037a6ff..73b9c75 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,4 +12,4 @@ loky>=3.0.0,<4.0.0 wandb nibabel torch -imops>=0.3.0<1.0 +imops>=0.7.0<1.0 From 915f0d581bc038a00166a32b53bd04042bc4eb74 Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Wed, 18 Jan 2023 15:10:01 +0300 Subject: [PATCH 09/16] ubuntu-20.04 for tests --- .github/workflows/tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 8285d35..378e0e1 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -7,7 +7,7 @@ env: jobs: test: - runs-on: ubuntu-latest + runs-on: ubuntu-20.04 strategy: matrix: python-version: [ 3.6, 3.7, 3.8, 3.9, '3.10' ] From 7fd060788d9199c2d12cb02e5a86e5d04429c661 Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Wed, 18 Jan 2023 15:10:09 +0300 Subject: [PATCH 10/16] Update version --- dpipe/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpipe/__version__.py b/dpipe/__version__.py index 8ce9b36..7525d19 100644 --- a/dpipe/__version__.py +++ b/dpipe/__version__.py @@ -1 +1 @@ -__version__ = '0.1.3' +__version__ = '0.1.4' From 8b9df9d9a7c8f35cbb3d948a0cc867a33f1e238e Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Wed, 18 Jan 2023 19:14:48 +0300 Subject: [PATCH 11/16] Fix tests reqs --- tests/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/requirements.txt b/tests/requirements.txt index 9955dec..d29db9b 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,2 +1,3 @@ pytest pytest-cov +pytest-subtests From 94c23b4e7b655bc6ac3bf13756fdb4acfe8aa64c Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Fri, 20 Jan 2023 01:22:38 +0300 Subject: [PATCH 12/16] Fix `padding_values=None` case --- dpipe/predict/shape.py | 8 ++++++-- tests/predict/test_shape.py | 5 ++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/dpipe/predict/shape.py b/dpipe/predict/shape.py index 5e2d4bd..1a79e0b 100644 --- a/dpipe/predict/shape.py +++ b/dpipe/predict/shape.py @@ -87,7 +87,8 @@ def patches_grid(patch_size: AxesLike, stride: AxesLike, axis: AxesLike = None, the predicted patches by aggregating the overlapping regions using the ``combiner`` - Average by default. If ``padding_values`` is not None, the array will be padded to an appropriate shape to make a valid division. - Afterwards the padding is removed. + Afterwards the padding is removed. Otherwise if input cannot be patched without remainder + ``ValueError`` is raised. References ---------- @@ -100,12 +101,15 @@ def decorator(predict): def wrapper(x, *args, **kwargs): input_axis = resolve_deprecation(axis, x.ndim, patch_size, stride) local_size, local_stride = broadcast_to_axis(input_axis, patch_size, stride) + shape = extract(x.shape, input_axis) if valid: - shape = extract(x.shape, input_axis) padded_shape = np.maximum(shape, local_size) new_shape = padded_shape + (local_stride - padded_shape + local_size) % local_stride x = pad_to_shape(x, new_shape, input_axis, padding_values, ratio) + elif ((shape - local_size) < 0).any() or ((local_stride - shape + local_size) % local_stride).any(): + raise ValueError('Input cannot be patched without remainder.') + patches = pmap( predict, diff --git a/tests/predict/test_shape.py b/tests/predict/test_shape.py index 3a65a95..b6aafb0 100644 --- a/tests/predict/test_shape.py +++ b/tests/predict/test_shape.py @@ -15,8 +15,11 @@ def check_equal(**kwargs): check_equal(patch_size=10, stride=1, padding_values=0) check_equal(patch_size=10, stride=1, padding_values=None) check_equal(patch_size=10, stride=10, padding_values=0) - check_equal(patch_size=10, stride=10, padding_values=None) + with pytest.raises(ValueError): + check_equal(patch_size=10, stride=10, padding_values=None) + + check_equal(patch_size=30, stride=1, padding_values=0) with pytest.raises(ValueError): check_equal(patch_size=30, stride=1, padding_values=None) From f7696dd12e028b97b8c3a5857be79cb498b6d237 Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Fri, 20 Jan 2023 01:25:31 +0300 Subject: [PATCH 13/16] 2 more test cases --- tests/predict/test_shape.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/predict/test_shape.py b/tests/predict/test_shape.py index b6aafb0..69e65bb 100644 --- a/tests/predict/test_shape.py +++ b/tests/predict/test_shape.py @@ -23,6 +23,9 @@ def check_equal(**kwargs): with pytest.raises(ValueError): check_equal(patch_size=30, stride=1, padding_values=None) + check_equal(patch_size=9, stride=9, padding_values=None) + check_equal(patch_size=15, stride=12, padding_values=None) + def test_divisible_patches(): def check_equal(**kwargs): From 05224527f72b1a4448f939e0d768d7abbd8b4fd8 Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Fri, 20 Jan 2023 12:58:02 +0300 Subject: [PATCH 14/16] Temporarily remove `Loky` tests --- tests/batch_iter/test_pipeline.py | 37 ++++++++++++++++--------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/tests/batch_iter/test_pipeline.py b/tests/batch_iter/test_pipeline.py index 354f9ef..4871603 100644 --- a/tests/batch_iter/test_pipeline.py +++ b/tests/batch_iter/test_pipeline.py @@ -38,24 +38,25 @@ def test_parallel(): assert abs(faster - delta / 2) < sleep -def test_loky(): - size = 100 - for i, item in enumerate(wrap_pipeline(range(size), Loky(lambda x: x ** 2, n_workers=2))): - assert item == i ** 2 - assert i == size - 1 - # at this point the first worker is killed - # start a new one - for i, item in enumerate(wrap_pipeline(range(size), Loky(lambda x: x ** 2, n_workers=2))): - assert item == i ** 2 - assert i == size - 1 - - # several workers - for i, item in enumerate(wrap_pipeline( - range(size), - Loky(lambda x: x ** 2, n_workers=2), - Loky(lambda x: x ** 2, n_workers=2))): - assert item == i ** 4 - assert i == size - 1 +# TODO: uncomment as soon as #68 is solved +# def test_loky(): +# size = 100 +# for i, item in enumerate(wrap_pipeline(range(size), Loky(lambda x: x ** 2, n_workers=2))): +# assert item == i ** 2 +# assert i == size - 1 +# # at this point the first worker is killed +# # start a new one +# for i, item in enumerate(wrap_pipeline(range(size), Loky(lambda x: x ** 2, n_workers=2))): +# assert item == i ** 2 +# assert i == size - 1 + +# # several workers +# for i, item in enumerate(wrap_pipeline( +# range(size), +# Loky(lambda x: x ** 2, n_workers=2), +# Loky(lambda x: x ** 2, n_workers=2))): +# assert item == i ** 4 +# assert i == size - 1 def test_premature_stop(): From 633c0f4105348d7d0533c08dcc30cac1afb42b35 Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Fri, 20 Jan 2023 13:35:52 +0300 Subject: [PATCH 15/16] Compare counters --- tests/batch_iter/test_pipeline.py | 46 ++++++++++++++++++------------- 1 file changed, 27 insertions(+), 19 deletions(-) diff --git a/tests/batch_iter/test_pipeline.py b/tests/batch_iter/test_pipeline.py index 4871603..e1b2047 100644 --- a/tests/batch_iter/test_pipeline.py +++ b/tests/batch_iter/test_pipeline.py @@ -1,4 +1,5 @@ import time +from collections import Counter from itertools import repeat import pytest @@ -38,25 +39,32 @@ def test_parallel(): assert abs(faster - delta / 2) < sleep -# TODO: uncomment as soon as #68 is solved -# def test_loky(): -# size = 100 -# for i, item in enumerate(wrap_pipeline(range(size), Loky(lambda x: x ** 2, n_workers=2))): -# assert item == i ** 2 -# assert i == size - 1 -# # at this point the first worker is killed -# # start a new one -# for i, item in enumerate(wrap_pipeline(range(size), Loky(lambda x: x ** 2, n_workers=2))): -# assert item == i ** 2 -# assert i == size - 1 - -# # several workers -# for i, item in enumerate(wrap_pipeline( -# range(size), -# Loky(lambda x: x ** 2, n_workers=2), -# Loky(lambda x: x ** 2, n_workers=2))): -# assert item == i ** 4 -# assert i == size - 1 +# TODO: check order of output itmes as soon as #68 is solved +def test_loky(): + size = 100 + + source_items = list(range(size)) + items = [] + + for i, item in enumerate(wrap_pipeline(source_items, Loky(lambda x: x ** 2, n_workers=2))): + items.append(item) + assert Counter(items) == Counter(map(lambda x: x ** 2, source_items)) + # at this point the first worker is killed + # start a new one + items = [] + for i, item in enumerate(wrap_pipeline(range(size), Loky(lambda x: x ** 2, n_workers=2))): + items.append(item) + assert Counter(items) == Counter(map(lambda x: x ** 2, source_items)) + + # several workers + items = [] + for i, item in enumerate(wrap_pipeline( + range(size), + Loky(lambda x: x ** 2, n_workers=2), + Loky(lambda x: x ** 2, n_workers=2))): + items.append(item) + assert Counter(items) == Counter(map(lambda x: x ** 4, source_items)) + assert i == size - 1 def test_premature_stop(): From 9534551b6ed198fb9da6fc3be5535d8827a8164f Mon Sep 17 00:00:00 2001 From: Philipenko Vladimir Date: Fri, 20 Jan 2023 13:48:46 +0300 Subject: [PATCH 16/16] Fix version --- dpipe/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpipe/__version__.py b/dpipe/__version__.py index 7525d19..7fd229a 100644 --- a/dpipe/__version__.py +++ b/dpipe/__version__.py @@ -1 +1 @@ -__version__ = '0.1.4' +__version__ = '0.2.0'