TEST: Check non-integral slopes, intercepts in ArrayProxy API #847

Merged · 6 commits · Dec 6, 2019
4 changes: 1 addition & 3 deletions .azure-pipelines/windows.yml
@@ -29,8 +29,7 @@ jobs:
displayName: 'Update build tools'
- script: |
python -m pip install --find-links %EXTRA_WHEELS% %DEPENDS%
-python -m pip install nose mock coverage codecov
-python -m pip install pytest
+python -m pip install nose mock coverage codecov pytest
displayName: 'Install dependencies'
- script: |
python -m pip install .
@@ -41,7 +40,6 @@ jobs:
cd for_testing
cp ../.coveragerc .
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel
-pytest -v ../nibabel/tests/test_affines.py ../nibabel/tests/test_volumeutils.py
displayName: 'Nose tests'
- script: |
cd for_testing
1 change: 0 additions & 1 deletion .travis.yml
@@ -130,7 +130,6 @@ script:
cd for_testing
cp ../.coveragerc .
nosetests --with-doctest --with-coverage --cover-package nibabel nibabel
-pytest -v ../nibabel/tests/test_affines.py ../nibabel/tests/test_volumeutils.py
else
false
fi
9 changes: 6 additions & 3 deletions nibabel/arrayproxy.py
@@ -360,10 +360,13 @@ def _get_scaled(self, dtype, slicer):
scl_slope = np.asanyarray(self._slope)
scl_inter = np.asanyarray(self._inter)
use_dtype = scl_slope.dtype if dtype is None else dtype
-slope = scl_slope.astype(use_dtype)
-inter = scl_inter.astype(use_dtype)
+
+if np.can_cast(scl_slope, use_dtype):
+    scl_slope = scl_slope.astype(use_dtype)
+if np.can_cast(scl_inter, use_dtype):
+    scl_inter = scl_inter.astype(use_dtype)
Member Author:
Unconditionally casting to int types would truncate the new parameters.

Reviewer:
So if it can't cast, but the user asks to, should this maybe spit a warning out?

Member Author:
It's a helper method intended to do a best-effort downcast, but not to the detriment of accuracy. If you ask me to return ints but it would change the values (beyond a floating point rounding error), then the expected behavior is to keep floats. Spitting out a warning here would be extremely noisy.

# Read array and upcast as necessary for big slopes, intercepts
-scaled = apply_read_scaling(self._get_unscaled(slicer=slicer), slope, inter)
+scaled = apply_read_scaling(self._get_unscaled(slicer=slicer), scl_slope, scl_inter)
if dtype is not None:
scaled = scaled.astype(np.promote_types(scaled.dtype, dtype), copy=False)
return scaled
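
As a rough standalone sketch of the best-effort downcast described in the review thread above (the helper name best_effort_cast and the sample values are invented for illustration, not part of the PR), the idea is that scale factors are only narrowed when NumPy reports the cast as safe, so non-integral slopes and intercepts are never truncated:

import numpy as np

def best_effort_cast(value, dtype):
    # Narrow only when the cast cannot lose information; otherwise keep the wider type.
    arr = np.asanyarray(value)
    if np.can_cast(arr.dtype, dtype):
        return arr.astype(dtype)
    return arr

print(best_effort_cast(np.float32(2.0), np.float64).dtype)  # float64: widening is safe
print(best_effort_cast(3.1416, np.int16).dtype)              # float64: float -> int16 is unsafe, value kept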
25 changes: 18 additions & 7 deletions nibabel/tests/test_proxy_api.py
@@ -47,14 +47,15 @@
from .._h5py_compat import h5py, have_h5py
from .. import ecat
from .. import parrec
+from ..casting import have_binary128

from ..arrayproxy import ArrayProxy, is_proxy

from nose import SkipTest
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal, assert_greater_equal)

-from numpy.testing import (assert_almost_equal, assert_array_equal)
+from numpy.testing import assert_almost_equal, assert_array_equal, assert_allclose

from ..testing import data_path as DATA_PATH, assert_dt_equal

@@ -142,7 +143,10 @@ def validate_get_scaled(self, pmaker, params):

for dtype in np.sctypes['float'] + np.sctypes['int'] + np.sctypes['uint']:
out = prox.get_scaled(dtype=dtype)
-assert_almost_equal(out, params['arr_out'])
+# Half-precision is imprecise. Obviously. It's a bad idea, but don't break
+# the test over it.
+rtol = 1e-03 if dtype == np.float16 else 1e-05
+assert_allclose(out, params['arr_out'].astype(out.dtype), rtol=rtol, atol=1e-08)
assert_greater_equal(out.dtype, np.dtype(dtype))
# Shape matches expected shape
assert_equal(out.shape, params['shape'])
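
For context on the relaxed float16 tolerance, a quick standalone check (not part of the PR; 3.1416 is just an illustrative value) shows that half-precision carries only about three significant decimal digits, so comparisons against a float64 reference need a looser rtol:

import numpy as np

print(np.finfo(np.float16).eps)    # 0.000977 -- versus ~1.19e-07 for float32
print(float(np.float16(3.1416)))   # 3.140625 -- already off in the third decimal place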
@@ -192,6 +196,7 @@ class TestAnalyzeProxyAPI(_TestProxyAPI):
shapes = ((2,), (2, 3), (2, 3, 4), (2, 3, 4, 5))
has_slope = False
has_inter = False
+data_dtypes = (np.uint8, np.int16, np.int32, np.float32, np.complex64, np.float64)
array_order = 'F'
# Cannot set offset for Freesurfer
settable_offset = True
@@ -216,11 +221,12 @@ def obj_params(self):
offsets = (self.header_class().get_data_offset(),)
else:
offsets = (0, 16)
-slopes = (1., 2.) if self.has_slope else (1.,)
-inters = (0., 10.) if self.has_inter else (0.,)
-dtypes = (np.uint8, np.int16, np.float32)
+# For non-integral parameters, use a float32 value that can be losslessly cast
+# later (enabling exact checks), then convert back to float for consistency
+slopes = (1., 2., float(np.float32(3.1416))) if self.has_slope else (1.,)
+inters = (0., 10., float(np.float32(2.7183))) if self.has_inter else (0.,)
for shape, dtype, offset, slope, inter in product(self.shapes,
-dtypes,
+self.data_dtypes,
offsets,
slopes,
inters):
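
A standalone illustration of why the new parameters are written as float(np.float32(...)) rather than raw literals (the constants match the ones used in the diff; the variable names are invented): snapping to the nearest float32 first means the value survives storage in a float32 header field without loss, so exact comparisons remain possible:

import numpy as np

slope = float(np.float32(3.1416))   # snap to the nearest float32, then back to a Python float
stored = np.float32(slope)          # what a float32 slope field would hold on disk
print(float(stored) == slope)       # True: the round trip is lossless

print(float(np.float32(3.1416)) == 3.1416)  # False: the raw literal would not round-trip exactly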
@@ -262,7 +268,7 @@ def sio_func():
dtype=dtype,
dtype_out=dtype_out,
arr=arr.copy(),
-arr_out=arr * slope + inter,
+arr_out=arr.astype(dtype_out) * slope + inter,
Member Author:
If arr is float32, arr * slope + inter is float32. This hasn't mattered with the simple factors we've been using, but the new parameters produce different rounding errors in float32 (basic numpy coercion) and float64 (nibabel array scaling).

This change ensures that we produce our expected array in the wider dtype.

shape=shape,
offset=offset,
slope=slope,
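
A standalone demonstration of the coercion difference described in the comment above (array contents and parameters are invented): multiplying a float32 array by Python float scalars stays in float32, whereas computing the expected array in float64 first, as the changed line now does, rounds at the wider precision:

import numpy as np

arr = np.arange(4, dtype=np.float32)
slope, inter = 3.1416, 2.7183                   # illustrative non-integral parameters

naive = arr * slope + inter                     # stays float32; rounding at float32 precision
wide = arr.astype(np.float64) * slope + inter   # rounded at float64 precision instead

print(naive.dtype, wide.dtype)                           # float32 float64
print(np.abs(naive.astype(np.float64) - wide).max())     # small but nonzero rounding gap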
@@ -325,6 +331,10 @@ class TestSpm2AnalyzeProxyAPI(TestSpm99AnalyzeProxyAPI):
class TestNifti1ProxyAPI(TestSpm99AnalyzeProxyAPI):
header_class = Nifti1Header
has_inter = True
+data_dtypes = (np.uint8, np.int16, np.int32, np.float32, np.complex64, np.float64,
+               np.int8, np.uint16, np.uint32, np.int64, np.uint64, np.complex128)
+if have_binary128():
+    data_dtypes += (np.float128, np.complex256)


class TestMGHAPI(TestAnalyzeProxyAPI):
@@ -334,6 +344,7 @@ class TestMGHAPI(TestAnalyzeProxyAPI):
has_inter = False
settable_offset = False
data_endian = '>'
+data_dtypes = (np.uint8, np.int16, np.int32, np.float32)


class TestMinc1API(_TestProxyAPI):