diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 9a0d227fdf..0000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,153 +0,0 @@
-# vim ft=yaml
-# Multiple lines can be made a single "virtual line" because of how Travis
-# munges each line before executing it to print out the exit status. It's okay
-# for it to be on multiple physical lines, so long as you remember: - There
-# can't be any leading "-"s - All newlines will be removed, so use ";"s
-
-dist: xenial
-sudo: true
-language: python
-
-cache:
-  directories:
-    - $HOME/.cache/pip
-env:
-    global:
-        - DEPENDS="six numpy scipy matplotlib h5py pillow pydicom hypothesis"
-        - OPTIONAL_DEPENDS=""
-        - INSTALL_TYPE="setup"
-        - EXTRA_WHEELS="https://5cf40426d9f06eb7461d-6fe47d9331aba7cd62fc36c7196769e4.ssl.cf2.rackcdn.com"
-        - PRE_WHEELS="https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com"
-        - EXTRA_PIP_FLAGS="--find-links=$EXTRA_WHEELS"
-        - PRE_PIP_FLAGS="--pre $EXTRA_PIP_FLAGS --find-links $PRE_WHEELS"
-python:
-    - 3.5
-    - 3.6
-    - 3.7
-matrix:
-  include:
-    - python: 3.4
-      dist: trusty
-      sudo: false
-    - python: 2.7
-      env:
-        - COVERAGE=1
-    # Absolute minimum dependencies
-    - python: 2.7
-      env:
-        - DEPENDS="numpy==1.7.1"
-    # Absolute minimum dependencies plus oldest MPL
-    # Check these against:
-    # nibabel/info.py
-    # doc/source/installation.rst
-    # requirements.txt
-    - python: 2.7
-      env:
-        - DEPENDS="numpy==1.7.1 matplotlib==1.3.1"
-    # Minimum pydicom dependency
-    - python: 2.7
-      env:
-        - DEPENDS="numpy==1.7.1 pydicom==0.9.9 pillow==2.6"
-    # pydicom master branch
-    - python: 3.5
-      env:
-        - DEPENDS="numpy git+https://github.com/pydicom/pydicom.git@master"
-    # test 2.7 against pre-release builds of everything
-    - python: 2.7
-      env:
-        - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS"
-    # test 3.5 against pre-release builds of everything
-    - python: 3.5
-      env:
-        - EXTRA_PIP_FLAGS="$PRE_PIP_FLAGS"
-    # Documentation doctests
-    - python: 2.7
-      env:
-        - DOC_DOC_TEST=1
-    - python: 2.7
-      env:
-        - INSTALL_TYPE=sdist
-    - python: 2.7
-      env:
-        - INSTALL_TYPE=wheel
-    - python: 2.7
-      env:
-        - INSTALL_TYPE=requirements
-    - python: 2.7
-      env:
-        - STYLE=1
-    - python: 3.5
-      env:
-        - STYLE=1
-    - python: 3.5
-      env:
-        - DOC_DOC_TEST=1
-    # Run tests with indexed_gzip present
-    - python: 2.7
-      env:
-        - OPTIONAL_DEPENDS="indexed_gzip"
-    - python: 3.5
-      env:
-        - OPTIONAL_DEPENDS="indexed_gzip"
-before_install:
-    - source tools/travis_tools.sh
-    - python -m pip install --upgrade pip
-    - pip install --upgrade virtualenv
-    - virtualenv --python=python venv
-    - source venv/bin/activate
-    - python --version # just to check
-    - pip install -U pip wheel # needed at one point
-    - retry pip install nose flake8 mock hypothesis # always
-    - pip install $EXTRA_PIP_FLAGS $DEPENDS $OPTIONAL_DEPENDS
-    - if [ "${COVERAGE}" == "1" ]; then
-      pip install coverage;
-      pip install coveralls;
-      pip install codecov;
-      fi
-# command to install dependencies
-install:
-    - |
-      if [ "$INSTALL_TYPE" == "setup" ]; then
-          python setup.py install
-      elif [ "$INSTALL_TYPE" == "sdist" ]; then
-          python setup_egg.py egg_info # check egg_info while we're here
-          python setup_egg.py sdist
-          pip install $EXTRA_PIP_FLAGS dist/*.tar.gz
-      elif [ "$INSTALL_TYPE" == "wheel" ]; then
-          pip install wheel
-          python setup_egg.py bdist_wheel
-          pip install $EXTRA_PIP_FLAGS dist/*.whl
-      elif [ "$INSTALL_TYPE" == "requirements" ]; then
-          pip install $EXTRA_PIP_FLAGS -r requirements.txt
-          python setup.py install
-      fi
-    # Point to nibabel data directory
-    - export NIBABEL_DATA_DIR="$PWD/nibabel-data"
-# command to run tests, e.g. python setup.py test
-script:
-    - |
-      if [ "${STYLE}" == "1" ]; then
-          # Run styles only on core nibabel code.
-          flake8 nibabel
-      else
-          # Change into an innocuous directory and find tests from installation
-          mkdir for_testing
-          cd for_testing
-          if [ "${COVERAGE}" == "1" ]; then
-              cp ../.coveragerc .;
-              COVER_ARGS="--with-coverage --cover-package nibabel";
-          fi
-          if [ "$DOC_DOC_TEST" == "1" ]; then
-              cd ../doc;
-              pip install -r ../doc-requirements.txt
-              make html;
-              make doctest;
-          else
-              nosetests --with-doctest $COVER_ARGS nibabel;
-          fi
-      fi
-after_success:
-    - if [ "${COVERAGE}" == "1" ]; then coveralls; codecov; fi
-
-notifications:
-    webhooks: http://nipy.bic.berkeley.edu:54856/travis
diff --git a/appveyor.yml b/appveyor.yml
index 05510ec886..3fcf1b7ec9 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -4,16 +4,10 @@ environment:
 
   matrix:
-    - PYTHON: C:\Python27
-    - PYTHON: C:\Python27-x64
     - PYTHON: C:\Python34
     - PYTHON: C:\Python34-x64
     - PYTHON: C:\Python35
     - PYTHON: C:\Python35-x64
-    - PYTHON: C:\Python36
-    - PYTHON: C:\Python36-x64
-    - PYTHON: C:\Python37
-    - PYTHON: C:\Python37-x64
 
 install:
   # Prepend newly installed Python to the PATH of this build (this cannot be
@@ -32,4 +26,14 @@ test_script:
   # Change into an innocuous directory and find tests from installation
   - mkdir for_testing
   - cd for_testing
+  # Numpy debug information
+  - python -c "import sys, numpy; print('numpy', numpy.__version__); print('python', sys.version)"
+  # Show all environment variables to ease possible future debugging
+  - set
   - nosetests --with-doctest nibabel
+
+
+on_failure:
+  # enable the next to let the build VM block for up to 60min to log in via RDP and debug
+  - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
+
diff --git a/nibabel/tests/test_image_api.py b/nibabel/tests/test_image_api.py
index c53b012cc2..01b9ff4fdb 100644
--- a/nibabel/tests/test_image_api.py
+++ b/nibabel/tests/test_image_api.py
@@ -27,7 +27,6 @@
 import warnings
 
 from functools import partial
-from itertools import product
 from six import string_types
 
 import numpy as np
@@ -196,20 +195,156 @@ class DataInterfaceMixin(GetSetDtypeMixin):
     Use this mixin if your image has a ``dataobj`` property that contains an
     array or an array-like thing.
     """
-    meth_names = ('get_fdata', 'get_data')
-
     def validate_data_interface(self, imaker, params):
         # Check get data returns array, and caches
         img = imaker()
         assert_equal(img.shape, img.dataobj.shape)
         assert_data_similar(img.dataobj, params)
-        for meth_name in self.meth_names:
+        meth_names = ('get_fdata', 'get_data')
+        for meth_name in meth_names:
             if params['is_proxy']:
-                self._check_proxy_interface(imaker, meth_name)
-            else:  # Array image
-                self._check_array_interface(imaker, meth_name)
+                # Parameters assert this is an array proxy
+                img = imaker()
+                # Does is_proxy agree?
+                assert_true(is_proxy(img.dataobj))
+                # Confirm it is not a numpy array
+                assert_false(isinstance(img.dataobj, np.ndarray))
+                # Confirm it can be converted to a numpy array with asarray
+                proxy_data = np.asarray(img.dataobj)
+                proxy_copy = proxy_data.copy()
+                # Not yet cached, proxy image: in_memory is False
+                assert_false(img.in_memory)
+                # Load with caching='unchanged'
+                method = getattr(img, meth_name)
+                data = method(caching='unchanged')
+                # Still not cached
+                assert_false(img.in_memory)
+                # Default load, does caching
+                data = method()
+                # Data now cached. in_memory is True if either of the get_data
+                # or get_fdata caches are not-None
+                assert_true(img.in_memory)
+                # We previously got proxy_data from disk, but data, which we
+                # have just fetched, is a fresh copy.
+                assert_false(proxy_data is data)
+                # asarray on dataobj, applied above, returns same numerical
+                # values. This might not be true get_fdata operating on huge
+                # integers, but lets assume that's not true here.
+                assert_array_equal(proxy_data, data)
+                # Now caching='unchanged' does nothing, returns cached version
+                data_again = method(caching='unchanged')
+                assert_true(data is data_again)
+                # caching='fill' does nothing because the cache is already full
+                data_yet_again = method(caching='fill')
+                assert_true(data is data_yet_again)
+                # changing array data does not change proxy data, or reloaded
+                # data
+                data[:] = 42
+                assert_array_equal(proxy_data, proxy_copy)
+                assert_array_equal(np.asarray(img.dataobj), proxy_copy)
+                # It does change the result of get_data
+                assert_array_equal(method(), 42)
+                # until we uncache
+                img.uncache()
+                # Which unsets in_memory
+                assert_false(img.in_memory)
+                assert_array_equal(method(), proxy_copy)
+                # Check caching='fill' does cache data
+                img = imaker()
+                method = getattr(img, meth_name)
+                assert_false(img.in_memory)
+                data = method(caching='fill')
+                assert_true(img.in_memory)
+                data_again = method()
+                assert_true(data is data_again)
+                # Check the interaction of caching with get_data, get_fdata.
+                # Caching for `get_data` should have no effect on caching for
+                # get_fdata, and vice versa.
+                # Modify the cached data
+                data[:] = 43
+                # Load using the other data fetch method
+                other_name = set(meth_names).difference({meth_name}).pop()
+                other_method = getattr(img, other_name)
+                other_data = other_method()
+                # We get the original data, not the modified cache
+                assert_array_equal(proxy_data, other_data)
+                assert_false(np.all(data == other_data))
+                # We can modify the other cache, without affecting the first
+                other_data[:] = 44
+                assert_array_equal(other_method(), 44)
+                assert_false(np.all(method() == other_method()))
+                # Check that caching refreshes for new floating point type.
+                if meth_name == 'get_fdata':
+                    img.uncache()
+                    fdata = img.get_fdata()
+                    assert_equal(fdata.dtype, np.float64)
+                    fdata[:] = 42
+                    fdata_back = img.get_fdata()
+                    assert_array_equal(fdata_back, 42)
+                    assert_equal(fdata_back.dtype, np.float64)
+                    # New data dtype, no caching, doesn't use or alter cache
+                    fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4')
+                    # We get back the original read, not the modified cache
+                    assert_array_equal(fdata_new_dt, proxy_data.astype('f4'))
+                    assert_equal(fdata_new_dt.dtype, np.float32)
+                    # The original cache stays in place, for default float64
+                    assert_array_equal(img.get_fdata(), 42)
+                    # And for not-default float32, because we haven't cached
+                    fdata_new_dt[:] = 43
+                    fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4')
+                    assert_array_equal(fdata_new_dt, proxy_data.astype('f4'))
+                    # Until we reset with caching='fill', at which point we
+                    # drop the original float64 cache, and have a float32 cache
+                    fdata_new_dt = img.get_fdata(caching='fill', dtype='f4')
+                    assert_array_equal(fdata_new_dt, proxy_data.astype('f4'))
+                    # We're using the cache, for dtype='f4' reads
+                    fdata_new_dt[:] = 43
+                    assert_array_equal(img.get_fdata(dtype='f4'), 43)
+                    # We've lost the cache for float64 reads (no longer 42)
+                    assert_array_equal(img.get_fdata(), proxy_data)
+            else:  # not proxy
+                for caching in (None, 'fill', 'unchanged'):
+                    img = imaker()
+                    method = getattr(img, meth_name)
+                    get_data_func = (method if caching is None else
+                                     partial(method, caching=caching))
+                    assert_true(isinstance(img.dataobj, np.ndarray))
+                    assert_true(img.in_memory)
+                    data = get_data_func()
+                    # Returned data same object as underlying dataobj if using
+                    # old ``get_data`` method, or using newer ``get_fdata``
+                    # method, where original array was float64.
+                    dataobj_is_data = (img.dataobj.dtype == np.float64
+                                       or method == img.get_data)
+                    # Set something to the output array.
+                    data[:] = 42
+                    get_result_changed = np.all(get_data_func() == 42)
+                    assert_equal(get_result_changed,
+                                 dataobj_is_data or caching != 'unchanged')
+                    if dataobj_is_data:
+                        assert_true(data is img.dataobj)
+                        # Changing array data changes
+                        # data
+                        assert_array_equal(np.asarray(img.dataobj), 42)
+                        # Uncache has no effect
+                        img.uncache()
+                        assert_array_equal(get_data_func(), 42)
+                    else:
+                        assert_false(data is img.dataobj)
+                        assert_false(np.all(np.asarray(img.dataobj) == 42))
+                        # Uncache does have an effect
+                        img.uncache()
+                        assert_false(np.all(get_data_func() == 42))
+                    # in_memory is always true for array images, regardless of
+                    # cache state.
+                    img.uncache()
+                    assert_true(img.in_memory)
+            # Values to get_(f)data caching parameter must be 'fill' or
+            # 'unchanged'
+            assert_raises(ValueError, img.get_data, caching='something')
+            assert_raises(ValueError, img.get_fdata, caching='something')
             # Data shape is same as image shape
-            assert_equal(img.shape, getattr(img, meth_name)().shape)
+            assert_equal(img.shape, method().shape)
             # Values to get_data caching parameter must be 'fill' or
             # 'unchanged'
             assert_raises(ValueError, img.get_data, caching='something')
@@ -219,159 +354,6 @@ def validate_data_interface(self, imaker, params):
         # So is in_memory
         assert_raises(AttributeError, setattr, img, 'in_memory', False)
 
-    def _check_proxy_interface(self, imaker, meth_name):
-        # Parameters assert this is an array proxy
-        img = imaker()
-        # Does is_proxy agree?
-        assert_true(is_proxy(img.dataobj))
-        # Confirm it is not a numpy array
-        assert_false(isinstance(img.dataobj, np.ndarray))
-        # Confirm it can be converted to a numpy array with asarray
-        proxy_data = np.asarray(img.dataobj)
-        proxy_copy = proxy_data.copy()
-        # Not yet cached, proxy image: in_memory is False
-        assert_false(img.in_memory)
-        # Load with caching='unchanged'
-        method = getattr(img, meth_name)
-        data = method(caching='unchanged')
-        # Still not cached
-        assert_false(img.in_memory)
-        # Default load, does caching
-        data = method()
-        # Data now cached. in_memory is True if either of the get_data
-        # or get_fdata caches are not-None
-        assert_true(img.in_memory)
-        # We previously got proxy_data from disk, but data, which we
-        # have just fetched, is a fresh copy.
-        assert_false(proxy_data is data)
-        # asarray on dataobj, applied above, returns same numerical
-        # values. This might not be true get_fdata operating on huge
-        # integers, but lets assume that's not true here.
-        assert_array_equal(proxy_data, data)
-        # Now caching='unchanged' does nothing, returns cached version
-        data_again = method(caching='unchanged')
-        assert_true(data is data_again)
-        # caching='fill' does nothing because the cache is already full
-        data_yet_again = method(caching='fill')
-        assert_true(data is data_yet_again)
-        # changing array data does not change proxy data, or reloaded
-        # data
-        data[:] = 42
-        assert_array_equal(proxy_data, proxy_copy)
-        assert_array_equal(np.asarray(img.dataobj), proxy_copy)
-        # It does change the result of get_data
-        assert_array_equal(method(), 42)
-        # until we uncache
-        img.uncache()
-        # Which unsets in_memory
-        assert_false(img.in_memory)
-        assert_array_equal(method(), proxy_copy)
-        # Check caching='fill' does cache data
-        img = imaker()
-        method = getattr(img, meth_name)
-        assert_false(img.in_memory)
-        data = method(caching='fill')
-        assert_true(img.in_memory)
-        data_again = method()
-        assert_true(data is data_again)
-        # Check the interaction of caching with get_data, get_fdata.
-        # Caching for `get_data` should have no effect on caching for
-        # get_fdata, and vice versa.
-        # Modify the cached data
-        data[:] = 43
-        # Load using the other data fetch method
-        other_name = set(self.meth_names).difference({meth_name}).pop()
-        other_method = getattr(img, other_name)
-        other_data = other_method()
-        # We get the original data, not the modified cache
-        assert_array_equal(proxy_data, other_data)
-        assert_false(np.all(data == other_data))
-        # We can modify the other cache, without affecting the first
-        other_data[:] = 44
-        assert_array_equal(other_method(), 44)
-        assert_false(np.all(method() == other_method()))
-        if meth_name != 'get_fdata':
-            return
-        # Check that caching refreshes for new floating point type.
-        img.uncache()
-        fdata = img.get_fdata()
-        assert_equal(fdata.dtype, np.float64)
-        fdata[:] = 42
-        fdata_back = img.get_fdata()
-        assert_array_equal(fdata_back, 42)
-        assert_equal(fdata_back.dtype, np.float64)
-        # New data dtype, no caching, doesn't use or alter cache
-        fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4')
-        # We get back the original read, not the modified cache
-        assert_array_equal(fdata_new_dt, proxy_data.astype('f4'))
-        assert_equal(fdata_new_dt.dtype, np.float32)
-        # The original cache stays in place, for default float64
-        assert_array_equal(img.get_fdata(), 42)
-        # And for not-default float32, because we haven't cached
-        fdata_new_dt[:] = 43
-        fdata_new_dt = img.get_fdata(caching='unchanged', dtype='f4')
-        assert_array_equal(fdata_new_dt, proxy_data.astype('f4'))
-        # Until we reset with caching='fill', at which point we
-        # drop the original float64 cache, and have a float32 cache
-        fdata_new_dt = img.get_fdata(caching='fill', dtype='f4')
-        assert_array_equal(fdata_new_dt, proxy_data.astype('f4'))
-        # We're using the cache, for dtype='f4' reads
-        fdata_new_dt[:] = 43
-        assert_array_equal(img.get_fdata(dtype='f4'), 43)
-        # We've lost the cache for float64 reads (no longer 42)
-        assert_array_equal(img.get_fdata(), proxy_data)
-
-    def _check_array_interface(self, imaker, meth_name):
-        for caching in (None, 'fill', 'unchanged'):
-            self._check_array_caching(imaker, meth_name, caching)
-
-    def _check_array_caching(self, imaker, meth_name, caching):
-        img = imaker()
-        method = getattr(img, meth_name)
-        get_data_func = (method if caching is None else
-                         partial(method, caching=caching))
-        assert_true(isinstance(img.dataobj, np.ndarray))
-        assert_true(img.in_memory)
-        data = get_data_func()
-        # Returned data same object as underlying dataobj if using
-        # old ``get_data`` method, or using newer ``get_fdata``
-        # method, where original array was float64.
-        arr_dtype = img.dataobj.dtype
-        dataobj_is_data = arr_dtype == np.float64 or method == img.get_data
-        # Set something to the output array.
-        data[:] = 42
-        get_result_changed = np.all(get_data_func() == 42)
-        assert_equal(get_result_changed,
-                     dataobj_is_data or caching != 'unchanged')
-        if dataobj_is_data:
-            assert_true(data is img.dataobj)
-            # Changing array data changes
-            # data
-            assert_array_equal(np.asarray(img.dataobj), 42)
-            # Uncache has no effect
-            img.uncache()
-            assert_array_equal(get_data_func(), 42)
-        else:
-            assert_false(data is img.dataobj)
-            assert_false(np.all(np.asarray(img.dataobj) == 42))
-            # Uncache does have an effect
-            img.uncache()
-            assert_false(np.all(get_data_func() == 42))
-        # in_memory is always true for array images, regardless of
-        # cache state.
-        img.uncache()
-        assert_true(img.in_memory)
-        if meth_name != 'get_fdata':
-            return
-        # Return original array from get_fdata only if the input array is the
-        # requested dtype.
-        float_types = np.sctypes['float']
-        if arr_dtype not in float_types:
-            return
-        for float_type in float_types:
-            data = get_data_func(dtype=float_type)
-            assert_equal(data is img.dataobj, arr_dtype == float_type)
-
     def validate_data_deprecated(self, imaker, params):
         # Check _data property still exists, but raises warning
         img = imaker()
@@ -403,6 +385,7 @@ def validate_shape_deprecated(self, imaker, params):
             assert_equal(len(w), 1)
 
 
+
 class HeaderShapeMixin(object):
     """ Tests that header shape can be set and got
 
@@ -486,49 +469,40 @@ class MakeImageAPI(LoadImageAPI):
     header_maker = None
     # Example shapes for created images
     example_shapes = ((2,), (2, 3), (2, 3, 4), (2, 3, 4, 5))
-    # Supported dtypes for storing to disk
-    storable_dtypes = (np.uint8, np.int16, np.float32)
 
     def obj_params(self):
         # Return any obj_params from superclass
         for func, params in super(MakeImageAPI, self).obj_params():
            yield func, params
-        # Create new images
+        # Create a new images
        aff = np.diag([1, 2, 3, 1])
 
         def make_imaker(arr, aff, header=None):
             return lambda: self.image_maker(arr, aff, header)
+        for shape in self.example_shapes:
+            for dtype in (np.uint8, np.int16, np.float32):
+                arr = np.arange(np.prod(shape), dtype=np.float32).reshape(shape)
+                hdr = self.header_maker()
+                hdr.set_data_dtype(dtype)
+                func = make_imaker(arr.copy(), aff, hdr)
+                params = dict(
+                    dtype=dtype,
+                    affine=aff,
+                    data=arr,
+                    shape=shape,
+                    is_proxy=False)
+                yield func, params
+        if not self.can_save:
+            return
+        # Add a proxy image
+        # We assume that loading from a fileobj creates a proxy image
+        params['is_proxy'] = True
 
-        def make_prox_imaker(arr, aff, hdr):
-
-            def prox_imaker():
-                img = self.image_maker(arr, aff, hdr)
-                rt_img = bytesio_round_trip(img)
-                return self.image_maker(rt_img.dataobj, aff, rt_img.header)
-
-            return prox_imaker
-
-        for shape, stored_dtype in product(self.example_shapes,
-                                           self.storable_dtypes):
-            # To make sure we do not trigger scaling, always use the
-            # stored_dtype for the input array.
-            arr = np.arange(np.prod(shape), dtype=stored_dtype).reshape(shape)
-            hdr = self.header_maker()
-            hdr.set_data_dtype(stored_dtype)
-            func = make_imaker(arr.copy(), aff, hdr)
-            params = dict(
-                dtype=stored_dtype,
-                affine=aff,
-                data=arr,
-                shape=shape,
-                is_proxy=False)
-            yield make_imaker(arr.copy(), aff, hdr), params
-            if not self.can_save:
-                continue
-            # Create proxy images from these array images, by storing via BytesIO.
-            # We assume that loading from a fileobj creates a proxy image.
-            params['is_proxy'] = True
-            yield make_prox_imaker(arr.copy(), aff, hdr), params
+        def prox_imaker():
+            img = self.image_maker(arr, aff, hdr)
+            rt_img = bytesio_round_trip(img)
+            return self.image_maker(rt_img.dataobj, aff, rt_img.header)
+        yield prox_imaker, params
 
 
 class ImageHeaderAPI(MakeImageAPI):
@@ -546,8 +520,6 @@ class TestAnalyzeAPI(ImageHeaderAPI):
     has_scaling = False
     can_save = True
     standard_extension = '.img'
-    # Supported dtypes for storing to disk
-    storable_dtypes = (np.uint8, np.int16, np.int32, np.float32, np.float64)
 
 
 class TestSpatialImageAPI(TestAnalyzeAPI):