diff --git a/.travis.yml b/.travis.yml index 883867988a..7feeaf7e9d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,8 +16,9 @@ before_install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; else export PATH=/home/travis/miniconda3/bin:$PATH; fi - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi -- if $INSTALL_DEB_DEPENDECIES; then bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh); - fi +- bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) +- sudo apt-get update +- sudo apt-get install xvfb - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; @@ -25,6 +26,13 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi - export FSLOUTPUTTYPE=NIFTI_GZ +# Install vtk and fix numpy installation problem +# Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then travis_retry sudo apt-get install -qq libx11-dev swig; + echo '[x11]' >> $HOME/.numpy-site.cfg; + echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; + echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; + fi install: - conda update --yes conda - conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION @@ -32,8 +40,19 @@ install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi - conda install --yes numpy scipy nose networkx dateutil - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes vtk; fi 
- pip install python-coveralls - pip install nose-cov +# Add tvtk (PIL is required by blockcanvas) +# Install mayavi (see https://github.com/enthought/mayavi/issues/271) +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then + pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz; + pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools; + pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas; + pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy; + pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip; + pip install -e git+https://github.com/enthought/ets.git#egg=ets; + fi - pip install -r requirements.txt # finish remaining requirements - python setup.py install script: diff --git a/CHANGES b/CHANGES index fe8abb464a..1e47aa4657 100644 --- a/CHANGES +++ b/CHANGES @@ -1,6 +1,7 @@ Next release ============ +* FIX: Prevent crash when tvtk is loaded - ETS_TOOLKIT=null (https://github.com/nipy/nipype/pull/973) * ENH: New interfaces in dipy: RESTORE, EstimateResponseSH, CSD and StreamlineTractography (https://github.com/nipy/nipype/pull/1090) * ENH: Added interfaces of AFNI (https://github.com/nipy/nipype/pull/1360, diff --git a/circle.yml b/circle.yml index 9af45ee7c1..dfa54a7469 100644 --- a/circle.yml +++ b/circle.yml @@ -12,14 +12,22 @@ dependencies: - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) override: # Install apt packages - - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni - - echo "source /etc/fsl/fsl.sh" >> $HOME/.profile - - echo "source /etc/afni/afni.sh" >> $HOME/.profile + - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni swig python-vtk xvfb + - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile + - echo 'source /etc/afni/afni.sh' >> $HOME/.profile - mkdir -p ~/examples/ && ln -sf 
/usr/share/fsl-feeds/ ~/examples/feeds + # Enable system-wide vtk + - ln -sf /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/ # Set up python environment - pip install --upgrade pip - pip install -e . - pip install matplotlib sphinx ipython boto coverage dipy + # Add tvtk + - pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz + - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools + - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas + - pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy + - pip install -e git+https://github.com/enthought/ets.git#egg=ets - gem install fakes3 - if [[ ! -d ~/examples/data ]]; then wget "http://tcpdiag.dl.sourceforge.net/project/nipy/nipype/nipype-0.2/nipype-tutorial.tar.bz2" && tar jxvf nipype-tutorial.tar.bz2 && mv nipype-tutorial/* ~/examples/; fi - if [[ ! -d ~/examples/fsl_course_data ]]; then wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt1.tar.gz" && wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/fdt2.tar.gz" && wget -c "http://fsl.fmrib.ox.ac.uk/fslcourse/tbss.tar.gz" && mkdir ~/examples/fsl_course_data && tar zxvf fdt1.tar.gz -C ~/examples/fsl_course_data && tar zxvf fdt2.tar.gz -C ~/examples/fsl_course_data && tar zxvf tbss.tar.gz -C ~/examples/fsl_course_data; fi diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index fa63c69c9d..9e18a96b90 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -1,50 +1,49 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Miscellaneous algorithms for 2D contours and 3D triangularized meshes handling - Change directory to provide relative paths for doctests + .. 
testsetup:: + # Change directory to provide relative paths for doctests >>> import os - >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) + >>> filepath = os.path.dirname(os.path.realpath( __file__ )) >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) >>> os.chdir(datadir) -''' +""" from __future__ import division -from builtins import zip import os.path as op -from warnings import warn - import numpy as np from numpy import linalg as nla +from builtins import zip + from .. import logging from ..external.six import string_types from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, BaseInterfaceInputSpec) -iflogger = logging.getLogger('interface') +from ..interfaces.vtkbase import tvtk +from ..interfaces import vtkbase as VTKInfo +IFLOGGER = logging.getLogger('interface') class TVTKBaseInterface(BaseInterface): + """ A base class for interfaces using VTK """ + _redirect_x = True - _vtk_major = 6 def __init__(self, **inputs): - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - self._vtk_major = int(vtk_build_version[0]) - except ImportError: - iflogger.warning('VTK version-major inspection using tvtk failed.') - + if VTKInfo.no_tvtk(): + raise ImportError('This interface requires tvtk to run.') super(TVTKBaseInterface, self).__init__(**inputs) class WarpPointsInputSpec(BaseInterfaceInputSpec): points = File(exists=True, mandatory=True, - desc=('file containing the point set')) + desc='file containing the point set') warp = File(exists=True, mandatory=True, - desc=('dense deformation field to be applied')) + desc='dense deformation field to be applied') interp = traits.Enum('cubic', 'nearest', 'linear', usedefault=True, mandatory=True, desc='interpolation') out_points = File(name_source='points', name_template='%s_warped', @@ -57,7 +56,6 @@ class WarpPointsOutputSpec(TraitedSpec): class WarpPoints(TVTKBaseInterface): - """ Applies a displacement field to a point set given in vtk format. 
Any discrete deformation field, given in physical coordinates and @@ -65,21 +63,19 @@ class WarpPoints(TVTKBaseInterface): ``warp`` file. FSL interfaces are compatible, for instance any field computed with :class:`nipype.interfaces.fsl.utils.ConvertWarp`. - Example - ------- + Example:: + + from nipype.algorithms.mesh import WarpPoints + wp = WarpPoints() + wp.inputs.points = 'surf1.vtk' + wp.inputs.warp = 'warpfield.nii' + res = wp.run() - >>> from nipype.algorithms.mesh import WarpPoints - >>> wp = WarpPoints() - >>> wp.inputs.points = 'surf1.vtk' - >>> wp.inputs.warp = 'warpfield.nii' - >>> res = wp.run() # doctest: +SKIP """ input_spec = WarpPointsInputSpec output_spec = WarpPointsOutputSpec def _gen_fname(self, in_file, suffix='generated', ext=None): - import os.path as op - fname, fext = op.splitext(op.basename(in_file)) if fext == '.gz': @@ -95,22 +91,16 @@ def _gen_fname(self, in_file, suffix='generated', ext=None): def _run_interface(self, runtime): import nibabel as nb - import numpy as np from scipy import ndimage - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('Interface requires tvtk') - r = tvtk.PolyDataReader(file_name=self.inputs.points) r.update() - mesh = r.output + mesh = VTKInfo.vtk_output(r) points = np.array(mesh.points) warp_dims = nb.funcs.four_to_three(nb.load(self.inputs.warp)) affine = warp_dims[0].affine - voxsize = warp_dims[0].header.get_zooms() + # voxsize = warp_dims[0].header.get_zooms() vox2ras = affine[0:3, 0:3] ras2vox = np.linalg.inv(vox2ras) origin = affine[0:3, 3] @@ -132,21 +122,14 @@ def _run_interface(self, runtime): newpoints = [p + d for p, d in zip(points, disps)] mesh.points = newpoints w = tvtk.PolyDataWriter() - if self._vtk_major <= 5: - w.input = mesh - else: - w.set_input_data_object(mesh) - - w.file_name = self._gen_fname(self.inputs.points, - suffix='warped', - ext='.vtk') + VTKInfo.configure_input_data(w, mesh) + w.file_name = self._gen_fname(self.inputs.points, suffix='warped', 
ext='.vtk') w.write() return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_points'] = self._gen_fname(self.inputs.points, - suffix='warped', + outputs['out_points'] = self._gen_fname(self.inputs.points, suffix='warped', ext='.vtk') return outputs @@ -160,7 +143,7 @@ class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec): desc=('Test surface (vtk format) from which compute ' 'distance.')) metric = traits.Enum('euclidean', 'sqeuclidean', usedefault=True, - desc=('norm used to report distance')) + desc='norm used to report distance') weighting = traits.Enum( 'none', 'area', usedefault=True, desc=('"none": no weighting is performed, surface": edge distance is ' @@ -181,7 +164,6 @@ class ComputeMeshWarpOutputSpec(TraitedSpec): class ComputeMeshWarp(TVTKBaseInterface): - """ Calculates a the vertex-wise warping to get surface2 from surface1. It also reports the average distance of vertices, using the norm specified @@ -192,14 +174,13 @@ class ComputeMeshWarp(TVTKBaseInterface): A point-to-point correspondence between surfaces is required - Example - ------- + Example:: - >>> import nipype.algorithms.mesh as m - >>> dist = m.ComputeMeshWarp() - >>> dist.inputs.surface1 = 'surf1.vtk' - >>> dist.inputs.surface2 = 'surf2.vtk' - >>> res = dist.run() # doctest: +SKIP + import nipype.algorithms.mesh as m + dist = m.ComputeMeshWarp() + dist.inputs.surface1 = 'surf1.vtk' + dist.inputs.surface2 = 'surf2.vtk' + res = dist.run() """ @@ -217,15 +198,10 @@ def _triangle_area(self, A, B, C): return area def _run_interface(self, runtime): - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('Interface requires tvtk') - r1 = tvtk.PolyDataReader(file_name=self.inputs.surface1) r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) - vtk1 = r1.output - vtk2 = r2.output + vtk1 = VTKInfo.vtk_output(r1) + vtk2 = VTKInfo.vtk_output(r2) r1.update() r2.update() assert(len(vtk1.points) == len(vtk2.points)) @@ -242,9 +218,9 @@ def 
_run_interface(self, runtime): errvector = np.apply_along_axis(nla.norm, 1, diff) if self.inputs.metric == 'sqeuclidean': - errvector = errvector ** 2 + errvector **= 2 - if (self.inputs.weighting == 'area'): + if self.inputs.weighting == 'area': faces = vtk1.polys.to_array().reshape(-1, 4).astype(int)[:, 1:] for i, p1 in enumerate(points2): @@ -269,12 +245,7 @@ def _run_interface(self, runtime): out_mesh.point_data.vectors.name = 'warpings' writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_warp)) - - if self._vtk_major <= 5: - writer.input = mesh - else: - writer.set_input_data_object(mesh) - + VTKInfo.configure_input_data(writer, out_mesh) writer.write() self._distance = np.average(errvector, weights=weights) @@ -297,10 +268,10 @@ class MeshWarpMathsInputSpec(BaseInterfaceInputSpec): operator = traits.Either( float_trait, File(exists=True), default=1.0, mandatory=True, - desc=('image, float or tuple of floats to act as operator')) + desc='image, float or tuple of floats to act as operator') operation = traits.Enum('sum', 'sub', 'mul', 'div', usedefault=True, - desc=('operation to be performed')) + desc='operation to be performed') out_warp = File('warp_maths.vtk', usedefault=True, desc='vtk file based on in_surf and warpings mapping it ' @@ -317,7 +288,6 @@ class MeshWarpMathsOutputSpec(TraitedSpec): class MeshWarpMaths(TVTKBaseInterface): - """ Performs the most basic mathematical operations on the warping field defined at each vertex of the input surface. 
A surface with scalar @@ -328,15 +298,14 @@ class MeshWarpMaths(TVTKBaseInterface): A point-to-point correspondence between surfaces is required - Example - ------- + Example:: - >>> import nipype.algorithms.mesh as m - >>> mmath = m.MeshWarpMaths() - >>> mmath.inputs.in_surf = 'surf1.vtk' - >>> mmath.inputs.operator = 'surf2.vtk' - >>> mmath.inputs.operation = 'mul' - >>> res = mmath.run() # doctest: +SKIP + import nipype.algorithms.mesh as m + mmath = m.MeshWarpMaths() + mmath.inputs.in_surf = 'surf1.vtk' + mmath.inputs.operator = 'surf2.vtk' + mmath.inputs.operation = 'mul' + res = mmath.run() """ @@ -344,25 +313,20 @@ class MeshWarpMaths(TVTKBaseInterface): output_spec = MeshWarpMathsOutputSpec def _run_interface(self, runtime): - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('Interface requires tvtk') - r1 = tvtk.PolyDataReader(file_name=self.inputs.in_surf) - vtk1 = r1.output + vtk1 = VTKInfo.vtk_output(r1) r1.update() points1 = np.array(vtk1.points) if vtk1.point_data.vectors is None: - raise RuntimeError(('No warping field was found in in_surf')) + raise RuntimeError('No warping field was found in in_surf') operator = self.inputs.operator opfield = np.ones_like(points1) if isinstance(operator, string_types): r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) - vtk2 = r2.output + vtk2 = VTKInfo.vtk_output(r2) r2.update() assert(len(points1) == len(vtk2.points)) @@ -373,7 +337,7 @@ def _run_interface(self, runtime): if opfield is None: raise RuntimeError( - ('No operator values found in operator file')) + 'No operator values found in operator file') opfield = np.array(opfield) @@ -395,25 +359,15 @@ def _run_interface(self, runtime): warping /= opfield vtk1.point_data.vectors = warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_warp)) - if self._vtk_major <= 5: - writer.input = vtk1 - else: - writer.set_input_data_object(vtk1) + writer = 
tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) + VTKInfo.configure_input_data(writer, vtk1) writer.write() vtk1.point_data.vectors = None vtk1.points = points1 + warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_file)) - - if self._vtk_major <= 5: - writer.input = vtk1 - else: - writer.set_input_data_object(vtk1) + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_file)) + VTKInfo.configure_input_data(writer, vtk1) writer.write() - return runtime def _list_outputs(self): @@ -424,7 +378,6 @@ def _list_outputs(self): class P2PDistance(ComputeMeshWarp): - """ Calculates a point-to-point (p2p) distance between two corresponding VTK-readable meshes or contours. @@ -437,6 +390,5 @@ class P2PDistance(ComputeMeshWarp): def __init__(self, **inputs): super(P2PDistance, self).__init__(**inputs) - warn(('This interface has been deprecated since 1.0, please use ' - 'ComputeMeshWarp'), - DeprecationWarning) + IFLOGGER.warn('This interface has been deprecated since 1.0, please use ' + 'ComputeMeshWarp') diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 031881b422..38edb8ecef 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -6,72 +6,98 @@ from shutil import rmtree from tempfile import mkdtemp -from nipype.testing import (assert_equal, skipif, +from nipype.testing import (assert_equal, assert_raises, skipif, assert_almost_equal, example_data) - import numpy as np - from nipype.algorithms import mesh as m +from ...interfaces import vtkbase as VTKInfo -notvtk = True -import platform -if 'darwin' not in platform.system().lower(): - try: - from tvtk.api import tvtk - notvtk = False - except ImportError: - pass - -@skipif(notvtk) def test_ident_distances(): tempdir = mkdtemp() curdir = os.getcwd() os.chdir(tempdir) - in_surf = example_data('surf01.vtk') - dist_ident = m.ComputeMeshWarp() - dist_ident.inputs.surface1 = 
in_surf - dist_ident.inputs.surface2 = in_surf - dist_ident.inputs.out_file = os.path.join(tempdir, 'distance.npy') - res = dist_ident.run() - yield assert_equal, res.outputs.distance, 0.0 - - dist_ident.inputs.weighting = 'area' - res = dist_ident.run() - yield assert_equal, res.outputs.distance, 0.0 + + if VTKInfo.no_tvtk(): + yield assert_raises, ImportError, m.ComputeMeshWarp + else: + in_surf = example_data('surf01.vtk') + dist_ident = m.ComputeMeshWarp() + dist_ident.inputs.surface1 = in_surf + dist_ident.inputs.surface2 = in_surf + dist_ident.inputs.out_file = os.path.join(tempdir, 'distance.npy') + res = dist_ident.run() + yield assert_equal, res.outputs.distance, 0.0 + + dist_ident.inputs.weighting = 'area' + res = dist_ident.run() + yield assert_equal, res.outputs.distance, 0.0 os.chdir(curdir) rmtree(tempdir) -@skipif(notvtk) def test_trans_distances(): tempdir = mkdtemp() - in_surf = example_data('surf01.vtk') - warped_surf = os.path.join(tempdir, 'warped.vtk') + curdir = os.getcwd() + os.chdir(tempdir) + + if VTKInfo.no_tvtk(): + yield assert_raises, ImportError, m.ComputeMeshWarp + else: + from ...interfaces.vtkbase import tvtk + + in_surf = example_data('surf01.vtk') + warped_surf = os.path.join(tempdir, 'warped.vtk') + + inc = np.array([0.7, 0.3, -0.2]) + + r1 = tvtk.PolyDataReader(file_name=in_surf) + vtk1 = VTKInfo.vtk_output(r1) + r1.update() + vtk1.points = np.array(vtk1.points) + inc + + writer = tvtk.PolyDataWriter(file_name=warped_surf) + VTKInfo.configure_input_data(writer, vtk1) + writer.write() + + dist = m.ComputeMeshWarp() + dist.inputs.surface1 = in_surf + dist.inputs.surface2 = warped_surf + dist.inputs.out_file = os.path.join(tempdir, 'distance.npy') + res = dist.run() + yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 + dist.inputs.weighting = 'area' + res = dist.run() + yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 + + os.chdir(curdir) + rmtree(tempdir) + +def test_warppoints(): + 
tempdir = mkdtemp() + curdir = os.getcwd() + os.chdir(tempdir) + + if VTKInfo.no_tvtk(): + yield assert_raises, ImportError, m.WarpPoints + + # TODO: include regression tests for when tvtk is installed + + os.chdir(curdir) + rmtree(tempdir) + + +def test_meshwarpmaths(): + tempdir = mkdtemp() curdir = os.getcwd() os.chdir(tempdir) - inc = np.array([0.7, 0.3, -0.2]) - - r1 = tvtk.PolyDataReader(file_name=in_surf) - vtk1 = r1.output - r1.update() - vtk1.points = np.array(vtk1.points) + inc - - writer = tvtk.PolyDataWriter(file_name=warped_surf) - writer.set_input_data(vtk1) - writer.write() - - dist = m.ComputeMeshWarp() - dist.inputs.surface1 = in_surf - dist.inputs.surface2 = warped_surf - dist.inputs.out_file = os.path.join(tempdir, 'distance.npy') - res = dist.run() - yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 - dist.inputs.weighting = 'area' - res = dist.run() - yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 + + if VTKInfo.no_tvtk(): + yield assert_raises, ImportError, m.MeshWarpMaths + + # TODO: include regression tests for when tvtk is installed os.chdir(curdir) rmtree(tempdir) diff --git a/nipype/info.py b/nipype/info.py index 14252c2fdf..aa9db1a4b6 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -140,5 +140,6 @@ def get_nipype_gitversion(): "nose>=%s" % NOSE_MIN_VERSION, "future>=%s" % FUTURE_MIN_VERSION, "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, - "prov>=%s" % PROV_MIN_VERSION] + "prov>=%s" % PROV_MIN_VERSION, + "xvfbwrapper"] STATUS = 'stable' diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 098e6223bd..eb2d406532 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -64,17 +64,6 @@ def __init__(self, value): def __str__(self): return repr(self.value) - -def _unlock_display(ndisplay): - lockf = os.path.join('/tmp', '.X%d-lock' % ndisplay) - try: - os.remove(lockf) - except: - return False - - return True - - def _exists_in_path(cmd, environ): ''' Based on 
a code snippet from @@ -870,7 +859,7 @@ def _outputs_help(cls): """ helpstr = ['Outputs::', ''] if cls.output_spec: - outputs = cls.output_spec() + outputs = cls.output_spec() #pylint: disable=E1102 for name, spec in sorted(outputs.traits(transient=None).items()): helpstr += cls._get_trait_desc(outputs, name, spec) if len(helpstr) == 2: @@ -882,7 +871,8 @@ def _outputs(self): """ outputs = None if self.output_spec: - outputs = self.output_spec() + outputs = self.output_spec() #pylint: disable=E1102 + return outputs @classmethod @@ -987,7 +977,10 @@ def _run_wrapper(self, runtime): vdisp = Xvfb(nolisten='tcp') vdisp.start() - vdisp_num = vdisp.vdisplay_num + try: + vdisp_num = vdisp.new_display + except AttributeError: # outdated version of xvfbwrapper + vdisp_num = vdisp.vdisplay_num iflogger.info('Redirecting X to :%d' % vdisp_num) runtime.environ['DISPLAY'] = ':%d' % vdisp_num @@ -995,14 +988,7 @@ def _run_wrapper(self, runtime): runtime = self._run_interface(runtime) if self._redirect_x: - if sysdisplay is None: - os.unsetenv('DISPLAY') - else: - os.environ['DISPLAY'] = sysdisplay - - iflogger.info('Freeing X :%d' % vdisp_num) vdisp.stop() - _unlock_display(vdisp_num) return runtime @@ -1465,8 +1451,8 @@ def _get_environ(self): def version_from_command(self, flag='-v'): cmdname = self.cmd.split()[0] - if _exists_in_path(cmdname): - env = dict(os.environ) + env = dict(os.environ) + if _exists_in_path(cmdname, env): out_environ = self._get_environ() env.update(out_environ) proc = subprocess.Popen(' '.join((cmdname, flag)), @@ -1628,7 +1614,7 @@ def _list_outputs(self): metadata = dict(name_source=lambda t: t is not None) traits = self.inputs.traits(**metadata) if traits: - outputs = self.output_spec().get() + outputs = self.output_spec().get() #pylint: disable=E1102 for name, trait_spec in traits.items(): out_name = name if trait_spec.output_name is not None: @@ -1746,7 +1732,7 @@ class SEMLikeCommandLine(CommandLine): """ def _list_outputs(self): - outputs = 
self.output_spec().get() + outputs = self.output_spec().get() #pylint: disable=E1102 return self._outputs_from_inputs(outputs) def _outputs_from_inputs(self, outputs): diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 1892466989..8e58b0d8cf 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -8,11 +8,12 @@ -------- See the docstrings of the individual classes for examples. - Change directory to provide relative paths for doctests - >>> import os - >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) - >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) - >>> os.chdir(datadir) + .. testsetup:: + # Change directory to provide relative paths for doctests + import os + filepath = os.path.dirname(os.path.realpath( __file__ )) + datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) + os.chdir(datadir) """ from __future__ import division @@ -20,6 +21,7 @@ from builtins import range import os +import os.path as op from glob import glob import warnings import tempfile @@ -41,7 +43,7 @@ class CopyGeomInputSpec(FSLCommandInputSpec): dest_file = File(exists=True, mandatory=True, argstr="%s", position=1, desc="destination image", copyfile=True, output_name='out_file', name_source='dest_file', name_template='%s') - ignore_dims = traits.Bool(desc=('Do not copy image dimensions'), + ignore_dims = traits.Bool(desc='Do not copy image dimensions', argstr='-d', position="-1") @@ -89,7 +91,7 @@ class ImageMeantsInputSpec(FSLCommandInputSpec): argstr='-o %s', genfile=True, hash_files=False) mask = File(exists=True, desc='input 3D mask', argstr='-m %s') spatial_coord = traits.List(traits.Int, - desc=(' requested spatial coordinate ' + desc=(' requested spatial coordinate ' '(instead of mask)'), argstr='-c %s') use_mm = traits.Bool(desc=('use mm instead of voxel coordinates (for -c ' @@ -762,8 +764,8 @@ def _list_outputs(self): if isdefined(self.inputs.stat_image2) and ( not 
isdefined(self.inputs.show_negative_stats) or not self.inputs.show_negative_stats): - stem = "%s_and_%s" % (split_filename(self.inputs.stat_image)[1], - split_filename(self.inputs.stat_image2)[1]) + stem = "%s_and_%s" % (split_filename(self.inputs.stat_image)[1], + split_filename(self.inputs.stat_image2)[1]) else: stem = split_filename(self.inputs.stat_image)[1] out_file = self._gen_fname(stem, suffix='_overlay') @@ -820,7 +822,8 @@ class SlicerInputSpec(FSLCommandInputSpec): xor=_xor_options, requires=['image_width'], desc=('output every n axial slices into one ' 'picture')) - image_width = traits.Int(position=-2, argstr='%d', desc='max picture width') + image_width = traits.Int( + position=-2, argstr='%d', desc='max picture width') out_file = File(position=-1, genfile=True, argstr='%s', desc='picture to write', hash_files=False) scaling = traits.Float(position=0, argstr='-s %f', desc='image scale') @@ -1567,9 +1570,11 @@ def _list_outputs(self): outputs['complex_out_file'] = self._get_output('complex_out_file') elif self.inputs.real_cartesian: outputs['real_out_file'] = self._get_output('real_out_file') - outputs['imaginary_out_file'] = self._get_output('imaginary_out_file') + outputs['imaginary_out_file'] = self._get_output( + 'imaginary_out_file') elif self.inputs.real_polar: - outputs['magnitude_out_file'] = self._get_output('magnitude_out_file') + outputs['magnitude_out_file'] = self._get_output( + 'magnitude_out_file') outputs['phase_out_file'] = self._get_output('phase_out_file') return outputs @@ -1632,7 +1637,8 @@ class WarpUtilsInputSpec(FSLCommandInputSpec): class WarpUtilsOutputSpec(TraitedSpec): - out_file = File(desc=('Name of output file, containing the warp as field or coefficients.')) + out_file = File( + desc=('Name of output file, containing the warp as field or coefficients.')) out_jacobian = File(desc=('Name of output file, containing the map of the determinant of ' 'the Jacobian')) @@ -1690,7 +1696,7 @@ def _parse_inputs(self, skip=None): 
class ConvertWarpInputSpec(FSLCommandInputSpec): reference = File(exists=True, argstr='--ref=%s', mandatory=True, position=1, - desc=('Name of a file in target space of the full transform.')) + desc='Name of a file in target space of the full transform.') out_file = File(argstr='--out=%s', position=-1, name_source=['reference'], name_template='%s_concatwarp', output_name='out_file', @@ -1702,71 +1708,71 @@ class ConvertWarpInputSpec(FSLCommandInputSpec): desc='filename for pre-transform (affine matrix)') warp1 = File(exists=True, argstr='--warp1=%s', - desc=('Name of file containing initial warp-fields/coefficients (follows premat). This could e.g. be a ' - 'fnirt-transform from a subjects structural scan to an average of a group ' - 'of subjects.')) + desc='Name of file containing initial warp-fields/coefficients (follows premat). This could e.g. be a ' + 'fnirt-transform from a subjects structural scan to an average of a group ' + 'of subjects.') midmat = File(exists=True, argstr="--midmat=%s", desc="Name of file containing mid-warp-affine transform") warp2 = File(exists=True, argstr='--warp2=%s', - desc=('Name of file containing secondary warp-fields/coefficients (after warp1/midmat but before postmat). This could e.g. be a ' - 'fnirt-transform from the average of a group of subjects to some standard ' - 'space (e.g. MNI152).')) + desc='Name of file containing secondary warp-fields/coefficients (after warp1/midmat but before postmat). This could e.g. be a ' + 'fnirt-transform from the average of a group of subjects to some standard ' + 'space (e.g. MNI152).') postmat = File(exists=True, argstr='--postmat=%s', - desc=('Name of file containing an affine transform (applied last). It could e.g. be an affine ' - 'transform that maps the MNI152-space into a better approximation to the ' - 'Talairach-space (if indeed there is one).')) + desc='Name of file containing an affine transform (applied last). It could e.g. 
be an affine ' + 'transform that maps the MNI152-space into a better approximation to the ' + 'Talairach-space (if indeed there is one).') shift_in_file = File(exists=True, argstr='--shiftmap=%s', - desc=('Name of file containing a "shiftmap", a non-linear transform with ' - 'displacements only in one direction (applied first, before premat). This would typically be a ' - 'fieldmap that has been pre-processed using fugue that maps a ' - 'subjects functional (EPI) data onto an undistorted space (i.e. a space ' - 'that corresponds to his/her true anatomy).')) + desc='Name of file containing a "shiftmap", a non-linear transform with ' + 'displacements only in one direction (applied first, before premat). This would typically be a ' + 'fieldmap that has been pre-processed using fugue that maps a ' + 'subjects functional (EPI) data onto an undistorted space (i.e. a space ' + 'that corresponds to his/her true anatomy).') shift_direction = traits.Enum('y-', 'y', 'x', 'x-', 'z', 'z-', argstr="--shiftdir=%s", requires=['shift_in_file'], - desc=('Indicates the direction that the distortions from ' - '--shiftmap goes. It depends on the direction and ' - 'polarity of the phase-encoding in the EPI sequence.')) + desc='Indicates the direction that the distortions from ' + '--shiftmap goes. 
It depends on the direction and ' + 'polarity of the phase-encoding in the EPI sequence.') cons_jacobian = traits.Bool(False, argstr='--constrainj', - desc=('Constrain the Jacobian of the warpfield to lie within specified ' - 'min/max limits.')) + desc='Constrain the Jacobian of the warpfield to lie within specified ' + 'min/max limits.') jacobian_min = traits.Float(argstr='--jmin=%f', - desc=('Minimum acceptable Jacobian value for ' - 'constraint (default 0.01)')) + desc='Minimum acceptable Jacobian value for ' + 'constraint (default 0.01)') jacobian_max = traits.Float(argstr='--jmax=%f', - desc=('Maximum acceptable Jacobian value for ' - 'constraint (default 100.0)')) + desc='Maximum acceptable Jacobian value for ' + 'constraint (default 100.0)') abswarp = traits.Bool(argstr='--abs', xor=['relwarp'], - desc=('If set it indicates that the warps in --warp1 and --warp2 should be ' - 'interpreted as absolute. I.e. the values in --warp1/2 are the ' - 'coordinates in the next space, rather than displacements. This flag ' - 'is ignored if --warp1/2 was created by fnirt, which always creates ' - 'relative displacements.')) + desc='If set it indicates that the warps in --warp1 and --warp2 should be ' + 'interpreted as absolute. I.e. the values in --warp1/2 are the ' + 'coordinates in the next space, rather than displacements. This flag ' + 'is ignored if --warp1/2 was created by fnirt, which always creates ' + 'relative displacements.') relwarp = traits.Bool(argstr='--rel', xor=['abswarp'], - desc=('If set it indicates that the warps in --warp1/2 should be interpreted ' - 'as relative. I.e. the values in --warp1/2 are displacements from the ' - 'coordinates in the next space.')) + desc='If set it indicates that the warps in --warp1/2 should be interpreted ' + 'as relative. I.e. 
the values in --warp1/2 are displacements from the ' + 'coordinates in the next space.') out_abswarp = traits.Bool(argstr='--absout', xor=['out_relwarp'], - desc=('If set it indicates that the warps in --out should be absolute, i.e. ' - 'the values in --out are displacements from the coordinates in --ref.')) + desc='If set it indicates that the warps in --out should be absolute, i.e. ' + 'the values in --out are displacements from the coordinates in --ref.') out_relwarp = traits.Bool(argstr='--relout', xor=['out_abswarp'], - desc=('If set it indicates that the warps in --out should be relative, i.e. ' - 'the values in --out are displacements from the coordinates in --ref.')) + desc='If set it indicates that the warps in --out should be relative, i.e. ' + 'the values in --out are displacements from the coordinates in --ref.') class ConvertWarpOutputSpec(TraitedSpec): out_file = File(exists=True, - desc=('Name of output file, containing the warp as field or coefficients.')) + desc='Name of output file, containing the warp as field or coefficients.') class ConvertWarp(FSLCommand): @@ -1785,7 +1791,7 @@ class ConvertWarp(FSLCommand): >>> warputils.inputs.output_type = "NIFTI_GZ" >>> warputils.cmdline # doctest: +ELLIPSIS 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' - >>> res = invwarp.run() # doctest: +SKIP + >>> res = warputils.run() # doctest: +SKIP """ @@ -1797,16 +1803,16 @@ class ConvertWarp(FSLCommand): class WarpPointsBaseInputSpec(CommandLineInputSpec): in_coords = File(exists=True, position=-1, argstr='%s', mandatory=True, - desc=('filename of file containing coordinates')) + desc='filename of file containing coordinates') xfm_file = File(exists=True, argstr='-xfm %s', xor=['warp_file'], - desc=('filename of affine transform (e.g. source2dest.mat)')) + desc='filename of affine transform (e.g. source2dest.mat)') warp_file = File(exists=True, argstr='-warp %s', xor=['xfm_file'], - desc=('filename of warpfield (e.g. 
' - 'intermediate2dest_warp.nii.gz)')) + desc='filename of warpfield (e.g. ' + 'intermediate2dest_warp.nii.gz)') coord_vox = traits.Bool(True, argstr='-vox', xor=['coord_mm'], - desc=('all coordinates in voxels - default')) + desc='all coordinates in voxels - default') coord_mm = traits.Bool(False, argstr='-mm', xor=['coord_vox'], - desc=('all coordinates in mm')) + desc='all coordinates in mm') out_file = File(name_source='in_coords', name_template='%s_warped', output_name='out_file', desc='output file name') @@ -1814,14 +1820,14 @@ class WarpPointsBaseInputSpec(CommandLineInputSpec): class WarpPointsInputSpec(WarpPointsBaseInputSpec): src_file = File(exists=True, argstr='-src %s', mandatory=True, - desc=('filename of source image')) + desc='filename of source image') dest_file = File(exists=True, argstr='-dest %s', mandatory=True, - desc=('filename of destination image')) + desc='filename of destination image') class WarpPointsOutputSpec(TraitedSpec): out_file = File(exists=True, - desc=('Name of output file, containing the warp as field or coefficients.')) + desc='Name of output file, containing the warp as field or coefficients.') class WarpPoints(CommandLine): @@ -1843,7 +1849,7 @@ class WarpPoints(CommandLine): >>> warppoints.inputs.coord_mm = True >>> warppoints.cmdline # doctest: +ELLIPSIS 'img2imgcoord -mm -dest T1.nii -src epi.nii -warp warpfield.nii surf.txt' - >>> res = invwarp.run() # doctest: +SKIP + >>> res = warppoints.run() # doctest: +SKIP """ @@ -1863,16 +1869,15 @@ def __init__(self, command=None, **inputs): def _format_arg(self, name, trait_spec, value): if name == 'out_file': return '' - else: - return super(WarpPoints, self)._format_arg(name, trait_spec, value) - def _parse_inputs(self, skip=None): - import os.path as op + return super(WarpPoints, self)._format_arg(name, trait_spec, value) + def _parse_inputs(self, skip=None): fname, ext = op.splitext(self.inputs.in_coords) setattr(self, '_in_file', fname) setattr(self, '_outformat', ext[1:]) 
- first_args = super(WarpPoints, self)._parse_inputs(skip=['in_coords', 'out_file']) + first_args = super(WarpPoints, self)._parse_inputs( + skip=['in_coords', 'out_file']) second_args = fname + '.txt' @@ -1885,23 +1890,15 @@ def _parse_inputs(self, skip=None): return first_args + [second_args] def _vtk_to_coords(self, in_file, out_file=None): - import os.path as op - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('This interface requires tvtk to run.') - - vtk_major = 5 - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - vtk_major = int(vtk_build_version[0]) - except ImportError: - iflogger.warning('VTK version-major inspection using tvtk failed.') + from ..vtkbase import tvtk + from ...interfaces import vtkbase as VTKInfo + + if VTKInfo.no_tvtk(): + raise ImportError('TVTK is required and tvtk package was not found') reader = tvtk.PolyDataReader(file_name=in_file + '.vtk') reader.update() - - mesh = reader.output if vtk_major < 6 else reader.get_output() + mesh = VTKInfo.vtk_output(reader) points = mesh.points if out_file is None: @@ -1911,46 +1908,31 @@ def _vtk_to_coords(self, in_file, out_file=None): return out_file def _coords_to_vtk(self, points, out_file): - import os.path as op - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('This interface requires tvtk to run.') - - vtk_major = 5 - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - vtk_major = int(vtk_build_version[0]) - except ImportError: - iflogger.warning('VTK version-major inspection using tvtk failed.') + from ..vtkbase import tvtk + from ...interfaces import vtkbase as VTKInfo + + if VTKInfo.no_tvtk(): + raise ImportError('TVTK is required and tvtk package was not found') reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() - mesh = reader.output if vtk_major < 6 else reader.get_output() + mesh = VTKInfo.vtk_output(reader) mesh.points = points writer = 
tvtk.PolyDataWriter(file_name=out_file) - if vtk_major < 6: - writer.input = mesh - else: - writer.set_input_data_object(mesh) + VTKInfo.configure_input_data(writer, mesh) writer.write() def _trk_to_coords(self, in_file, out_file=None): - raise NotImplementedError('trk files are not yet supported') - try: - from nibabel.trackvis import TrackvisFile - except ImportError: - raise ImportError('This interface requires nibabel to run') - + from nibabel.trackvis import TrackvisFile trkfile = TrackvisFile.from_file(in_file) streamlines = trkfile.streamlines if out_file is None: out_file, _ = op.splitext(in_file) - np.savetxt(points, out_file + '.txt') + np.savetxt(streamlines, out_file + '.txt') return out_file + '.txt' def _coords_to_trk(self, points, out_file): @@ -1973,7 +1955,8 @@ def _run_interface(self, runtime): self._trk_to_coords(fname, out_file=tmpfile) runtime = super(WarpPoints, self)._run_interface(runtime) - newpoints = np.fromstring('\n'.join(runtime.stdout.split('\n')[1:]), sep=' ') + newpoints = np.fromstring( + '\n'.join(runtime.stdout.split('\n')[1:]), sep=' ') if tmpfile is not None: try: @@ -2024,7 +2007,7 @@ class WarpPointsToStd(WarpPoints): >>> warppoints.inputs.coord_mm = True >>> warppoints.cmdline # doctest: +ELLIPSIS 'img2stdcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' - >>> res = invwarp.run() # doctest: +SKIP + >>> res = warppoints.run() # doctest: +SKIP """ @@ -2035,22 +2018,27 @@ class WarpPointsToStd(WarpPoints): class MotionOutliersInputSpec(FSLCommandInputSpec): - in_file = File(exists=True, mandatory=True, desc="unfiltered 4D image", argstr="-i %s") + in_file = File( + exists=True, mandatory=True, desc="unfiltered 4D image", argstr="-i %s") out_file = File(argstr="-o %s", name_source='in_file', name_template='%s_outliers.txt', keep_extension=True, desc='output outlier file name', hash_files=False) - mask = File(exists=True, argstr="-m %s", desc="mask image for calculating metric") - metric = traits.Enum('refrms', 
['refrms', 'dvars', 'refmse', 'fd', 'fdrms'], argstr="--%s", desc="metrics: refrms - RMS intensity difference to reference volume as metric [default metric],\ -refmse - Mean Square Error version of refrms (used in original version of fsl_motion_outliers) \ -dvars - DVARS \ -fd - frame displacement \ -fdrms - FD with RMS matrix calculation") - threshold = traits.Float(argstr="--thresh=%g", desc="specify absolute threshold value (otherwise use box-plot cutoff = P75 + 1.5*IQR)") - no_motion_correction = traits.Bool(argstr="--nomoco", desc="do not run motion correction (assumed already done)") - dummy = traits.Int(argstr="--dummy=%d", desc='number of dummy scans to delete (before running anything and creating EVs)') + mask = File( + exists=True, argstr="-m %s", desc="mask image for calculating metric") + metric = traits.Enum( + 'refrms', ['refrms', 'dvars', 'refmse', 'fd', 'fdrms'], argstr="--%s", + desc='metrics: refrms - RMS intensity difference to reference volume as metric [default metric], ' + 'refmse - Mean Square Error version of refrms (used in original version of fsl_motion_outliers), ' + 'dvars - DVARS, fd - frame displacement, fdrms - FD with RMS matrix calculation') + threshold = traits.Float(argstr="--thresh=%g", + desc="specify absolute threshold value (otherwise use box-plot cutoff = P75 + 1.5*IQR)") + no_motion_correction = traits.Bool( + argstr="--nomoco", desc="do not run motion correction (assumed already done)") + dummy = traits.Int(argstr="--dummy=%d", + desc='number of dummy scans to delete (before running anything and creating EVs)') out_metric_values = File(argstr="-s %s", name_source='in_file', name_template='%s_metrics.txt', keep_extension=True, desc='output metric values (DVARS etc.) file name', hash_files=False) - out_metric_plot = File(argstr="-p %s", name_source='in_file', name_template='%s_metrics.png', - keep_extension=True, desc='output metric values plot (DVARS etc.) 
file name', hash_files=False) + out_metric_plot = File(argstr="-p %s", name_source='in_file', name_template='%s_metrics.png', hash_files=False, + keep_extension=True, desc='output metric values plot (DVARS etc.) file name') class MotionOutliersOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/vtkbase.py b/nipype/interfaces/vtkbase.py new file mode 100644 index 0000000000..452203367c --- /dev/null +++ b/nipype/interfaces/vtkbase.py @@ -0,0 +1,81 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +""" +vtkbase provides some helpers to use VTK through the tvtk package (mayavi) + +Code using tvtk should import it through this module +""" + +import os +from .. import logging + +iflogger = logging.getLogger('interface') + +# Check that VTK can be imported and get version +_vtk_version = None +try: + import vtk + _vtk_version = (vtk.vtkVersion.GetVTKMajorVersion(), + vtk.vtkVersion.GetVTKMinorVersion()) +except ImportError: + iflogger.warning('VTK was not found') + +# Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var +old_ets = os.getenv('ETS_TOOLKIT') +os.environ['ETS_TOOLKIT'] = 'null' +_have_tvtk = False +try: + from tvtk.api import tvtk + _have_tvtk = True +except ImportError: + iflogger.warning('tvtk wasn\'t found') + tvtk = None +finally: + if old_ets is not None: + os.environ['ETS_TOOLKIT'] = old_ets + else: + del os.environ['ETS_TOOLKIT'] + + +def vtk_version(): + """ Get VTK version """ + global _vtk_version + return _vtk_version + + +def no_vtk(): + """ Checks if VTK is installed and the python wrapper is functional """ + global _vtk_version + return _vtk_version is None + + +def no_tvtk(): + """ Checks if tvtk was found """ + global _have_tvtk + return not _have_tvtk + + +def vtk_old(): + """ Checks if VTK uses the old-style pipeline (VTK<6.0) """ + global _vtk_version + if _vtk_version is None: + raise RuntimeException('VTK is not correctly installed.') + return 
_vtk_version[0] < 6 + + +def configure_input_data(obj, data): + """ + Configure the input data for vtk pipeline object obj. + Copied from latest version of mayavi + """ + if vtk_old(): + obj.input = data + else: + obj.set_input_data(data) + + +def vtk_output(obj): + """ Configure the input data for vtk pipeline object obj.""" + if vtk_old(): + return obj.output + return obj.get_output() diff --git a/requirements.txt b/requirements.txt index 7fa14bd8f5..43a30c6639 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,3 +8,4 @@ nose>=1.2 future==0.15.2 simplejson>=3.8.0 prov>=1.4.0 +xvfbwrapper \ No newline at end of file