From 420fa762f9a34cbe7d3673d5f8b66b9fe04eede0 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 21 Oct 2014 19:48:22 +0200 Subject: [PATCH 01/56] Added ets.toolkit configuration to initialization --- nipype/pipeline/plugins/base.py | 12 +++++++++++- nipype/utils/config.py | 13 +++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 17ce1d67d4..468eea7466 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -109,6 +109,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): pkl_file = os.path.join(batch_dir, 'node_%s.pklz' % suffix) savepkl(pkl_file, dict(node=node, updatehash=updatehash)) mpl_backend = node.config["execution"]["matplotlib_backend"] + ets_toolkit = node.config["execution"]["ets_toolkit"] # create python script to load and trap exception cmdstr = """import os import sys @@ -117,6 +118,13 @@ def create_pyscript(node, updatehash=False, store_exception=True): matplotlib.use('%s') except ImportError: pass + +try: + from enthought.etsconfig.api import ETSConfig + ETSConfig.toolkit = '%s' +except: + pass + from nipype import config, logging from nipype.utils.filemanip import loadpkl, savepkl from socket import gethostname @@ -131,6 +139,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): config_dict=%s config.update_config(config_dict) config.update_matplotlib() + config.update_ets() logging.update_logging(config) traceback=None cwd = os.getcwd() @@ -162,7 +171,8 @@ def create_pyscript(node, updatehash=False, store_exception=True): report_crash(info['node'], traceback, gethostname()) raise Exception(e) """ - cmdstr = cmdstr % (mpl_backend, pkl_file, batch_dir, node.config, suffix) + cmdstr = cmdstr % (mpl_backend, ets_toolkit, pkl_file, + batch_dir, node.config, suffix) pyscript = os.path.join(batch_dir, 'pyscript_%s.py' % suffix) fp = open(pyscript, 'wt') fp.writelines(cmdstr) diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 096db974d4..6fb6383adb 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -38,6 +38,7 @@ keep_inputs = false local_hash_check = true matplotlib_backend = Agg +ets_toolkit = null plugin = Linear remove_node_directories = false remove_unnecessary_outputs = true @@ -53,6 +54,7 @@ interval = 1209600 """ % (homedir, os.getcwd()) + class NipypeConfig(object): """Base nipype config class """ @@ -150,6 +152,17 @@ def update_matplotlib(self): import matplotlib matplotlib.use(self.get('execution', 'matplotlib_backend')) + def update_ets(self): + try: + from enthought.etsconfig.api import ETSConfig + ETSConfig.toolkit = self.get('execution', 'ets_toolkit') + except ImportError: + warn('ETS toolkit could not be imported') + pass + except ValueError as e: + warn(e.msg) + pass + def enable_provenance(self): self._config.set('execution', 'write_provenance', 'true') self._config.set('execution', 'hash_method', 'content') From 559e6985c08e847cdc7aadfe9b2f63700f0adcc0 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 21 Oct 2014 19:58:02 +0200 Subject: [PATCH 02/56] remove ets.toolkit config in mesh (#972) --- nipype/algorithms/mesh.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 7f08c26638..754272249f 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -72,13 +72,6 @@ def _run_interface(self, runtime): except ImportError: raise ImportError('Interface P2PDistance requires tvtk') - 
try: - from enthought.etsconfig.api import ETSConfig - ETSConfig.toolkit = 'null' - except ImportError: - iflogger.warn(('ETS toolkit could not be imported')) - pass - r1 = tvtk.PolyDataReader(file_name=self.inputs.surface1) r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) vtk1 = r1.output From 1231a5e3b6a8cc7bc8ac019e829ff72d1cee06c4 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 3 Mar 2015 12:36:04 +0100 Subject: [PATCH 03/56] add new import under traits as failback for enthougth --- nipype/pipeline/plugins/base.py | 21 ++++++++++++++++----- nipype/utils/config.py | 20 +++++++++++++++----- 2 files changed, 31 insertions(+), 10 deletions(-) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index b13874c98c..67dd34b175 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -115,7 +115,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): import sys can_import_matplotlib = True #Silently allow matplotlib to be ignored -can_import_ets = True #Silently allow ets to be ignored +can_import_ets = False #Silently allow ets to be ignored try: import matplotlib matplotlib.use('%s') @@ -126,13 +126,24 @@ def create_pyscript(node, updatehash=False, store_exception=True): try: from enthought.etsconfig.api import ETSConfig - ETSConfig.toolkit = '%s' + can_import_ets = True except ImportError: - can_import_ets = False - pass -except ValueError: pass +if not can_import_ets: + try: + from traits.etsconfig.etsconfig import ETSConfig + can_import_ets = True + except ImportError: + pass + +if can_import_ets: + try: + ETSConfig.toolkit = '%s' + except ValueError: + pass + + from nipype import config, logging from nipype.utils.filemanip import loadpkl, savepkl from socket import gethostname diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 883fde89f8..5500b0bfd0 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -154,16 +154,26 @@ def update_matplotlib(self): matplotlib.use(self.get('execution', 'matplotlib_backend')) def update_ets(self): + can_import_ets = False try: from enthought.etsconfig.api import ETSConfig - ETSConfig.toolkit = self.get('execution', 'ets_toolkit') + can_import_ets = True except ImportError: - warn('ETS toolkit could not be imported') - pass - except ValueError as e: - warn(e.msg) pass + if not can_import_ets: + try: + from traits.etsconfig.etsconfig import ETSConfig + can_import_ets = True + except ImportError: + pass + + if can_import_ets: + try: + ETSConfig.toolkit = '%s' + except ValueError: + pass + def enable_provenance(self): self._config.set('execution', 'write_provenance', 'true') self._config.set('execution', 'hash_method', 'content') From a72fd3447a93db44f1bca6eb02096b25a9cc50ae Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 9 Apr 2015 16:34:45 +0200 Subject: [PATCH 04/56] remove error, add config to docs --- doc/users/config_file.rst | 6 ++++++ nipype/pipeline/plugins/base.py | 1 - 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/doc/users/config_file.rst b/doc/users/config_file.rst index e6ca4f66c6..cc18d6c9a1 100644 --- a/doc/users/config_file.rst +++ b/doc/users/config_file.rst @@ -125,6 +125,12 @@ Execution all pending jobs and checking for job completion. To be nice to cluster schedulers the default is set to 60 seconds. +*ets_toolkit* + This sets the backend for the Enthought traits (should be ``'null'`` in headless + settings). 
+ +*matplotlib_backend* + Example ~~~~~~~ diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 67dd34b175..b65748430b 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -122,7 +122,6 @@ def create_pyscript(node, updatehash=False, store_exception=True): except ImportError: can_import_matplotlib = False pass -except try: from enthought.etsconfig.api import ETSConfig From 485c44d8a47e7ffadb888c1ca838fadc4b0d5665 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 18 Sep 2015 11:38:59 +0200 Subject: [PATCH 05/56] insert here new TVTKBaseInterface --- nipype/algorithms/mesh.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 6e67b85a26..b00719cb80 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -24,6 +24,31 @@ from warnings import warn iflogger = logging.getLogger('interface') +have_tvtk = False +try: + import os + os.environ['ETS_TOOLKIT'] = 'null' + from tvtk.api import tvtk + have_tvtk = True +except ImportError: + pass + +iflogger = logging.getLogger('interface') + + +class TVTKBaseInterface(BaseInterface): + _redirect_x = True + _vtk_major = 6 + + def __init__(self, **inputs): + if not have_tvtk: + raise RuntimeError('Interface requires tvtk') + + if have_tvtk: + from tvtk.tvtk_classes.vtk_version import vtk_build_version + self._vtk_major = int(vtk_build_version[0]) + super(TVTKBaseInterface, self).__init__(**inputs) + class WarpPointsInputSpec(BaseInterfaceInputSpec): points = File(exists=True, mandatory=True, From 21b86acbda2455f66afdd1253214c9ae05e99a82 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 18 Sep 2015 12:09:21 +0200 Subject: [PATCH 06/56] ETS_TOOLKIT settings in algorithms.mesh --- nipype/algorithms/mesh.py | 49 ++++++++++++--------------------------- 1 file changed, 15 insertions(+), 34 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index f884b20955..ebc427a2cd 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -13,6 +13,7 @@ import numpy as np from numpy import linalg as nla +import os import os.path as op from ..external import six from .. 
import logging @@ -22,15 +23,21 @@ iflogger = logging.getLogger('interface') +oldets = os.getenv('ETS_TOOLKIT') have_tvtk = False + try: - import os os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk have_tvtk = True except ImportError: pass +if oldets is not None: + os.environ['ETS_TOOLKIT'] = oldets +else: + del os.environ['ETS_TOOLKIT'] + iflogger = logging.getLogger('interface') @@ -40,24 +47,13 @@ class TVTKBaseInterface(BaseInterface): def __init__(self, **inputs): if not have_tvtk: - raise RuntimeError('Interface requires tvtk') - - if have_tvtk: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - self._vtk_major = int(vtk_build_version[0]) - super(TVTKBaseInterface, self).__init__(**inputs) - - -class TVTKBaseInterface(BaseInterface): - _redirect_x = True - _vtk_major = 6 - - def __init__(self, **inputs): - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - self._vtk_major = int(vtk_build_version[0]) - except ImportError: - iflogger.warning('VTK version-major inspection using tvtk failed.') + iflogger.warning('Interface requires tvtk, but it wasn\'t found') + else: + try: + from tvtk.tvtk_classes.vtk_version import vtk_build_version + self._vtk_major = int(vtk_build_version[0]) + except ImportError: + iflogger.warning('VTK version-major inspection using tvtk failed.') super(TVTKBaseInterface, self).__init__(**inputs) @@ -120,11 +116,6 @@ def _run_interface(self, runtime): import numpy as np from scipy import ndimage - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('Interface requires tvtk') - r = tvtk.PolyDataReader(file_name=self.inputs.points) r.update() mesh = r.output @@ -240,11 +231,6 @@ def _triangle_area(self, A, B, C): return area def _run_interface(self, runtime): - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('Interface requires tvtk') - r1 = tvtk.PolyDataReader(file_name=self.inputs.surface1) r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) vtk1 = r1.output @@ -367,11 +353,6 @@ class MeshWarpMaths(TVTKBaseInterface): output_spec = MeshWarpMathsOutputSpec def _run_interface(self, runtime): - try: - from tvtk.api import tvtk - except ImportError: - raise ImportError('Interface requires tvtk') - r1 = tvtk.PolyDataReader(file_name=self.inputs.in_surf) vtk1 = r1.output r1.update() From 90f27bd08ba811c44c90ec09cf07c68cc7132b6c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 18 Sep 2015 13:00:39 +0200 Subject: [PATCH 07/56] search all tvtk imports and fix ETS_TOOLKIT --- doc/users/config_file.rst | 5 -- nipype/algorithms/mesh.py | 1 - nipype/algorithms/tests/test_mesh_ops.py | 11 +++- nipype/interfaces/fsl/utils.py | 18 ++++++ nipype/pipeline/plugins/base.py | 70 +++++++++++------------- nipype/utils/config.py | 19 +++---- 6 files changed, 67 insertions(+), 57 deletions(-) diff --git a/doc/users/config_file.rst b/doc/users/config_file.rst index 74fa4fcc5b..1f37dbe047 100644 --- a/doc/users/config_file.rst +++ b/doc/users/config_file.rst @@ -139,11 +139,6 @@ Execution *xvfb_max_wait* Maximum time (in seconds) to wait for Xvfb to start, if the _redirect_x parameter of an Interface is True. -*ets_toolkit* - This sets the backend for the Enthought traits (should be ``'null'`` in headless - settings). 
- - Example ~~~~~~~ diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index ebc427a2cd..f87d9d9e09 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -25,7 +25,6 @@ oldets = os.getenv('ETS_TOOLKIT') have_tvtk = False - try: os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 53c4c5fdc2..529e6c6b78 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -13,15 +13,24 @@ from nipype.algorithms import mesh as m -notvtk = True import platform + +notvtk = True if 'darwin' not in platform.system().lower(): + oldets = os.getenv('ETS_TOOLKIT') + have_tvtk = False try: + os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk notvtk = False except ImportError: pass + if oldets is not None: + os.environ['ETS_TOOLKIT'] = oldets + else: + del os.environ['ETS_TOOLKIT'] + @skipif(notvtk) def test_ident_distances(): tempdir = mkdtemp() diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index d9b09904d9..acba96cce5 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -1854,11 +1854,20 @@ def _parse_inputs(self, skip=None): return first_args + [ second_args ] def _vtk_to_coords(self, in_file, out_file=None): + import os import os.path as op + + oldets = os.getenv('ETS_TOOLKIT') try: + os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk except ImportError: raise ImportError('This interface requires tvtk to run.') + finally: + if oldets is not None: + os.environ['ETS_TOOLKIT'] = oldets + else: + del os.environ['ETS_TOOLKIT'] reader = tvtk.PolyDataReader(file_name=in_file+'.vtk') reader.update() @@ -1871,11 +1880,20 @@ def _vtk_to_coords(self, in_file, out_file=None): return out_file def _coords_to_vtk(self, points, out_file): + import os import os.path as op + + oldets = os.getenv('ETS_TOOLKIT') try: + os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk except ImportError: raise ImportError('This interface requires tvtk to run.') + finally: + if oldets is not None: + os.environ['ETS_TOOLKIT'] = oldets + else: + del os.environ['ETS_TOOLKIT'] reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 3bbdfde4e3..b987dcfc13 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -54,8 +54,8 @@ def report_crash(node, traceback=None, hostname=None): timeofcrash = strftime('%Y%m%d-%H%M%S') login_name = getpass.getuser() crashfile = 'crash-%s-%s-%s.pklz' % (timeofcrash, - login_name, - name) + login_name, + name) crashdir = node.config['execution']['crashdump_dir'] if crashdir is None: crashdir = os.getcwd() @@ -106,10 +106,10 @@ def create_pyscript(node, updatehash=False, store_exception=True): pkl_file = os.path.join(batch_dir, 'node_%s.pklz' % suffix) savepkl(pkl_file, dict(node=node, updatehash=updatehash)) mpl_backend = node.config["execution"]["matplotlib_backend"] - ets_toolkit = node.config["execution"]["ets_toolkit"] # create python script to load and trap exception cmdstr = """import os import sys + can_import_matplotlib = True #Silently allow matplotlib to be ignored try: import matplotlib @@ -118,8 +118,6 @@ def create_pyscript(node, updatehash=False, store_exception=True): can_import_matplotlib = False pass -os.environ['ETS_TOOLKIT'] = '%s' - from nipype import config, logging from 
nipype.utils.filemanip import loadpkl, savepkl from socket import gethostname @@ -133,12 +131,9 @@ def create_pyscript(node, updatehash=False, store_exception=True): from collections import OrderedDict config_dict=%s config.update_config(config_dict) - - ## Only configure matplotlib if it was successfully imported, matplotlib is - ## an optional component to nipype + ## Only configure matplotlib if it was successfully imported, matplotlib is an optional component to nipype if can_import_matplotlib: config.update_matplotlib() - config.update_ets() logging.update_logging(config) traceback=None cwd = os.getcwd() @@ -170,8 +165,7 @@ def create_pyscript(node, updatehash=False, store_exception=True): report_crash(info['node'], traceback, gethostname()) raise Exception(e) """ - cmdstr = cmdstr % (mpl_backend, ets_toolkit, pkl_file, - batch_dir, node.config, suffix) + cmdstr = cmdstr % (mpl_backend, pkl_file, batch_dir, node.config, suffix) pyscript = os.path.join(batch_dir, 'pyscript_%s.py' % suffix) fp = open(pyscript, 'wt') fp.writelines(cmdstr) @@ -180,7 +174,6 @@ def create_pyscript(node, updatehash=False, store_exception=True): class PluginBase(object): - """Base class for plugins""" def __init__(self, plugin_args=None): @@ -195,7 +188,6 @@ def run(self, graph, config, updatehash=False): class DistributedPluginBase(PluginBase): - """Execute workflow with a distribution engine """ @@ -238,7 +230,7 @@ def run(self, graph, config, updatehash=False): # setup polling - TODO: change to threaded model notrun = [] while np.any(self.proc_done == False) | \ - np.any(self.proc_pending == True): + np.any(self.proc_pending == True): toappend = [] # trigger callbacks for any pending results while self.pending_tasks: @@ -316,7 +308,7 @@ def _submit_mapnode(self, jobid): self.procs.extend(mapnodesubids) self.depidx = ssp.vstack((self.depidx, ssp.lil_matrix(np.zeros( - (numnodes, self.depidx.shape[1])))), + (numnodes, self.depidx.shape[1])))), 'lil') self.depidx = ssp.hstack((self.depidx, ssp.lil_matrix( @@ -365,23 +357,23 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): self.proc_pending[jobid] = True # Send job to task manager and add to pending tasks logger.info('Executing: %s ID: %d' % - (self.procs[jobid]._id, jobid)) + (self.procs[jobid]._id, jobid)) if self._status_callback: self._status_callback(self.procs[jobid], 'start') continue_with_submission = True if str2bool(self.procs[jobid].config['execution'] - ['local_hash_check']): + ['local_hash_check']): logger.debug('checking hash locally') try: hash_exists, _, _, _ = self.procs[ jobid].hash_exists() logger.debug('Hash exists %s' % str(hash_exists)) if (hash_exists and - (self.procs[jobid].overwrite == False or - (self.procs[jobid].overwrite == None and - not self.procs[jobid]._interface.always_run) - ) - ): + (self.procs[jobid].overwrite == False or + (self.procs[jobid].overwrite == None and + not self.procs[jobid]._interface.always_run) + ) + ): continue_with_submission = False self._task_finished_cb(jobid) self._remove_node_dirs() @@ -473,7 +465,6 @@ def _remove_node_dirs(self): class SGELikeBatchManagerBase(DistributedPluginBase): - """Execute workflow with SGE/OGE/PBS like batch system """ @@ -536,7 +527,7 @@ def _get_result(self, taskid): 'seconds. 
Batch dir contains crashdump file ' 'if node raised an exception.\n' 'Node working directory: ({2}) '.format( - taskid, timeout, node_dir)) + taskid,timeout,node_dir) ) raise IOError(error_message) except IOError, e: result_data['traceback'] = format_exc() @@ -583,7 +574,6 @@ def _clear_task(self, taskid): class GraphPluginBase(PluginBase): - """Base class for plugins that distribute graphs to workflows """ @@ -613,19 +603,19 @@ def _get_args(self, node, keywords): if keyword == "template" and os.path.isfile(value): value = open(value).read() if (hasattr(node, "plugin_args") and - isinstance(node.plugin_args, dict) and - keyword in node.plugin_args): - if (keyword == "template" and - os.path.isfile(node.plugin_args[keyword])): - tmp_value = open(node.plugin_args[keyword]).read() - else: - tmp_value = node.plugin_args[keyword] - - if ('overwrite' in node.plugin_args and - node.plugin_args['overwrite']): - value = tmp_value - else: - value += tmp_value + isinstance(node.plugin_args, dict) and + keyword in node.plugin_args): + if (keyword == "template" and + os.path.isfile(node.plugin_args[keyword])): + tmp_value = open(node.plugin_args[keyword]).read() + else: + tmp_value = node.plugin_args[keyword] + + if ('overwrite' in node.plugin_args and + node.plugin_args['overwrite']): + value = tmp_value + else: + value += tmp_value values += (value, ) return values @@ -636,6 +626,8 @@ def _submit_graph(self, pyfiles, dependencies, nodes): """ raise NotImplementedError + + def _get_result(self, taskid): if taskid not in self._pending: raise Exception('Task %d not found' % taskid) @@ -643,6 +635,7 @@ def _get_result(self, taskid): return None node_dir = self._pending[taskid] + logger.debug(os.listdir(os.path.realpath(os.path.join(node_dir, '..')))) logger.debug(os.listdir(node_dir)) @@ -663,3 +656,4 @@ def _get_result(self, taskid): result_out['result'] = result_data return result_out + diff --git a/nipype/utils/config.py b/nipype/utils/config.py index f1e1dd0d6f..0a86d1af26 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -40,7 +40,6 @@ keep_inputs = false local_hash_check = true matplotlib_backend = Agg -ets_toolkit = null plugin = Linear remove_node_directories = false remove_unnecessary_outputs = true @@ -71,7 +70,6 @@ def mkdir_p(path): class NipypeConfig(object): - """Base nipype config class """ @@ -84,12 +82,12 @@ def __init__(self, *args, **kwargs): # To be deprecated in two releases if os.path.exists(old_config_file): if os.path.exists(new_config_file): - msg = ("Detected presence of both old (%s, used by versions " - "< 0.5.2) and new (%s) config files. This version will " - "proceed with the new one. We advise to merge settings " - "and remove old config file if you are not planning to " - "use previous releases of nipype.") % (old_config_file, - new_config_file) + msg=("Detected presence of both old (%s, used by versions " + "< 0.5.2) and new (%s) config files. This version will " + "proceed with the new one. 
We advise to merge settings " + "and remove old config file if you are not planning to " + "use previous releases of nipype.") % (old_config_file, + new_config_file) warn(msg) else: warn("Moving old config file from: %s to %s" % (old_config_file, @@ -167,10 +165,7 @@ def update_matplotlib(self): import matplotlib matplotlib.use(self.get('execution', 'matplotlib_backend')) - def update_ets(self): - import os - os.environ['ETS_TOOLKIT'] = '%s' % self.get('execution', 'ets_toolkit') - def enable_provenance(self): self._config.set('execution', 'write_provenance', 'true') self._config.set('execution', 'hash_method', 'content') + From 6416e5b1292dcf8f24be6854adb907ea437cda47 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 14 Dec 2015 07:56:54 +0100 Subject: [PATCH 08/56] add autotest file for TVTKBaseInterface --- .../tests/test_auto_TVTKBaseInterface.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 nipype/algorithms/tests/test_auto_TVTKBaseInterface.py diff --git a/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py b/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py new file mode 100644 index 0000000000..3dd8ac6d2a --- /dev/null +++ b/nipype/algorithms/tests/test_auto_TVTKBaseInterface.py @@ -0,0 +1,16 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ...testing import assert_equal +from ..mesh import TVTKBaseInterface + + +def test_TVTKBaseInterface_inputs(): + input_map = dict(ignore_exception=dict(nohash=True, + usedefault=True, + ), + ) + inputs = TVTKBaseInterface.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + From c6bb4e3c0f61f845606e4f48d459e0ba88eb5289 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Mon, 25 Jan 2016 15:10:25 -0800 Subject: [PATCH 09/56] removed NotImplementedError in method with implementation --- nipype/interfaces/fsl/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 0fd8b1b217..9be811a273 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -1935,7 +1935,6 @@ def _coords_to_vtk(self, points, out_file): writer.write() def _trk_to_coords(self, in_file, out_file=None): - raise NotImplementedError('trk files are not yet supported') try: from nibabel.trackvis import TrackvisFile except ImportError: From b757cf89f8d54ddbaa16e279cb0e1900091fa73c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 08:43:02 -0800 Subject: [PATCH 10/56] improved code quality, added comments --- nipype/algorithms/mesh.py | 41 +++++++++++++----------- nipype/algorithms/tests/test_mesh_ops.py | 13 ++++---- nipype/interfaces/fsl/utils.py | 36 ++++++++------------- 3 files changed, 44 insertions(+), 46 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 1c578df293..ec9eca6da3 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -25,36 +25,41 @@ from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, BaseInterfaceInputSpec) -oldets = os.getenv('ETS_TOOLKIT') +iflogger = logging.getLogger('interface') + +# Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var +old_ets = os.getenv('ETS_TOOLKIT') +os.environ['ETS_TOOLKIT'] = 'null' have_tvtk = False try: - os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk have_tvtk = True except ImportError: - pass - -if oldets is not None: - 
os.environ['ETS_TOOLKIT'] = oldets -else: - del os.environ['ETS_TOOLKIT'] - -iflogger = logging.getLogger('interface') + iflogger.warning('tvtk wasn\'t found') +finally: + if old_ets is not None: + os.environ['ETS_TOOLKIT'] = old_ets + else: + del os.environ['ETS_TOOLKIT'] class TVTKBaseInterface(BaseInterface): + """ A base class for interfaces using VTK """ + _redirect_x = True - _vtk_major = 6 + _vtk_major = 5 def __init__(self, **inputs): if not have_tvtk: - iflogger.warning('Interface requires tvtk, but it wasn\'t found') - else: - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - self._vtk_major = int(vtk_build_version[0]) - except ImportError: - iflogger.warning('VTK version-major inspection using tvtk failed.') + raise ImportError('This interface requires tvtk to run.') + + # Identify VTK version major, use 5.0 if failed + try: + from tvtk.tvtk_classes.vtk_version import vtk_build_version + self._vtk_major = int(vtk_build_version[0]) + except ImportError: + iflogger.warning( + 'VTK version-major inspection using tvtk failed, assuming VTK <= 5.0.') super(TVTKBaseInterface, self).__init__(**inputs) diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 529e6c6b78..29d535bd2f 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -17,19 +17,20 @@ notvtk = True if 'darwin' not in platform.system().lower(): - oldets = os.getenv('ETS_TOOLKIT') + old_ets = os.getenv('ETS_TOOLKIT') + os.environ['ETS_TOOLKIT'] = 'null' have_tvtk = False try: - os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk notvtk = False except ImportError: pass + finally: + if old_ets is not None: + os.environ['ETS_TOOLKIT'] = old_ets + else: + del os.environ['ETS_TOOLKIT'] - if oldets is not None: - os.environ['ETS_TOOLKIT'] = oldets - else: - del os.environ['ETS_TOOLKIT'] @skipif(notvtk) def test_ident_distances(): diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 9be811a273..f9ab1442af 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -1867,25 +1867,21 @@ def _vtk_to_coords(self, in_file, out_file=None): import os import os.path as op - oldets = os.getenv('ETS_TOOLKIT') + # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var + old_ets = os.getenv('ETS_TOOLKIT') + os.environ['ETS_TOOLKIT'] = 'null' try: - os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk + from tvtk.tvtk_classes.vtk_version import vtk_build_version except ImportError: raise ImportError('This interface requires tvtk to run.') finally: - if oldets is not None: - os.environ['ETS_TOOLKIT'] = oldets + if old_ets is not None: + os.environ['ETS_TOOLKIT'] = old_ets else: del os.environ['ETS_TOOLKIT'] - vtk_major = 5 - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - vtk_major = int(vtk_build_version[0]) - except ImportError: - iflogger.warning('VTK version-major inspection using tvtk failed.') - + vtk_major = int(vtk_build_version[0]) reader = tvtk.PolyDataReader(file_name=in_file + '.vtk') reader.update() @@ -1902,25 +1898,21 @@ def _coords_to_vtk(self, points, out_file): import os import os.path as op - oldets = os.getenv('ETS_TOOLKIT') + # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var + old_ets = os.getenv('ETS_TOOLKIT') + os.environ['ETS_TOOLKIT'] = 'null' try: - os.environ['ETS_TOOLKIT'] = 'null' from tvtk.api import tvtk + from tvtk.tvtk_classes.vtk_version import vtk_build_version except 
ImportError: raise ImportError('This interface requires tvtk to run.') finally: - if oldets is not None: - os.environ['ETS_TOOLKIT'] = oldets + if old_ets is not None: + os.environ['ETS_TOOLKIT'] = old_ets else: del os.environ['ETS_TOOLKIT'] - vtk_major = 5 - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - vtk_major = int(vtk_build_version[0]) - except ImportError: - iflogger.warning('VTK version-major inspection using tvtk failed.') - + vtk_major = int(vtk_build_version[0]) reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() From df17a3dc3205ae1cc07235eb45e3817c12807a82 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 09:56:17 -0800 Subject: [PATCH 11/56] added neurodebian in all travis builds --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 6aeeb361ef..a652a03f1f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,13 +16,13 @@ before_install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; else export PATH=/home/travis/miniconda3/bin:$PATH; fi - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi -- if $INSTALL_DEB_DEPENDECIES; then bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh); - fi +- bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) +- sudo apt-get update - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; fi -- travis_retry sudo apt-get install -qq libvtk5-dev swig +- travis_retry sudo apt-get install -qq libx11-dev libvtk5-dev swig - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi install: From b9287f4c597ede876e150a0133940e02060dac10 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 10:18:17 -0800 Subject: [PATCH 12/56] added dependency (Pillow) and fixed numpy installation --- .travis.yml | 1 + circle.yml | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index a652a03f1f..7affd0b90e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -35,6 +35,7 @@ install: - pip install python-coveralls - pip install nose-cov # Add tvtk +- pip install Pillow - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/ets.git#egg=ets diff --git a/circle.yml b/circle.yml index 82511ade0e..db253becb3 100644 --- a/circle.yml +++ b/circle.yml @@ -16,13 +16,18 @@ dependencies: override: # Install apt packages - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni libvtk5-dev swig - - echo "source /etc/fsl/fsl.sh" >> $HOME/.profile - - echo "source /etc/afni/afni.sh" >> $HOME/.profile + - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile + - echo 'source /etc/afni/afni.sh' >> $HOME/.profile + # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 + - echo '[x11]' >> $HOME/.numpy-site.cfg + - echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg + - echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg # Set up python 
environment - pip install --upgrade pip - pip install -e . - pip install matplotlib sphinx ipython boto # Add tvtk + - pip install Pillow - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/ets.git#egg=ets From 30b032326221d955f86d7ac26cb5590c3708a583 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 10:19:35 -0800 Subject: [PATCH 13/56] fixed numpy installation -> moved from circle to travis --- .travis.yml | 4 ++++ circle.yml | 4 ---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7affd0b90e..624c17abf0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,6 +25,10 @@ before_install: - travis_retry sudo apt-get install -qq libx11-dev libvtk5-dev swig - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi +# Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 +- echo '[x11]' >> $HOME/.numpy-site.cfg +- echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg +- echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg install: - conda update --yes conda - conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION diff --git a/circle.yml b/circle.yml index db253becb3..67fb5df097 100644 --- a/circle.yml +++ b/circle.yml @@ -18,10 +18,6 @@ dependencies: - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni libvtk5-dev swig - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile - echo 'source /etc/afni/afni.sh' >> $HOME/.profile - # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 - - echo '[x11]' >> $HOME/.numpy-site.cfg - - echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg - - echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg # Set up python environment - pip install --upgrade pip - pip install -e . 
From 9ce6bce35fe29e8fb2128e13b3798fd9f82accfc Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 10:25:38 -0800 Subject: [PATCH 14/56] replaced Pillow by PIL --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 624c17abf0..3d2fbb79d7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -39,7 +39,7 @@ install: - pip install python-coveralls - pip install nose-cov # Add tvtk -- pip install Pillow +- pip install PIL --allow-external PIL --allow-unverified PIL - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/ets.git#egg=ets From 4ee2119d162c07808f792e37bede41d37ac0bd75 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 10:32:33 -0800 Subject: [PATCH 15/56] added PIL from url --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3d2fbb79d7..a4b0fb1ee6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,8 +38,8 @@ install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi - pip install python-coveralls - pip install nose-cov -# Add tvtk -- pip install PIL --allow-external PIL --allow-unverified PIL +# Add tvtk (PIL is required by blockcanvas) +- pip install PIL http://effbot.org/downloads/Imaging-1.1.7.tar.gz - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/ets.git#egg=ets From 24163710e1680060b8d95ec2eb7bf8d6cf31a035 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 10:33:58 -0800 Subject: [PATCH 16/56] added PIL from url also for circle --- circle.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/circle.yml b/circle.yml index 67fb5df097..801b12a909 100644 --- a/circle.yml +++ b/circle.yml @@ -23,7 +23,7 @@ dependencies: - pip install -e . 
- pip install matplotlib sphinx ipython boto # Add tvtk - - pip install Pillow + - pip install PIL http://effbot.org/downloads/Imaging-1.1.7.tar.gz - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/ets.git#egg=ets From fa6e263251c7abb908cff781a2c1df1fe0fb8f02 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 10:37:35 -0800 Subject: [PATCH 17/56] fixed PIL installation --- .travis.yml | 2 +- circle.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index a4b0fb1ee6..c95b053008 100644 --- a/.travis.yml +++ b/.travis.yml @@ -39,7 +39,7 @@ install: - pip install python-coveralls - pip install nose-cov # Add tvtk (PIL is required by blockcanvas) -- pip install PIL http://effbot.org/downloads/Imaging-1.1.7.tar.gz +- pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/ets.git#egg=ets diff --git a/circle.yml b/circle.yml index 801b12a909..75d54c85c9 100644 --- a/circle.yml +++ b/circle.yml @@ -23,7 +23,7 @@ dependencies: - pip install -e . - pip install matplotlib sphinx ipython boto # Add tvtk - - pip install PIL http://effbot.org/downloads/Imaging-1.1.7.tar.gz + - pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/ets.git#egg=ets From 02ea69bd08ccc92ed8e703795421ba1bbfc966a4 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 10:46:26 -0800 Subject: [PATCH 18/56] added missing etsproxy --- .travis.yml | 1 + circle.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index c95b053008..5edac8418d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -42,6 +42,7 @@ install: - pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas +- pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy - pip install -e git+https://github.com/enthought/ets.git#egg=ets - pip install -r requirements.txt # finish remaining requirements - python setup.py install diff --git a/circle.yml b/circle.yml index 75d54c85c9..3f019c07ae 100644 --- a/circle.yml +++ b/circle.yml @@ -26,6 +26,7 @@ dependencies: - pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas + - pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy - pip install -e git+https://github.com/enthought/ets.git#egg=ets - gem install fakes3 - if [[ ! 
-d ~/examples/data ]]; then wget "http://tcpdiag.dl.sourceforge.net/project/nipy/nipype/nipype-0.2/nipype-tutorial.tar.bz2"; tar jxvf nipype-tutorial.tar.bz2; mkdir ~/examples; mv nipype-tutorial/* ~/examples/; fi From 70036b40278ca2cfa7e72b202194242c596fc50f Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 11:01:08 -0800 Subject: [PATCH 19/56] added python-vtk --- .travis.yml | 2 +- circle.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5edac8418d..3396ea58dc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,7 +22,7 @@ before_install: fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; fi -- travis_retry sudo apt-get install -qq libx11-dev libvtk5-dev swig +- travis_retry sudo apt-get install -qq libx11-dev libvtk5-dev swig python-vtk - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 diff --git a/circle.yml b/circle.yml index 3f019c07ae..9c56f98e26 100644 --- a/circle.yml +++ b/circle.yml @@ -15,7 +15,7 @@ dependencies: - sudo apt-get update override: # Install apt packages - - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni libvtk5-dev swig + - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni libvtk5-dev swig python-vtk - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile - echo 'source /etc/afni/afni.sh' >> $HOME/.profile # Set up python environment From a52e3d9c3bc19d007a0cdbbdf5f724f3e0bc55fe Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 11:26:43 -0800 Subject: [PATCH 20/56] fixed vtk installation in travis --- .travis.yml | 4 ++-- circle.yml | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3396ea58dc..cfead3ef0d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,7 +22,7 @@ before_install: fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; fi -- travis_retry sudo apt-get install -qq libx11-dev libvtk5-dev swig python-vtk +- travis_retry sudo apt-get install -qq libx11-dev swig - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 @@ -34,7 +34,7 @@ install: - conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION - source activate testenv - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi -- conda install --yes numpy scipy nose networkx dateutil +- conda install --yes numpy scipy nose networkx dateutil vtk - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi - pip install python-coveralls - pip install nose-cov diff --git a/circle.yml b/circle.yml index 9c56f98e26..aec73c7600 100644 --- a/circle.yml +++ b/circle.yml @@ -15,9 +15,10 @@ dependencies: - sudo apt-get update override: # Install apt packages - - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni libvtk5-dev swig python-vtk + - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni swig python-vtk - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile - echo 'source /etc/afni/afni.sh' >> $HOME/.profile + - echo 'export 
PYTHONPATH="$PYTHONPATH:/usr/local/lib/python2.7/site-packages/vtk/"' >> $HOME/.profile # Set up python environment - pip install --upgrade pip - pip install -e . From 9a6728ed129a149a7d6bb31c99191e957c307702 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 11:59:10 -0800 Subject: [PATCH 21/56] fixed mayavi in travis and python-vtk in circle --- .travis.yml | 2 ++ circle.yml | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index cfead3ef0d..f2cd116e19 100644 --- a/.travis.yml +++ b/.travis.yml @@ -43,6 +43,8 @@ install: - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy +# Install mayavi (see https://github.com/enthought/mayavi/issues/271) +- pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip - pip install -e git+https://github.com/enthought/ets.git#egg=ets - pip install -r requirements.txt # finish remaining requirements - python setup.py install diff --git a/circle.yml b/circle.yml index aec73c7600..6c403fc0e8 100644 --- a/circle.yml +++ b/circle.yml @@ -18,7 +18,9 @@ dependencies: - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni swig python-vtk - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile - echo 'source /etc/afni/afni.sh' >> $HOME/.profile - - echo 'export PYTHONPATH="$PYTHONPATH:/usr/local/lib/python2.7/site-packages/vtk/"' >> $HOME/.profile + # Enable system-wide vtk + - echo 'export PYTHONPATH=$PYTHONPATH:/usr/lib/pymodules/python2.7/' >> $HOME/.profile + - export PYTHONPATH=$PYTHONPATH:/usr/lib/pymodules/python2.7/ # Set up python environment - pip install --upgrade pip - pip install -e . 
From db3ac3e3fbefd254d513a139fb7a69f2104011aa Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 12:49:32 -0800 Subject: [PATCH 22/56] added xvfbwrapper requirement, fixing vtk in ci --- .travis.yml | 14 ++++++++------ circle.yml | 4 ++-- requirements.txt | 1 + 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/.travis.yml b/.travis.yml index f2cd116e19..8b74cf8741 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,20 +22,22 @@ before_install: fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; fi -- travis_retry sudo apt-get install -qq libx11-dev swig - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi -# Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 -- echo '[x11]' >> $HOME/.numpy-site.cfg -- echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg -- echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then travis_retry sudo apt-get install -qq libx11-dev swig; + # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 + echo '[x11]' >> $HOME/.numpy-site.cfg; + echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; + echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; + fi install: - conda update --yes conda - conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION - source activate testenv - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi -- conda install --yes numpy scipy nose networkx dateutil vtk +- conda install --yes numpy scipy nose networkx dateutil - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes vtk; else pip install vtk; fi - pip install python-coveralls - pip install nose-cov # Add tvtk (PIL is required by blockcanvas) diff --git a/circle.yml b/circle.yml index 6c403fc0e8..c7c23d6763 100644 --- a/circle.yml +++ b/circle.yml @@ -19,8 +19,8 @@ dependencies: - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile - echo 'source /etc/afni/afni.sh' >> $HOME/.profile # Enable system-wide vtk - - echo 'export PYTHONPATH=$PYTHONPATH:/usr/lib/pymodules/python2.7/' >> $HOME/.profile - - export PYTHONPATH=$PYTHONPATH:/usr/lib/pymodules/python2.7/ + - echo 'ln -s /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/' >> $HOME/.profile + - ln -s /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/ # Set up python environment - pip install --upgrade pip - pip install -e . 
diff --git a/requirements.txt b/requirements.txt index 7fa14bd8f5..43a30c6639 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,3 +8,4 @@ nose>=1.2 future==0.15.2 simplejson>=3.8.0 prov>=1.4.0 +xvfbwrapper \ No newline at end of file From 92d98bc6c80d6f5d37a70f938fe1941a758b65d7 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 14:09:39 -0800 Subject: [PATCH 23/56] fixed syntax error in travis file --- .travis.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8b74cf8741..2a5b3cedb8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,9 +24,10 @@ before_install: fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi +# Install vtk and fix numpy installation problem +# Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then travis_retry sudo apt-get install -qq libx11-dev swig; - # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 - echo '[x11]' >> $HOME/.numpy-site.cfg; + echo '[x11]' >> $HOME/.numpy-site.cfg; echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; fi From ffd1d4085e97b34a9e0b23d6a3ed9a2a5315b4de Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 15:14:30 -0800 Subject: [PATCH 24/56] fixed errors, added conditional doctest --- nipype/algorithms/mesh.py | 52 ++++---- nipype/interfaces/fsl/utils.py | 211 +++++++++++++++++++-------------- 2 files changed, 152 insertions(+), 111 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index ec9eca6da3..c0b6e49980 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -1,15 +1,16 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -''' +""" Miscellaneous algorithms for 2D contours and 3D triangularized meshes handling - Change directory to provide relative paths for doctests - >>> import os - >>> filepath = os.path.dirname(os.path.realpath( __file__ )) - >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) - >>> os.chdir(datadir) + .. 
testsetup:: + # Change directory to provide relative paths for doctests + import os + filepath = os.path.dirname(os.path.realpath( __file__ )) + datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) + os.chdir(datadir) -''' +""" from __future__ import division from builtins import zip @@ -44,6 +45,7 @@ class TVTKBaseInterface(BaseInterface): + """ A base class for interfaces using VTK """ _redirect_x = True @@ -66,9 +68,9 @@ def __init__(self, **inputs): class WarpPointsInputSpec(BaseInterfaceInputSpec): points = File(exists=True, mandatory=True, - desc=('file containing the point set')) + desc='file containing the point set') warp = File(exists=True, mandatory=True, - desc=('dense deformation field to be applied')) + desc='dense deformation field to be applied') interp = traits.Enum('cubic', 'nearest', 'linear', usedefault=True, mandatory=True, desc='interpolation') out_points = File(name_source='points', name_template='%s_warped', @@ -92,11 +94,19 @@ class WarpPoints(TVTKBaseInterface): Example ------- + >>> from nipype.algorithms.mesh import have_tvtk >>> from nipype.algorithms.mesh import WarpPoints - >>> wp = WarpPoints() - >>> wp.inputs.points = 'surf1.vtk' - >>> wp.inputs.warp = 'warpfield.nii' - >>> res = wp.run() # doctest: +SKIP + >>> if not have_tvtk: + ... wp = WarpPoints() + Traceback (most recent call last): + ... + ImportError: This interface requires tvtk to run. + >>> else: + ... wp = WarpPoints() + ... wp.inputs.points = 'surf1.vtk' + ... wp.inputs.warp = 'warpfield.nii' + ... res = wp.run() # doctest: +SKIP + """ input_spec = WarpPointsInputSpec output_spec = WarpPointsOutputSpec @@ -179,7 +189,7 @@ class ComputeMeshWarpInputSpec(BaseInterfaceInputSpec): desc=('Test surface (vtk format) from which compute ' 'distance.')) metric = traits.Enum('euclidean', 'sqeuclidean', usedefault=True, - desc=('norm used to report distance')) + desc='norm used to report distance') weighting = traits.Enum( 'none', 'area', usedefault=True, desc=('"none": no weighting is performed, surface": edge distance is ' @@ -256,9 +266,9 @@ def _run_interface(self, runtime): errvector = np.apply_along_axis(nla.norm, 1, diff) if self.inputs.metric == 'sqeuclidean': - errvector = errvector ** 2 + errvector **= 2 - if (self.inputs.weighting == 'area'): + if self.inputs.weighting == 'area': faces = vtk1.polys.to_array().reshape(-1, 4).astype(int)[:, 1:] for i, p1 in enumerate(points2): @@ -285,9 +295,9 @@ def _run_interface(self, runtime): file_name=op.abspath(self.inputs.out_warp)) if self._vtk_major <= 5: - writer.input = mesh + writer.input = out_mesh else: - writer.set_input_data_object(mesh) + writer.set_input_data_object(out_mesh) writer.write() @@ -311,10 +321,10 @@ class MeshWarpMathsInputSpec(BaseInterfaceInputSpec): operator = traits.Either( float_trait, File(exists=True), default=1.0, mandatory=True, - desc=('image, float or tuple of floats to act as operator')) + desc='image, float or tuple of floats to act as operator') operation = traits.Enum('sum', 'sub', 'mul', 'div', usedefault=True, - desc=('operation to be performed')) + desc='operation to be performed') out_warp = File('warp_maths.vtk', usedefault=True, desc='vtk file based on in_surf and warpings mapping it ' @@ -364,7 +374,7 @@ def _run_interface(self, runtime): points1 = np.array(vtk1.points) if vtk1.point_data.vectors is None: - raise RuntimeError(('No warping field was found in in_surf')) + raise RuntimeError('No warping field was found in in_surf') operator = self.inputs.operator opfield = np.ones_like(points1) 
diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index f9ab1442af..fda36d7d42 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -8,11 +8,12 @@ -------- See the docstrings of the individual classes for examples. - Change directory to provide relative paths for doctests - >>> import os - >>> filepath = os.path.dirname( os.path.realpath( __file__ ) ) - >>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data')) - >>> os.chdir(datadir) + .. testsetup:: + # Change directory to provide relative paths for doctests + import os + filepath = os.path.dirname(os.path.realpath( __file__ )) + datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) + os.chdir(datadir) """ from __future__ import division @@ -20,6 +21,7 @@ from builtins import range import os +import os.path as op from glob import glob import warnings import tempfile @@ -41,7 +43,7 @@ class CopyGeomInputSpec(FSLCommandInputSpec): dest_file = File(exists=True, mandatory=True, argstr="%s", position=1, desc="destination image", copyfile=True, output_name='out_file', name_source='dest_file', name_template='%s') - ignore_dims = traits.Bool(desc=('Do not copy image dimensions'), + ignore_dims = traits.Bool(desc='Do not copy image dimensions', argstr='-d', position="-1") @@ -50,6 +52,7 @@ class CopyGeomOutputSpec(TraitedSpec): class CopyGeom(FSLCommand): + """Use fslcpgeom to copy the header geometry information to another image. Copy certain parts of the header information (image dimensions, voxel dimensions, voxel dimensions units string, image orientation/origin or qform/sform info) @@ -89,7 +92,7 @@ class ImageMeantsInputSpec(FSLCommandInputSpec): argstr='-o %s', genfile=True, hash_files=False) mask = File(exists=True, desc='input 3D mask', argstr='-m %s') spatial_coord = traits.List(traits.Int, - desc=(' requested spatial coordinate ' + desc=(' requested spatial coordinate ' '(instead of mask)'), argstr='-c %s') use_mm = traits.Bool(desc=('use mm instead of voxel coordinates (for -c ' @@ -111,6 +114,7 @@ class ImageMeantsOutputSpec(TraitedSpec): class ImageMeants(FSLCommand): + """ Use fslmeants for printing the average timeseries (intensities) to the screen (or saves to a file). The average is taken over all voxels in the mask (or all voxels in the image if no mask is specified) @@ -150,8 +154,9 @@ class SmoothOutputSpec(TraitedSpec): class Smooth(FSLCommand): - '''Use fslmaths to smooth the image - ''' + + """Use fslmaths to smooth the image + """ input_spec = SmoothInputSpec output_spec = SmoothOutputSpec @@ -197,6 +202,7 @@ class MergeOutputSpec(TraitedSpec): class Merge(FSLCommand): + """Use fslmerge to concatenate images Images can be concatenated across time, x, y, or z dimensions. Across the @@ -264,6 +270,7 @@ class ExtractROIOutputSpec(TraitedSpec): class ExtractROI(FSLCommand): + """Uses FSL Fslroi command to extract region of interest (ROI) from an image. @@ -344,6 +351,7 @@ class SplitOutputSpec(TraitedSpec): class Split(FSLCommand): + """Uses FSL Fslsplit command to separate a volume into images in time, x, y or z dimension. 
""" @@ -395,6 +403,7 @@ class ImageMathsOutputSpec(TraitedSpec): class ImageMaths(FSLCommand): + """Use FSL fslmaths command to allow mathematical manipulation of images `FSL info `_ @@ -469,6 +478,7 @@ class FilterRegressorOutputSpec(TraitedSpec): class FilterRegressor(FSLCommand): + """Data de-noising by regressing out part of a design matrix Uses simple OLS regression on 4D images @@ -524,6 +534,7 @@ class ImageStatsOutputSpec(TraitedSpec): class ImageStats(FSLCommand): + """Use FSL fslstats command to calculate stats from images `FSL info `_ @@ -601,6 +612,7 @@ class AvScaleOutputSpec(TraitedSpec): class AvScale(FSLCommand): + """Use FSL avscale command to extract info from mat file output of FLIRT Examples @@ -695,6 +707,7 @@ class OverlayOutputSpec(TraitedSpec): class Overlay(FSLCommand): + """ Use FSL's overlay command to combine background and statistical images into one volume @@ -741,8 +754,8 @@ def _list_outputs(self): if isdefined(self.inputs.stat_image2) and ( not isdefined(self.inputs.show_negative_stats) or not self.inputs.show_negative_stats): - stem = "%s_and_%s" % (split_filename(self.inputs.stat_image)[1], - split_filename(self.inputs.stat_image2)[1]) + stem = "%s_and_%s" % (split_filename(self.inputs.stat_image)[1], + split_filename(self.inputs.stat_image2)[1]) else: stem = split_filename(self.inputs.stat_image)[1] out_file = self._gen_fname(stem, suffix='_overlay') @@ -799,7 +812,8 @@ class SlicerInputSpec(FSLCommandInputSpec): xor=_xor_options, requires=['image_width'], desc=('output every n axial slices into one ' 'picture')) - image_width = traits.Int(position=-2, argstr='%d', desc='max picture width') + image_width = traits.Int( + position=-2, argstr='%d', desc='max picture width') out_file = File(position=-1, genfile=True, argstr='%s', desc='picture to write', hash_files=False) scaling = traits.Float(position=0, argstr='-s %f', desc='image scale') @@ -810,6 +824,7 @@ class SlicerOutputSpec(TraitedSpec): class Slicer(FSLCommand): + """Use FSL's slicer command to output a png image from a volume. @@ -901,6 +916,7 @@ class PlotTimeSeriesOutputSpec(TraitedSpec): class PlotTimeSeries(FSLCommand): + """Use fsl_tsplot to create images of time course plots. Examples @@ -984,6 +1000,7 @@ class PlotMotionParamsOutputSpec(TraitedSpec): class PlotMotionParams(FSLCommand): + """Use fsl_tsplot to plot the estimated motion parameters from a realignment program. @@ -1097,6 +1114,7 @@ class ConvertXFMOutputSpec(TraitedSpec): class ConvertXFM(FSLCommand): + """Use the FSL utility convert_xfm to modify FLIRT transformation matrices. Examples @@ -1168,6 +1186,7 @@ class SwapDimensionsOutputSpec(TraitedSpec): class SwapDimensions(FSLCommand): + """Use fslswapdim to alter the orientation of an image. This interface accepts a three-tuple corresponding to the new @@ -1210,6 +1229,7 @@ class PowerSpectrumOutputSpec(TraitedSpec): class PowerSpectrum(FSLCommand): + """Use FSL PowerSpectrum command for power spectrum estimation. Examples @@ -1270,6 +1290,7 @@ class SigLossOuputSpec(TraitedSpec): class SigLoss(FSLCommand): + """Estimates signal loss from a field map (in rad/s) Examples @@ -1311,6 +1332,7 @@ class Reorient2StdOutputSpec(TraitedSpec): class Reorient2Std(FSLCommand): + """fslreorient2std is a tool for reorienting the image to match the approximate orientation of the standard template images (MNI152). 
@@ -1399,6 +1421,7 @@ class InvWarpOutputSpec(TraitedSpec): class InvWarp(FSLCommand): + """ Use FSL Invwarp to invert a FNIRT warp @@ -1485,6 +1508,7 @@ class ComplexOuputSpec(TraitedSpec): class Complex(FSLCommand): + """fslcomplex is a tool for converting complex data Examples @@ -1546,9 +1570,11 @@ def _list_outputs(self): outputs['complex_out_file'] = self._get_output('complex_out_file') elif self.inputs.real_cartesian: outputs['real_out_file'] = self._get_output('real_out_file') - outputs['imaginary_out_file'] = self._get_output('imaginary_out_file') + outputs['imaginary_out_file'] = self._get_output( + 'imaginary_out_file') elif self.inputs.real_polar: - outputs['magnitude_out_file'] = self._get_output('magnitude_out_file') + outputs['magnitude_out_file'] = self._get_output( + 'magnitude_out_file') outputs['phase_out_file'] = self._get_output('phase_out_file') return outputs @@ -1611,12 +1637,14 @@ class WarpUtilsInputSpec(FSLCommandInputSpec): class WarpUtilsOutputSpec(TraitedSpec): - out_file = File(desc=('Name of output file, containing the warp as field or coefficients.')) + out_file = File( + desc=('Name of output file, containing the warp as field or coefficients.')) out_jacobian = File(desc=('Name of output file, containing the map of the determinant of ' 'the Jacobian')) class WarpUtils(FSLCommand): + """Use FSL `fnirtfileutils `_ to convert field->coefficients, coefficients->field, coefficients->other_coefficients etc @@ -1669,7 +1697,7 @@ def _parse_inputs(self, skip=None): class ConvertWarpInputSpec(FSLCommandInputSpec): reference = File(exists=True, argstr='--ref=%s', mandatory=True, position=1, - desc=('Name of a file in target space of the full transform.')) + desc='Name of a file in target space of the full transform.') out_file = File(argstr='--out=%s', position=-1, name_source=['reference'], name_template='%s_concatwarp', output_name='out_file', @@ -1681,74 +1709,75 @@ class ConvertWarpInputSpec(FSLCommandInputSpec): desc='filename for pre-transform (affine matrix)') warp1 = File(exists=True, argstr='--warp1=%s', - desc=('Name of file containing initial warp-fields/coefficients (follows premat). This could e.g. be a ' - 'fnirt-transform from a subjects structural scan to an average of a group ' - 'of subjects.')) + desc='Name of file containing initial warp-fields/coefficients (follows premat). This could e.g. be a ' + 'fnirt-transform from a subjects structural scan to an average of a group ' + 'of subjects.') midmat = File(exists=True, argstr="--midmat=%s", desc="Name of file containing mid-warp-affine transform") warp2 = File(exists=True, argstr='--warp2=%s', - desc=('Name of file containing secondary warp-fields/coefficients (after warp1/midmat but before postmat). This could e.g. be a ' - 'fnirt-transform from the average of a group of subjects to some standard ' - 'space (e.g. MNI152).')) + desc='Name of file containing secondary warp-fields/coefficients (after warp1/midmat but before postmat). This could e.g. be a ' + 'fnirt-transform from the average of a group of subjects to some standard ' + 'space (e.g. MNI152).') postmat = File(exists=True, argstr='--postmat=%s', - desc=('Name of file containing an affine transform (applied last). It could e.g. be an affine ' - 'transform that maps the MNI152-space into a better approximation to the ' - 'Talairach-space (if indeed there is one).')) + desc='Name of file containing an affine transform (applied last). It could e.g. 
be an affine ' + 'transform that maps the MNI152-space into a better approximation to the ' + 'Talairach-space (if indeed there is one).') shift_in_file = File(exists=True, argstr='--shiftmap=%s', - desc=('Name of file containing a "shiftmap", a non-linear transform with ' - 'displacements only in one direction (applied first, before premat). This would typically be a ' - 'fieldmap that has been pre-processed using fugue that maps a ' - 'subjects functional (EPI) data onto an undistorted space (i.e. a space ' - 'that corresponds to his/her true anatomy).')) + desc='Name of file containing a "shiftmap", a non-linear transform with ' + 'displacements only in one direction (applied first, before premat). This would typically be a ' + 'fieldmap that has been pre-processed using fugue that maps a ' + 'subjects functional (EPI) data onto an undistorted space (i.e. a space ' + 'that corresponds to his/her true anatomy).') shift_direction = traits.Enum('y-', 'y', 'x', 'x-', 'z', 'z-', argstr="--shiftdir=%s", requires=['shift_in_file'], - desc=('Indicates the direction that the distortions from ' - '--shiftmap goes. It depends on the direction and ' - 'polarity of the phase-encoding in the EPI sequence.')) + desc='Indicates the direction that the distortions from ' + '--shiftmap goes. It depends on the direction and ' + 'polarity of the phase-encoding in the EPI sequence.') cons_jacobian = traits.Bool(False, argstr='--constrainj', - desc=('Constrain the Jacobian of the warpfield to lie within specified ' - 'min/max limits.')) + desc='Constrain the Jacobian of the warpfield to lie within specified ' + 'min/max limits.') jacobian_min = traits.Float(argstr='--jmin=%f', - desc=('Minimum acceptable Jacobian value for ' - 'constraint (default 0.01)')) + desc='Minimum acceptable Jacobian value for ' + 'constraint (default 0.01)') jacobian_max = traits.Float(argstr='--jmax=%f', - desc=('Maximum acceptable Jacobian value for ' - 'constraint (default 100.0)')) + desc='Maximum acceptable Jacobian value for ' + 'constraint (default 100.0)') abswarp = traits.Bool(argstr='--abs', xor=['relwarp'], - desc=('If set it indicates that the warps in --warp1 and --warp2 should be ' - 'interpreted as absolute. I.e. the values in --warp1/2 are the ' - 'coordinates in the next space, rather than displacements. This flag ' - 'is ignored if --warp1/2 was created by fnirt, which always creates ' - 'relative displacements.')) + desc='If set it indicates that the warps in --warp1 and --warp2 should be ' + 'interpreted as absolute. I.e. the values in --warp1/2 are the ' + 'coordinates in the next space, rather than displacements. This flag ' + 'is ignored if --warp1/2 was created by fnirt, which always creates ' + 'relative displacements.') relwarp = traits.Bool(argstr='--rel', xor=['abswarp'], - desc=('If set it indicates that the warps in --warp1/2 should be interpreted ' - 'as relative. I.e. the values in --warp1/2 are displacements from the ' - 'coordinates in the next space.')) + desc='If set it indicates that the warps in --warp1/2 should be interpreted ' + 'as relative. I.e. the values in --warp1/2 are displacements from the ' + 'coordinates in the next space.') out_abswarp = traits.Bool(argstr='--absout', xor=['out_relwarp'], - desc=('If set it indicates that the warps in --out should be absolute, i.e. ' - 'the values in --out are displacements from the coordinates in --ref.')) + desc='If set it indicates that the warps in --out should be absolute, i.e. 
' + 'the values in --out are displacements from the coordinates in --ref.') out_relwarp = traits.Bool(argstr='--relout', xor=['out_abswarp'], - desc=('If set it indicates that the warps in --out should be relative, i.e. ' - 'the values in --out are displacements from the coordinates in --ref.')) + desc='If set it indicates that the warps in --out should be relative, i.e. ' + 'the values in --out are displacements from the coordinates in --ref.') class ConvertWarpOutputSpec(TraitedSpec): out_file = File(exists=True, - desc=('Name of output file, containing the warp as field or coefficients.')) + desc='Name of output file, containing the warp as field or coefficients.') class ConvertWarp(FSLCommand): + """Use FSL `convertwarp `_ for combining multiple transforms into one. @@ -1764,7 +1793,7 @@ class ConvertWarp(FSLCommand): >>> warputils.inputs.output_type = "NIFTI_GZ" >>> warputils.cmdline # doctest: +ELLIPSIS 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' - >>> res = invwarp.run() # doctest: +SKIP + >>> res = warputils.run() # doctest: +SKIP """ @@ -1776,16 +1805,16 @@ class ConvertWarp(FSLCommand): class WarpPointsBaseInputSpec(CommandLineInputSpec): in_coords = File(exists=True, position=-1, argstr='%s', mandatory=True, - desc=('filename of file containing coordinates')) + desc='filename of file containing coordinates') xfm_file = File(exists=True, argstr='-xfm %s', xor=['warp_file'], - desc=('filename of affine transform (e.g. source2dest.mat)')) + desc='filename of affine transform (e.g. source2dest.mat)') warp_file = File(exists=True, argstr='-warp %s', xor=['xfm_file'], - desc=('filename of warpfield (e.g. ' - 'intermediate2dest_warp.nii.gz)')) + desc='filename of warpfield (e.g. ' + 'intermediate2dest_warp.nii.gz)') coord_vox = traits.Bool(True, argstr='-vox', xor=['coord_mm'], - desc=('all coordinates in voxels - default')) + desc='all coordinates in voxels - default') coord_mm = traits.Bool(False, argstr='-mm', xor=['coord_vox'], - desc=('all coordinates in mm')) + desc='all coordinates in mm') out_file = File(name_source='in_coords', name_template='%s_warped', output_name='out_file', desc='output file name') @@ -1793,17 +1822,18 @@ class WarpPointsBaseInputSpec(CommandLineInputSpec): class WarpPointsInputSpec(WarpPointsBaseInputSpec): src_file = File(exists=True, argstr='-src %s', mandatory=True, - desc=('filename of source image')) + desc='filename of source image') dest_file = File(exists=True, argstr='-dest %s', mandatory=True, - desc=('filename of destination image')) + desc='filename of destination image') class WarpPointsOutputSpec(TraitedSpec): out_file = File(exists=True, - desc=('Name of output file, containing the warp as field or coefficients.')) + desc='Name of output file, containing the warp as field or coefficients.') class WarpPoints(CommandLine): + """Use FSL `img2imgcoord `_ to transform point sets. Accepts plain text files and vtk files. 
@@ -1822,7 +1852,7 @@ class WarpPoints(CommandLine): >>> warppoints.inputs.coord_mm = True >>> warppoints.cmdline # doctest: +ELLIPSIS 'img2imgcoord -mm -dest T1.nii -src epi.nii -warp warpfield.nii surf.txt' - >>> res = invwarp.run() # doctest: +SKIP + >>> res = warppoints.run() # doctest: +SKIP """ @@ -1846,12 +1876,11 @@ def _format_arg(self, name, trait_spec, value): return super(WarpPoints, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): - import os.path as op - fname, ext = op.splitext(self.inputs.in_coords) setattr(self, '_in_file', fname) setattr(self, '_outformat', ext[1:]) - first_args = super(WarpPoints, self)._parse_inputs(skip=['in_coords', 'out_file']) + first_args = super(WarpPoints, self)._parse_inputs( + skip=['in_coords', 'out_file']) second_args = fname + '.txt' @@ -1864,9 +1893,6 @@ def _parse_inputs(self, skip=None): return first_args + [second_args] def _vtk_to_coords(self, in_file, out_file=None): - import os - import os.path as op - # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var old_ets = os.getenv('ETS_TOOLKIT') os.environ['ETS_TOOLKIT'] = 'null' @@ -1874,6 +1900,7 @@ def _vtk_to_coords(self, in_file, out_file=None): from tvtk.api import tvtk from tvtk.tvtk_classes.vtk_version import vtk_build_version except ImportError: + vtk_build_version = None raise ImportError('This interface requires tvtk to run.') finally: if old_ets is not None: @@ -1896,7 +1923,6 @@ def _vtk_to_coords(self, in_file, out_file=None): def _coords_to_vtk(self, points, out_file): import os - import os.path as op # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var old_ets = os.getenv('ETS_TOOLKIT') @@ -1905,6 +1931,7 @@ def _coords_to_vtk(self, points, out_file): from tvtk.api import tvtk from tvtk.tvtk_classes.vtk_version import vtk_build_version except ImportError: + vtk_build_version = None raise ImportError('This interface requires tvtk to run.') finally: if old_ets is not None: @@ -1927,18 +1954,14 @@ def _coords_to_vtk(self, points, out_file): writer.write() def _trk_to_coords(self, in_file, out_file=None): - try: - from nibabel.trackvis import TrackvisFile - except ImportError: - raise ImportError('This interface requires nibabel to run') - + from nibabel.trackvis import TrackvisFile trkfile = TrackvisFile.from_file(in_file) streamlines = trkfile.streamlines if out_file is None: out_file, _ = op.splitext(in_file) - np.savetxt(points, out_file + '.txt') + np.savetxt(streamlines, out_file + '.txt') return out_file + '.txt' def _coords_to_trk(self, points, out_file): @@ -1961,7 +1984,8 @@ def _run_interface(self, runtime): self._trk_to_coords(fname, out_file=tmpfile) runtime = super(WarpPoints, self)._run_interface(runtime) - newpoints = np.fromstring('\n'.join(runtime.stdout.split('\n')[1:]), sep=' ') + newpoints = np.fromstring( + '\n'.join(runtime.stdout.split('\n')[1:]), sep=' ') if tmpfile is not None: try: @@ -1992,6 +2016,7 @@ class WarpPointsToStdInputSpec(WarpPointsBaseInputSpec): class WarpPointsToStd(WarpPoints): + """ Use FSL `img2stdcoord `_ to transform point sets to standard space coordinates. 
Accepts plain text files and @@ -2012,7 +2037,7 @@ class WarpPointsToStd(WarpPoints): >>> warppoints.inputs.coord_mm = True >>> warppoints.cmdline # doctest: +ELLIPSIS 'img2stdcoord -mm -img T1.nii -std mni.nii -warp warpfield.nii surf.txt' - >>> res = invwarp.run() # doctest: +SKIP + >>> res = warppoints.run() # doctest: +SKIP """ @@ -2023,22 +2048,27 @@ class WarpPointsToStd(WarpPoints): class MotionOutliersInputSpec(FSLCommandInputSpec): - in_file = File(exists=True, mandatory=True, desc="unfiltered 4D image", argstr="-i %s") + in_file = File( + exists=True, mandatory=True, desc="unfiltered 4D image", argstr="-i %s") out_file = File(argstr="-o %s", name_source='in_file', name_template='%s_outliers.txt', keep_extension=True, desc='output outlier file name', hash_files=False) - mask = File(exists=True, argstr="-m %s", desc="mask image for calculating metric") - metric = traits.Enum('refrms', ['refrms', 'dvars', 'refmse', 'fd', 'fdrms'], argstr="--%s", desc="metrics: refrms - RMS intensity difference to reference volume as metric [default metric],\ -refmse - Mean Square Error version of refrms (used in original version of fsl_motion_outliers) \ -dvars - DVARS \ -fd - frame displacement \ -fdrms - FD with RMS matrix calculation") - threshold = traits.Float(argstr="--thresh=%g", desc="specify absolute threshold value (otherwise use box-plot cutoff = P75 + 1.5*IQR)") - no_motion_correction = traits.Bool(argstr="--nomoco", desc="do not run motion correction (assumed already done)") - dummy = traits.Int(argstr="--dummy=%d", desc='number of dummy scans to delete (before running anything and creating EVs)') + mask = File( + exists=True, argstr="-m %s", desc="mask image for calculating metric") + metric = traits.Enum( + 'refrms', ['refrms', 'dvars', 'refmse', 'fd', 'fdrms'], argstr="--%s", + desc='metrics: refrms - RMS intensity difference to reference volume as metric [default metric], ' + 'refmse - Mean Square Error version of refrms (used in original version of fsl_motion_outliers), ' + 'dvars - DVARS, fd - frame displacement, fdrms - FD with RMS matrix calculation') + threshold = traits.Float(argstr="--thresh=%g", + desc="specify absolute threshold value (otherwise use box-plot cutoff = P75 + 1.5*IQR)") + no_motion_correction = traits.Bool( + argstr="--nomoco", desc="do not run motion correction (assumed already done)") + dummy = traits.Int(argstr="--dummy=%d", + desc='number of dummy scans to delete (before running anything and creating EVs)') out_metric_values = File(argstr="-s %s", name_source='in_file', name_template='%s_metrics.txt', keep_extension=True, desc='output metric values (DVARS etc.) file name', hash_files=False) - out_metric_plot = File(argstr="-p %s", name_source='in_file', name_template='%s_metrics.png', - keep_extension=True, desc='output metric values plot (DVARS etc.) file name', hash_files=False) + out_metric_plot = File(argstr="-p %s", name_source='in_file', name_template='%s_metrics.png', hash_files=False, + keep_extension=True, desc='output metric values plot (DVARS etc.) file name') class MotionOutliersOutputSpec(TraitedSpec): @@ -2048,6 +2078,7 @@ class MotionOutliersOutputSpec(TraitedSpec): class MotionOutliers(FSLCommand): + """ Use FSL fsl_motion_outliers`http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FSLMotionOutliers`_ to find outliers in timeseries (4d) data. 
Examples From cbea5b023bd38a7d4380fffb69aea19a50beac67 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 15:31:56 -0800 Subject: [PATCH 25/56] fixing travis for python 3.0, minor in circle --- .travis.yml | 16 +++++++++------- circle.yml | 3 +-- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2a5b3cedb8..befb262e23 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,17 +38,19 @@ install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi - conda install --yes numpy scipy nose networkx dateutil - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes vtk; else pip install vtk; fi +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes vtk; fi - pip install python-coveralls - pip install nose-cov # Add tvtk (PIL is required by blockcanvas) -- pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz -- pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools -- pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas -- pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy # Install mayavi (see https://github.com/enthought/mayavi/issues/271) -- pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip -- pip install -e git+https://github.com/enthought/ets.git#egg=ets +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then + pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz + pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools + pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas + pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy + pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip + pip install -e git+https://github.com/enthought/ets.git#egg=ets + fi - pip install -r requirements.txt # finish remaining requirements - python setup.py install script: diff --git a/circle.yml b/circle.yml index c7c23d6763..632691b852 100644 --- a/circle.yml +++ b/circle.yml @@ -3,7 +3,6 @@ dependencies: - "~/.apt-cache" - "~/examples/data" - "~/examples/fsdata" - - "~/examples/feeds" - "~/mcr" - "~/spm12" - "~/examples/fsl_course_data" @@ -19,7 +18,7 @@ dependencies: - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile - echo 'source /etc/afni/afni.sh' >> $HOME/.profile # Enable system-wide vtk - - echo 'ln -s /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/' >> $HOME/.profile + - echo 'ln -sf /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/' >> $HOME/.profile - ln -s /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/ # Set up python environment - pip install --upgrade pip From db08c4fa531c9acaf0abc5279c341396cd9ca68e Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 15:41:00 -0800 Subject: [PATCH 26/56] updated doctests for the case vtk is not installed (typically python 3.0) --- .travis.yml | 12 ++++++------ nipype/algorithms/mesh.py | 32 +++++++++++++++++++++++--------- 2 files changed, 29 insertions(+), 15 deletions(-) diff --git a/.travis.yml b/.travis.yml index befb262e23..e6ade72ead 100644 --- a/.travis.yml +++ b/.travis.yml @@ -44,12 +44,12 @@ install: # 
Add tvtk (PIL is required by blockcanvas) # Install mayavi (see https://github.com/enthought/mayavi/issues/271) - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then - pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz - pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas - pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy - pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip - pip install -e git+https://github.com/enthought/ets.git#egg=ets + pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz; + pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools; + pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas; + pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy; + pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip; + pip install -e git+https://github.com/enthought/ets.git#egg=ets; fi - pip install -r requirements.txt # finish remaining requirements - python setup.py install diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index c0b6e49980..f00249c334 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -224,11 +224,18 @@ class ComputeMeshWarp(TVTKBaseInterface): Example ------- + >>> from nipype.algorithms.mesh import have_tvtk >>> import nipype.algorithms.mesh as m - >>> dist = m.ComputeMeshWarp() - >>> dist.inputs.surface1 = 'surf1.vtk' - >>> dist.inputs.surface2 = 'surf2.vtk' - >>> res = dist.run() # doctest: +SKIP + >>> if not have_tvtk: + ... dist = m.ComputeMeshWarp() + Traceback (most recent call last): + ... + ImportError: This interface requires tvtk to run. + >>> else: + ... dist = m.ComputeMeshWarp() + ... dist.inputs.surface1 = 'surf1.vtk' + ... dist.inputs.surface2 = 'surf2.vtk' + ... res = dist.run() # doctest: +SKIP """ @@ -355,12 +362,19 @@ class MeshWarpMaths(TVTKBaseInterface): Example ------- + >>> from nipype.algorithms.mesh import have_tvtk >>> import nipype.algorithms.mesh as m - >>> mmath = m.MeshWarpMaths() - >>> mmath.inputs.in_surf = 'surf1.vtk' - >>> mmath.inputs.operator = 'surf2.vtk' - >>> mmath.inputs.operation = 'mul' - >>> res = mmath.run() # doctest: +SKIP + >>> if not have_tvtk: + ... mmath = m.MeshWarpMaths() + Traceback (most recent call last): + ... + ImportError: This interface requires tvtk to run. + >>> else: + ... mmath = m.MeshWarpMaths() + ... mmath.inputs.in_surf = 'surf1.vtk' + ... mmath.inputs.operator = 'surf2.vtk' + ... mmath.inputs.operation = 'mul' + ... 
res = mmath.run() # doctest: +SKIP """ From 7c7b71904e55da44da57df1c1d57450505a1c21c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 16:07:55 -0800 Subject: [PATCH 27/56] fixed doctests --- circle.yml | 2 +- nipype/algorithms/mesh.py | 28 +++++++++++++++++++--------- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/circle.yml b/circle.yml index 632691b852..a4f4aa8d81 100644 --- a/circle.yml +++ b/circle.yml @@ -19,7 +19,7 @@ dependencies: - echo 'source /etc/afni/afni.sh' >> $HOME/.profile # Enable system-wide vtk - echo 'ln -sf /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/' >> $HOME/.profile - - ln -s /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/ + - ln -sf /usr/lib/pymodules/python2.7/vtk ~/virtualenvs/venv-system/lib/python2.7/site-packages/ # Set up python environment - pip install --upgrade pip - pip install -e . diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index f00249c334..a66e4ee02f 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -98,14 +98,18 @@ class WarpPoints(TVTKBaseInterface): >>> from nipype.algorithms.mesh import WarpPoints >>> if not have_tvtk: ... wp = WarpPoints() - Traceback (most recent call last): - ... - ImportError: This interface requires tvtk to run. >>> else: ... wp = WarpPoints() ... wp.inputs.points = 'surf1.vtk' ... wp.inputs.warp = 'warpfield.nii' ... res = wp.run() # doctest: +SKIP + # Exceptions cannot be tested conditionally, so raise error + # https://docs.python.org/2/library/doctest.html#id2 + ... raise ImportError('This interface requires tvtk to run.') + Traceback (most recent call last): + ... + ImportError: This interface requires tvtk to run. + """ input_spec = WarpPointsInputSpec @@ -228,14 +232,17 @@ class ComputeMeshWarp(TVTKBaseInterface): >>> import nipype.algorithms.mesh as m >>> if not have_tvtk: ... dist = m.ComputeMeshWarp() - Traceback (most recent call last): - ... - ImportError: This interface requires tvtk to run. >>> else: ... dist = m.ComputeMeshWarp() ... dist.inputs.surface1 = 'surf1.vtk' ... dist.inputs.surface2 = 'surf2.vtk' ... res = dist.run() # doctest: +SKIP + # Exceptions cannot be tested conditionally, so raise error + # https://docs.python.org/2/library/doctest.html#id2 + ... raise ImportError('This interface requires tvtk to run.') + Traceback (most recent call last): + ... + ImportError: This interface requires tvtk to run. """ @@ -366,15 +373,18 @@ class MeshWarpMaths(TVTKBaseInterface): >>> import nipype.algorithms.mesh as m >>> if not have_tvtk: ... mmath = m.MeshWarpMaths() - Traceback (most recent call last): - ... - ImportError: This interface requires tvtk to run. >>> else: ... mmath = m.MeshWarpMaths() ... mmath.inputs.in_surf = 'surf1.vtk' ... mmath.inputs.operator = 'surf2.vtk' ... mmath.inputs.operation = 'mul' ... res = mmath.run() # doctest: +SKIP + # Exceptions cannot be tested conditionally, so raise error + # https://docs.python.org/2/library/doctest.html#id2 + ... raise ImportError('This interface requires tvtk to run.') + Traceback (most recent call last): + ... + ImportError: This interface requires tvtk to run. 
""" From 893818763a87c7eeac776cf37cda212945aff6c0 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 16:35:38 -0800 Subject: [PATCH 28/56] removed doctests, added exception testing when vtk is unavailable --- nipype/algorithms/mesh.py | 84 ++++++------------- nipype/algorithms/tests/test_mesh_ops.py | 100 +++++++++++------------ 2 files changed, 71 insertions(+), 113 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index a66e4ee02f..120251b603 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -5,10 +5,10 @@ .. testsetup:: # Change directory to provide relative paths for doctests - import os - filepath = os.path.dirname(os.path.realpath( __file__ )) - datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) - os.chdir(datadir) + >>> import os + >>> filepath = os.path.dirname(os.path.realpath( __file__ )) + >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data')) + >>> os.chdir(datadir) """ from __future__ import division @@ -91,25 +91,13 @@ class WarpPoints(TVTKBaseInterface): ``warp`` file. FSL interfaces are compatible, for instance any field computed with :class:`nipype.interfaces.fsl.utils.ConvertWarp`. - Example - ------- - - >>> from nipype.algorithms.mesh import have_tvtk - >>> from nipype.algorithms.mesh import WarpPoints - >>> if not have_tvtk: - ... wp = WarpPoints() - >>> else: - ... wp = WarpPoints() - ... wp.inputs.points = 'surf1.vtk' - ... wp.inputs.warp = 'warpfield.nii' - ... res = wp.run() # doctest: +SKIP - # Exceptions cannot be tested conditionally, so raise error - # https://docs.python.org/2/library/doctest.html#id2 - ... raise ImportError('This interface requires tvtk to run.') - Traceback (most recent call last): - ... - ImportError: This interface requires tvtk to run. + Example:: + from nipype.algorithms.mesh import WarpPoints + wp = WarpPoints() + wp.inputs.points = 'surf1.vtk' + wp.inputs.warp = 'warpfield.nii' + res = wp.run() """ input_spec = WarpPointsInputSpec @@ -225,24 +213,13 @@ class ComputeMeshWarp(TVTKBaseInterface): A point-to-point correspondence between surfaces is required - Example - ------- - - >>> from nipype.algorithms.mesh import have_tvtk - >>> import nipype.algorithms.mesh as m - >>> if not have_tvtk: - ... dist = m.ComputeMeshWarp() - >>> else: - ... dist = m.ComputeMeshWarp() - ... dist.inputs.surface1 = 'surf1.vtk' - ... dist.inputs.surface2 = 'surf2.vtk' - ... res = dist.run() # doctest: +SKIP - # Exceptions cannot be tested conditionally, so raise error - # https://docs.python.org/2/library/doctest.html#id2 - ... raise ImportError('This interface requires tvtk to run.') - Traceback (most recent call last): - ... - ImportError: This interface requires tvtk to run. + Example:: + + import nipype.algorithms.mesh as m + dist = m.ComputeMeshWarp() + dist.inputs.surface1 = 'surf1.vtk' + dist.inputs.surface2 = 'surf2.vtk' + res = dist.run() """ @@ -366,25 +343,14 @@ class MeshWarpMaths(TVTKBaseInterface): A point-to-point correspondence between surfaces is required - Example - ------- - - >>> from nipype.algorithms.mesh import have_tvtk - >>> import nipype.algorithms.mesh as m - >>> if not have_tvtk: - ... mmath = m.MeshWarpMaths() - >>> else: - ... mmath = m.MeshWarpMaths() - ... mmath.inputs.in_surf = 'surf1.vtk' - ... mmath.inputs.operator = 'surf2.vtk' - ... mmath.inputs.operation = 'mul' - ... 
res = mmath.run() # doctest: +SKIP - # Exceptions cannot be tested conditionally, so raise error - # https://docs.python.org/2/library/doctest.html#id2 - ... raise ImportError('This interface requires tvtk to run.') - Traceback (most recent call last): - ... - ImportError: This interface requires tvtk to run. + Example:: + + import nipype.algorithms.mesh as m + mmath = m.MeshWarpMaths() + mmath.inputs.in_surf = 'surf1.vtk' + mmath.inputs.operator = 'surf2.vtk' + mmath.inputs.operation = 'mul' + res = mmath.run() """ diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 29d535bd2f..80c4e09c44 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -6,81 +6,73 @@ from shutil import rmtree from tempfile import mkdtemp -from nipype.testing import (assert_equal, skipif, +from nipype.testing import (assert_equal, assert_raises, skipif, assert_almost_equal, example_data) import numpy as np from nipype.algorithms import mesh as m +from nipype.algorithms.mesh import have_vtk import platform -notvtk = True -if 'darwin' not in platform.system().lower(): - old_ets = os.getenv('ETS_TOOLKIT') - os.environ['ETS_TOOLKIT'] = 'null' - have_tvtk = False - try: - from tvtk.api import tvtk - notvtk = False - except ImportError: - pass - finally: - if old_ets is not None: - os.environ['ETS_TOOLKIT'] = old_ets - else: - del os.environ['ETS_TOOLKIT'] - - -@skipif(notvtk) + def test_ident_distances(): tempdir = mkdtemp() curdir = os.getcwd() os.chdir(tempdir) - in_surf = example_data('surf01.vtk') - dist_ident = m.ComputeMeshWarp() - dist_ident.inputs.surface1 = in_surf - dist_ident.inputs.surface2 = in_surf - dist_ident.inputs.out_file = os.path.join(tempdir, 'distance.npy') - res = dist_ident.run() - yield assert_equal, res.outputs.distance, 0.0 - - dist_ident.inputs.weighting = 'area' - res = dist_ident.run() - yield assert_equal, res.outputs.distance, 0.0 + + if not have_vtk: + yield assert_raises, ImportError, m.ComputeMeshWarp + else: + in_surf = example_data('surf01.vtk') + dist_ident = m.ComputeMeshWarp() + dist_ident.inputs.surface1 = in_surf + dist_ident.inputs.surface2 = in_surf + dist_ident.inputs.out_file = os.path.join(tempdir, 'distance.npy') + res = dist_ident.run() + yield assert_equal, res.outputs.distance, 0.0 + + dist_ident.inputs.weighting = 'area' + res = dist_ident.run() + yield assert_equal, res.outputs.distance, 0.0 os.chdir(curdir) rmtree(tempdir) -@skipif(notvtk) def test_trans_distances(): tempdir = mkdtemp() - in_surf = example_data('surf01.vtk') - warped_surf = os.path.join(tempdir, 'warped.vtk') - curdir = os.getcwd() os.chdir(tempdir) - inc = np.array([0.7, 0.3, -0.2]) - - r1 = tvtk.PolyDataReader(file_name=in_surf) - vtk1 = r1.output - r1.update() - vtk1.points = np.array(vtk1.points) + inc - - writer = tvtk.PolyDataWriter(file_name=warped_surf) - writer.set_input_data(vtk1) - writer.write() - - dist = m.ComputeMeshWarp() - dist.inputs.surface1 = in_surf - dist.inputs.surface2 = warped_surf - dist.inputs.out_file = os.path.join(tempdir, 'distance.npy') - res = dist.run() - yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 - dist.inputs.weighting = 'area' - res = dist.run() - yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 + + if not have_vtk: + yield assert_raises, ImportError, m.ComputeMeshWarp + else: + from nipype.algorithms.mesh import tvtk + in_surf = example_data('surf01.vtk') + warped_surf = os.path.join(tempdir, 'warped.vtk') + + inc 
= np.array([0.7, 0.3, -0.2]) + + r1 = tvtk.PolyDataReader(file_name=in_surf) + vtk1 = r1.output + r1.update() + vtk1.points = np.array(vtk1.points) + inc + + writer = tvtk.PolyDataWriter(file_name=warped_surf) + writer.set_input_data(vtk1) + writer.write() + + dist = m.ComputeMeshWarp() + dist.inputs.surface1 = in_surf + dist.inputs.surface2 = warped_surf + dist.inputs.out_file = os.path.join(tempdir, 'distance.npy') + res = dist.run() + yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 + dist.inputs.weighting = 'area' + res = dist.run() + yield assert_almost_equal, res.outputs.distance, np.linalg.norm(inc), 4 os.chdir(curdir) rmtree(tempdir) From a9c039e4f09244091fee6d899601644500ddd848 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 22:04:51 -0800 Subject: [PATCH 29/56] fixed mesh tests --- nipype/algorithms/mesh.py | 5 ++ nipype/algorithms/tests/test_auto_ErrorMap.py | 35 -------------- nipype/algorithms/tests/test_auto_Overlap.py | 47 ------------------- nipype/algorithms/tests/test_mesh_ops.py | 5 +- 4 files changed, 7 insertions(+), 85 deletions(-) delete mode 100644 nipype/algorithms/tests/test_auto_ErrorMap.py delete mode 100644 nipype/algorithms/tests/test_auto_Overlap.py diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 120251b603..25c5758671 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -44,6 +44,11 @@ del os.environ['ETS_TOOLKIT'] +def no_tvtk(): + global have_tvtk + return not have_tvtk + + class TVTKBaseInterface(BaseInterface): """ A base class for interfaces using VTK """ diff --git a/nipype/algorithms/tests/test_auto_ErrorMap.py b/nipype/algorithms/tests/test_auto_ErrorMap.py deleted file mode 100644 index 69484529dd..0000000000 --- a/nipype/algorithms/tests/test_auto_ErrorMap.py +++ /dev/null @@ -1,35 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from ...testing import assert_equal -from ..metrics import ErrorMap - - -def test_ErrorMap_inputs(): - input_map = dict(ignore_exception=dict(nohash=True, - usedefault=True, - ), - in_ref=dict(mandatory=True, - ), - in_tst=dict(mandatory=True, - ), - mask=dict(), - metric=dict(mandatory=True, - usedefault=True, - ), - out_map=dict(), - ) - inputs = ErrorMap.input_spec() - - for key, metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - yield assert_equal, getattr(inputs.traits()[key], metakey), value - - -def test_ErrorMap_outputs(): - output_map = dict(distance=dict(), - out_map=dict(), - ) - outputs = ErrorMap.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - yield assert_equal, getattr(outputs.traits()[key], metakey), value diff --git a/nipype/algorithms/tests/test_auto_Overlap.py b/nipype/algorithms/tests/test_auto_Overlap.py deleted file mode 100644 index a5a3874bd1..0000000000 --- a/nipype/algorithms/tests/test_auto_Overlap.py +++ /dev/null @@ -1,47 +0,0 @@ -# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT -from ...testing import assert_equal -from ..misc import Overlap - - -def test_Overlap_inputs(): - input_map = dict(bg_overlap=dict(mandatory=True, - usedefault=True, - ), - ignore_exception=dict(nohash=True, - usedefault=True, - ), - mask_volume=dict(), - out_file=dict(usedefault=True, - ), - vol_units=dict(mandatory=True, - usedefault=True, - ), - volume1=dict(mandatory=True, - ), - volume2=dict(mandatory=True, - ), - weighting=dict(usedefault=True, - ), - ) - inputs = Overlap.input_spec() - - for key, 
metadata in list(input_map.items()): - for metakey, value in list(metadata.items()): - yield assert_equal, getattr(inputs.traits()[key], metakey), value - - -def test_Overlap_outputs(): - output_map = dict(dice=dict(), - diff_file=dict(), - jaccard=dict(), - labels=dict(), - roi_di=dict(), - roi_ji=dict(), - roi_voldiff=dict(), - volume_difference=dict(), - ) - outputs = Overlap.output_spec() - - for key, metadata in list(output_map.items()): - for metakey, value in list(metadata.items()): - yield assert_equal, getattr(outputs.traits()[key], metakey), value diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 80c4e09c44..7c05217c11 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -12,7 +12,6 @@ import numpy as np from nipype.algorithms import mesh as m -from nipype.algorithms.mesh import have_vtk import platform @@ -22,7 +21,7 @@ def test_ident_distances(): curdir = os.getcwd() os.chdir(tempdir) - if not have_vtk: + if m.no_tvtk(): yield assert_raises, ImportError, m.ComputeMeshWarp else: in_surf = example_data('surf01.vtk') @@ -46,7 +45,7 @@ def test_trans_distances(): curdir = os.getcwd() os.chdir(tempdir) - if not have_vtk: + if m.no_tvtk(): yield assert_raises, ImportError, m.ComputeMeshWarp else: from nipype.algorithms.mesh import tvtk From 6b08b02b469a7933c7798165c6d74ac79cd502d5 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 27 Jan 2016 22:13:37 -0800 Subject: [PATCH 30/56] added base for two regression tests --- nipype/algorithms/tests/test_mesh_ops.py | 28 ++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 7c05217c11..947f0a53ac 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -75,3 +75,31 @@ def test_trans_distances(): os.chdir(curdir) rmtree(tempdir) + + +def test_warppoints(): + tempdir = mkdtemp() + curdir = os.getcwd() + os.chdir(tempdir) + + if m.no_tvtk(): + yield assert_raises, ImportError, m.WarpPoints + + # TODO: include regression tests for when tvtk is installed + + os.chdir(curdir) + rmtree(tempdir) + + +def test_meshwarpmaths(): + tempdir = mkdtemp() + curdir = os.getcwd() + os.chdir(tempdir) + + if m.no_tvtk(): + yield assert_raises, ImportError, m.MeshWarpMaths + + # TODO: include regression tests for when tvtk is installed + + os.chdir(curdir) + rmtree(tempdir) From a94158b12c093928f7f4911e7e1e4e23f6fe9b8d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 28 Jan 2016 11:02:24 -0800 Subject: [PATCH 31/56] added xvfbwrapper to requirements, improved vtk handling --- nipype/algorithms/mesh.py | 21 +++++++++++---------- nipype/info.py | 3 ++- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 25c5758671..db487c6776 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -54,22 +54,23 @@ class TVTKBaseInterface(BaseInterface): """ A base class for interfaces using VTK """ _redirect_x = True - _vtk_major = 5 + _vtk_version = (4, 0, 0) def __init__(self, **inputs): - if not have_tvtk: + if no_tvtk(): raise ImportError('This interface requires tvtk to run.') - # Identify VTK version major, use 5.0 if failed try: from tvtk.tvtk_classes.vtk_version import vtk_build_version - self._vtk_major = int(vtk_build_version[0]) + self._vtk_version = tuple([int(v) for v in vtk_build_version.split('.')]) except 
ImportError: iflogger.warning( - 'VTK version-major inspection using tvtk failed, assuming VTK <= 5.0.') - + 'VTK version-major inspection using tvtk failed, assuming VTK == 4.0.') super(TVTKBaseInterface, self).__init__(**inputs) + def version(self): + return self._vtk_version + class WarpPointsInputSpec(BaseInterfaceInputSpec): points = File(exists=True, mandatory=True, @@ -158,7 +159,7 @@ def _run_interface(self, runtime): newpoints = [p + d for p, d in zip(points, disps)] mesh.points = newpoints w = tvtk.PolyDataWriter() - if self._vtk_major <= 5: + if self.version()[0] < 6: w.input = mesh else: w.set_input_data_object(mesh) @@ -290,7 +291,7 @@ def _run_interface(self, runtime): writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_warp)) - if self._vtk_major <= 5: + if self.version()[0] < 6: writer.input = out_mesh else: writer.set_input_data_object(out_mesh) @@ -411,7 +412,7 @@ def _run_interface(self, runtime): vtk1.point_data.vectors = warping writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_warp)) - if self._vtk_major <= 5: + if self.version()[0] < 6: writer.input = vtk1 else: writer.set_input_data_object(vtk1) @@ -422,7 +423,7 @@ def _run_interface(self, runtime): writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_file)) - if self._vtk_major <= 5: + if self.version()[0] < 6: writer.input = vtk1 else: writer.set_input_data_object(vtk1) diff --git a/nipype/info.py b/nipype/info.py index 14252c2fdf..aa9db1a4b6 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -140,5 +140,6 @@ def get_nipype_gitversion(): "nose>=%s" % NOSE_MIN_VERSION, "future>=%s" % FUTURE_MIN_VERSION, "simplejson>=%s" % SIMPLEJSON_MIN_VERSION, - "prov>=%s" % PROV_MIN_VERSION] + "prov>=%s" % PROV_MIN_VERSION, + "xvfbwrapper"] STATUS = 'stable' From 8baba45a154c50a316a69b58ee40868bf4f14849 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 28 Jan 2016 11:36:54 -0800 Subject: [PATCH 32/56] improved VTK version parsing --- nipype/algorithms/mesh.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index db487c6776..0d13626e2a 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -62,7 +62,8 @@ def __init__(self, **inputs): try: from tvtk.tvtk_classes.vtk_version import vtk_build_version - self._vtk_version = tuple([int(v) for v in vtk_build_version.split('.')]) + vsplits = vtk_build_version.split('.') + self._vtk_version = tuple([int(vsplits[0]), int(vsplits[1])] + vsplits[2:]) except ImportError: iflogger.warning( 'VTK version-major inspection using tvtk failed, assuming VTK == 4.0.') From 9339e5fdcf1e459fd8e337eaa1d8916261a74dce Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 28 Jan 2016 16:48:11 -0800 Subject: [PATCH 33/56] encapsulated VTK Info in class --- nipype/algorithms/mesh.py | 52 +++++++++++++++--------- nipype/algorithms/tests/test_mesh_ops.py | 13 ++++-- nipype/interfaces/fsl/utils.py | 45 +++++++------------- 3 files changed, 56 insertions(+), 54 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 0d13626e2a..1fd0c87441 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -44,9 +44,32 @@ del os.environ['ETS_TOOLKIT'] -def no_tvtk(): - global have_tvtk - return not have_tvtk +class Info(object): + """ Handle VTK version information """ + _vtk_version = None + + @staticmethod + def vtk_version(): + """ Get VTK version """ + if not Info.no_tvtk(): + return None + + if Info._vtk_version is None: + try: 
+ from tvtk.tvtk_classes.vtk_version import vtk_build_version + vsplits = vtk_build_version.split('.') + Info._vtk_version = tuple([int(vsplits[0]), int(vsplits[1])] + vsplits[2:]) + except ImportError: + iflogger.warning( + 'VTK version-major inspection using tvtk failed, assuming VTK == 4.0.') + Info._vtk_version = (4, 0) + + return Info._vtk_version + + @staticmethod + def no_tvtk(): + global have_tvtk + return not have_tvtk class TVTKBaseInterface(BaseInterface): @@ -54,23 +77,14 @@ class TVTKBaseInterface(BaseInterface): """ A base class for interfaces using VTK """ _redirect_x = True - _vtk_version = (4, 0, 0) def __init__(self, **inputs): - if no_tvtk(): + if Info.no_tvtk(): raise ImportError('This interface requires tvtk to run.') - - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - vsplits = vtk_build_version.split('.') - self._vtk_version = tuple([int(vsplits[0]), int(vsplits[1])] + vsplits[2:]) - except ImportError: - iflogger.warning( - 'VTK version-major inspection using tvtk failed, assuming VTK == 4.0.') super(TVTKBaseInterface, self).__init__(**inputs) - def version(self): - return self._vtk_version + def vtk_version(self): + return Info.vtk_version() class WarpPointsInputSpec(BaseInterfaceInputSpec): @@ -160,7 +174,7 @@ def _run_interface(self, runtime): newpoints = [p + d for p, d in zip(points, disps)] mesh.points = newpoints w = tvtk.PolyDataWriter() - if self.version()[0] < 6: + if self.vtk_version()[0] < 6: w.input = mesh else: w.set_input_data_object(mesh) @@ -292,7 +306,7 @@ def _run_interface(self, runtime): writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_warp)) - if self.version()[0] < 6: + if self.vtk_version()[0] < 6: writer.input = out_mesh else: writer.set_input_data_object(out_mesh) @@ -413,7 +427,7 @@ def _run_interface(self, runtime): vtk1.point_data.vectors = warping writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_warp)) - if self.version()[0] < 6: + if self.vtk_version()[0] < 6: writer.input = vtk1 else: writer.set_input_data_object(vtk1) @@ -424,7 +438,7 @@ def _run_interface(self, runtime): writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_file)) - if self.version()[0] < 6: + if self.vtk_version()[0] < 6: writer.input = vtk1 else: writer.set_input_data_object(vtk1) diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 947f0a53ac..407dd83dbb 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -45,7 +45,7 @@ def test_trans_distances(): curdir = os.getcwd() os.chdir(tempdir) - if m.no_tvtk(): + if m.Info.no_tvtk(): yield assert_raises, ImportError, m.ComputeMeshWarp else: from nipype.algorithms.mesh import tvtk @@ -60,7 +60,12 @@ def test_trans_distances(): vtk1.points = np.array(vtk1.points) + inc writer = tvtk.PolyDataWriter(file_name=warped_surf) - writer.set_input_data(vtk1) + + if m.Info.vtk_version() < 6: + writer.set_input(vtk1) + else: + writer.set_input_data_object(vtk1) + writer.write() dist = m.ComputeMeshWarp() @@ -82,7 +87,7 @@ def test_warppoints(): curdir = os.getcwd() os.chdir(tempdir) - if m.no_tvtk(): + if m.Info.no_tvtk(): yield assert_raises, ImportError, m.WarpPoints # TODO: include regression tests for when tvtk is installed @@ -96,7 +101,7 @@ def test_meshwarpmaths(): curdir = os.getcwd() os.chdir(tempdir) - if m.no_tvtk(): + if m.Info.no_tvtk(): yield assert_raises, ImportError, m.MeshWarpMaths # TODO: include regression tests for when tvtk is installed diff 
--git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index fda36d7d42..7a33e651c1 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -34,6 +34,8 @@ from ...utils.filemanip import (load_json, save_json, split_filename, fname_presuffix, copyfile) +from ...algorithms.mesh import Info as VTKInfo + warn = warnings.warn @@ -1872,8 +1874,8 @@ def __init__(self, command=None, **inputs): def _format_arg(self, name, trait_spec, value): if name == 'out_file': return '' - else: - return super(WarpPoints, self)._format_arg(name, trait_spec, value) + + return super(WarpPoints, self)._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): fname, ext = op.splitext(self.inputs.in_coords) @@ -1894,21 +1896,12 @@ def _parse_inputs(self, skip=None): def _vtk_to_coords(self, in_file, out_file=None): # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var - old_ets = os.getenv('ETS_TOOLKIT') - os.environ['ETS_TOOLKIT'] = 'null' - try: - from tvtk.api import tvtk - from tvtk.tvtk_classes.vtk_version import vtk_build_version - except ImportError: - vtk_build_version = None - raise ImportError('This interface requires tvtk to run.') - finally: - if old_ets is not None: - os.environ['ETS_TOOLKIT'] = old_ets - else: - del os.environ['ETS_TOOLKIT'] + if VTKInfo.no_tvtk(): + raise ImportError('TVTK is required and tvtk package was not found') - vtk_major = int(vtk_build_version[0]) + from ...algorithms.mesh import tvtk + + vtk_major = VTKInfo.vtk_version()[0] reader = tvtk.PolyDataReader(file_name=in_file + '.vtk') reader.update() @@ -1923,23 +1916,13 @@ def _vtk_to_coords(self, in_file, out_file=None): def _coords_to_vtk(self, points, out_file): import os - # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var - old_ets = os.getenv('ETS_TOOLKIT') - os.environ['ETS_TOOLKIT'] = 'null' - try: - from tvtk.api import tvtk - from tvtk.tvtk_classes.vtk_version import vtk_build_version - except ImportError: - vtk_build_version = None - raise ImportError('This interface requires tvtk to run.') - finally: - if old_ets is not None: - os.environ['ETS_TOOLKIT'] = old_ets - else: - del os.environ['ETS_TOOLKIT'] + if VTKInfo.no_tvtk(): + raise ImportError('TVTK is required and tvtk package was not found') + + from ...algorithms.mesh import tvtk - vtk_major = int(vtk_build_version[0]) + vtk_major = VTKInfo.vtk_version()[0] reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() From b7ba47f392440481490a2bfcba0e19823198ff86 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 28 Jan 2016 17:59:12 -0800 Subject: [PATCH 34/56] use tvtk appropriate tools whenever possible --- nipype/algorithms/mesh.py | 56 +++++++++--------------- nipype/algorithms/tests/test_mesh_ops.py | 13 +++--- nipype/interfaces/fsl/utils.py | 14 +++--- 3 files changed, 32 insertions(+), 51 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 1fd0c87441..02de4e6429 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -144,10 +144,12 @@ def _run_interface(self, runtime): import nibabel as nb import numpy as np from scipy import ndimage + from tvtk.common import configure_input_data + from tvtk.common import is_old_pipeline as vtk_old r = tvtk.PolyDataReader(file_name=self.inputs.points) r.update() - mesh = r.output + mesh = r.output if vtk_old() else r.get_output() points = np.array(mesh.points) warp_dims = nb.funcs.four_to_three(nb.load(self.inputs.warp)) @@ -174,21 +176,14 @@ def 
_run_interface(self, runtime): newpoints = [p + d for p, d in zip(points, disps)] mesh.points = newpoints w = tvtk.PolyDataWriter() - if self.vtk_version()[0] < 6: - w.input = mesh - else: - w.set_input_data_object(mesh) - - w.file_name = self._gen_fname(self.inputs.points, - suffix='warped', - ext='.vtk') + configure_input_data(w, mesh) + w.file_name = self._gen_fname(self.inputs.points, suffix='warped', ext='.vtk') w.write() return runtime def _list_outputs(self): outputs = self._outputs().get() - outputs['out_points'] = self._gen_fname(self.inputs.points, - suffix='warped', + outputs['out_points'] = self._gen_fname(self.inputs.points, suffix='warped', ext='.vtk') return outputs @@ -258,10 +253,13 @@ def _triangle_area(self, A, B, C): return area def _run_interface(self, runtime): + from tvtk.common import configure_input_data + from tvtk.common import is_old_pipeline as vtk_old + r1 = tvtk.PolyDataReader(file_name=self.inputs.surface1) r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) - vtk1 = r1.output - vtk2 = r2.output + vtk1 = r1.output if vtk_old() else r1.get_output() + vtk2 = r2.output if vtk_old() else r2.get_output() r1.update() r2.update() assert(len(vtk1.points) == len(vtk2.points)) @@ -305,12 +303,7 @@ def _run_interface(self, runtime): out_mesh.point_data.vectors.name = 'warpings' writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_warp)) - - if self.vtk_version()[0] < 6: - writer.input = out_mesh - else: - writer.set_input_data_object(out_mesh) - + configure_input_data(writer, out_mesh) writer.write() self._distance = np.average(errvector, weights=weights) @@ -379,8 +372,11 @@ class MeshWarpMaths(TVTKBaseInterface): output_spec = MeshWarpMathsOutputSpec def _run_interface(self, runtime): + from tvtk.common import configure_input_data + from tvtk.common import is_old_pipeline as vtk_old + r1 = tvtk.PolyDataReader(file_name=self.inputs.in_surf) - vtk1 = r1.output + vtk1 = r1.output if vtk_old() else r1.get_output() r1.update() points1 = np.array(vtk1.points) @@ -392,7 +388,7 @@ def _run_interface(self, runtime): if isinstance(operator, string_types): r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) - vtk2 = r2.output + vtk2 = r2.output if vtk_old() else r2.get_output() r2.update() assert(len(points1) == len(vtk2.points)) @@ -425,25 +421,15 @@ def _run_interface(self, runtime): warping /= opfield vtk1.point_data.vectors = warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_warp)) - if self.vtk_version()[0] < 6: - writer.input = vtk1 - else: - writer.set_input_data_object(vtk1) + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) + configure_input_data(writer, vtk1) writer.write() vtk1.point_data.vectors = None vtk1.points = points1 + warping - writer = tvtk.PolyDataWriter( - file_name=op.abspath(self.inputs.out_file)) - - if self.vtk_version()[0] < 6: - writer.input = vtk1 - else: - writer.set_input_data_object(vtk1) + writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_file)) + configure_input_data(writer, vtk1) writer.write() - return runtime def _list_outputs(self): diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index 407dd83dbb..a754a97c1f 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -21,7 +21,7 @@ def test_ident_distances(): curdir = os.getcwd() os.chdir(tempdir) - if m.no_tvtk(): + if m.Info.no_tvtk(): yield assert_raises, ImportError, m.ComputeMeshWarp else: in_surf = 
example_data('surf01.vtk') @@ -49,23 +49,20 @@ def test_trans_distances(): yield assert_raises, ImportError, m.ComputeMeshWarp else: from nipype.algorithms.mesh import tvtk + from tvtk.common import is_old_pipeline as vtk_old + from tvtk.common import configure_input_data in_surf = example_data('surf01.vtk') warped_surf = os.path.join(tempdir, 'warped.vtk') inc = np.array([0.7, 0.3, -0.2]) r1 = tvtk.PolyDataReader(file_name=in_surf) - vtk1 = r1.output + vtk1 = r1.output if vtk_old() else r1.get_output() r1.update() vtk1.points = np.array(vtk1.points) + inc writer = tvtk.PolyDataWriter(file_name=warped_surf) - - if m.Info.vtk_version() < 6: - writer.set_input(vtk1) - else: - writer.set_input_data_object(vtk1) - + configure_input_data(writer, vtk1) writer.write() dist = m.ComputeMeshWarp() diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 7a33e651c1..55467b613c 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -1900,12 +1900,12 @@ def _vtk_to_coords(self, in_file, out_file=None): raise ImportError('TVTK is required and tvtk package was not found') from ...algorithms.mesh import tvtk + from tvtk.common import is_old_pipeline as vtk_old - vtk_major = VTKInfo.vtk_version()[0] reader = tvtk.PolyDataReader(file_name=in_file + '.vtk') reader.update() - mesh = reader.output if vtk_major < 6 else reader.get_output() + mesh = reader.output if vtk_old() else reader.get_output() points = mesh.points if out_file is None: @@ -1921,19 +1921,17 @@ def _coords_to_vtk(self, points, out_file): raise ImportError('TVTK is required and tvtk package was not found') from ...algorithms.mesh import tvtk + from tvtk.common import is_old_pipeline as vtk_old + from tvtk.common import configure_input_data - vtk_major = VTKInfo.vtk_version()[0] reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() - mesh = reader.output if vtk_major < 6 else reader.get_output() + mesh = reader.output if vtk_old() else reader.get_output() mesh.points = points writer = tvtk.PolyDataWriter(file_name=out_file) - if vtk_major < 6: - writer.input = mesh - else: - writer.set_input_data_object(mesh) + configure_input_data(writer, mesh) writer.write() def _trk_to_coords(self, in_file, out_file=None): From 9cafde5cfaeb153697f0cc3e89513db4e5d471c2 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 28 Jan 2016 21:40:57 -0800 Subject: [PATCH 35/56] refactored vtk/tvtk use and ETSConfigToolkit --- nipype/algorithms/mesh.py | 82 ++++-------------------- nipype/algorithms/tests/test_mesh_ops.py | 22 +++---- nipype/interfaces/fsl/utils.py | 25 +++----- nipype/interfaces/vtkbase.py | 81 +++++++++++++++++++++++ 4 files changed, 113 insertions(+), 97 deletions(-) create mode 100644 nipype/interfaces/vtkbase.py diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 02de4e6429..55f4f95cb4 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -14,7 +14,6 @@ from __future__ import division from builtins import zip -import os import os.path as op from warnings import warn @@ -25,52 +24,10 @@ from ..external.six import string_types from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, BaseInterfaceInputSpec) - +from ..interfaces.vtkbase import tvtk +from ..interfaces import vtkbase as VTKInfo iflogger = logging.getLogger('interface') -# Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var -old_ets = os.getenv('ETS_TOOLKIT') -os.environ['ETS_TOOLKIT'] = 'null' -have_tvtk = False -try: - from tvtk.api 
import tvtk - have_tvtk = True -except ImportError: - iflogger.warning('tvtk wasn\'t found') -finally: - if old_ets is not None: - os.environ['ETS_TOOLKIT'] = old_ets - else: - del os.environ['ETS_TOOLKIT'] - - -class Info(object): - """ Handle VTK version information """ - _vtk_version = None - - @staticmethod - def vtk_version(): - """ Get VTK version """ - if not Info.no_tvtk(): - return None - - if Info._vtk_version is None: - try: - from tvtk.tvtk_classes.vtk_version import vtk_build_version - vsplits = vtk_build_version.split('.') - Info._vtk_version = tuple([int(vsplits[0]), int(vsplits[1])] + vsplits[2:]) - except ImportError: - iflogger.warning( - 'VTK version-major inspection using tvtk failed, assuming VTK == 4.0.') - Info._vtk_version = (4, 0) - - return Info._vtk_version - - @staticmethod - def no_tvtk(): - global have_tvtk - return not have_tvtk - class TVTKBaseInterface(BaseInterface): @@ -79,13 +36,10 @@ class TVTKBaseInterface(BaseInterface): _redirect_x = True def __init__(self, **inputs): - if Info.no_tvtk(): + if VTKInfo.no_tvtk(): raise ImportError('This interface requires tvtk to run.') super(TVTKBaseInterface, self).__init__(**inputs) - def vtk_version(self): - return Info.vtk_version() - class WarpPointsInputSpec(BaseInterfaceInputSpec): points = File(exists=True, mandatory=True, @@ -144,17 +98,15 @@ def _run_interface(self, runtime): import nibabel as nb import numpy as np from scipy import ndimage - from tvtk.common import configure_input_data - from tvtk.common import is_old_pipeline as vtk_old r = tvtk.PolyDataReader(file_name=self.inputs.points) r.update() - mesh = r.output if vtk_old() else r.get_output() + mesh = VTKInfo.vtk_output(r) points = np.array(mesh.points) warp_dims = nb.funcs.four_to_three(nb.load(self.inputs.warp)) affine = warp_dims[0].affine - voxsize = warp_dims[0].header.get_zooms() + # voxsize = warp_dims[0].header.get_zooms() vox2ras = affine[0:3, 0:3] ras2vox = np.linalg.inv(vox2ras) origin = affine[0:3, 3] @@ -176,7 +128,7 @@ def _run_interface(self, runtime): newpoints = [p + d for p, d in zip(points, disps)] mesh.points = newpoints w = tvtk.PolyDataWriter() - configure_input_data(w, mesh) + VTKInfo.configure_input_data(w, mesh) w.file_name = self._gen_fname(self.inputs.points, suffix='warped', ext='.vtk') w.write() return runtime @@ -253,13 +205,10 @@ def _triangle_area(self, A, B, C): return area def _run_interface(self, runtime): - from tvtk.common import configure_input_data - from tvtk.common import is_old_pipeline as vtk_old - r1 = tvtk.PolyDataReader(file_name=self.inputs.surface1) r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) - vtk1 = r1.output if vtk_old() else r1.get_output() - vtk2 = r2.output if vtk_old() else r2.get_output() + vtk1 = VTKInfo.vtk_output(r1) + vtk2 = VTKInfo.vtk_output(r2) r1.update() r2.update() assert(len(vtk1.points) == len(vtk2.points)) @@ -303,7 +252,7 @@ def _run_interface(self, runtime): out_mesh.point_data.vectors.name = 'warpings' writer = tvtk.PolyDataWriter( file_name=op.abspath(self.inputs.out_warp)) - configure_input_data(writer, out_mesh) + VTKInfo.configure_input_data(writer, out_mesh) writer.write() self._distance = np.average(errvector, weights=weights) @@ -372,11 +321,8 @@ class MeshWarpMaths(TVTKBaseInterface): output_spec = MeshWarpMathsOutputSpec def _run_interface(self, runtime): - from tvtk.common import configure_input_data - from tvtk.common import is_old_pipeline as vtk_old - r1 = tvtk.PolyDataReader(file_name=self.inputs.in_surf) - vtk1 = r1.output if vtk_old() else 
r1.get_output() + vtk1 = VTKInfo.vtk_output(r1) r1.update() points1 = np.array(vtk1.points) @@ -388,7 +334,7 @@ def _run_interface(self, runtime): if isinstance(operator, string_types): r2 = tvtk.PolyDataReader(file_name=self.inputs.surface2) - vtk2 = r2.output if vtk_old() else r2.get_output() + vtk2 = VTKInfo.vtk_output(r2) r2.update() assert(len(points1) == len(vtk2.points)) @@ -399,7 +345,7 @@ def _run_interface(self, runtime): if opfield is None: raise RuntimeError( - ('No operator values found in operator file')) + 'No operator values found in operator file') opfield = np.array(opfield) @@ -422,13 +368,13 @@ def _run_interface(self, runtime): vtk1.point_data.vectors = warping writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_warp)) - configure_input_data(writer, vtk1) + VTKInfo.configure_input_data(writer, vtk1) writer.write() vtk1.point_data.vectors = None vtk1.points = points1 + warping writer = tvtk.PolyDataWriter(file_name=op.abspath(self.inputs.out_file)) - configure_input_data(writer, vtk1) + VTKInfo.configure_input_data(writer, vtk1) writer.write() return runtime diff --git a/nipype/algorithms/tests/test_mesh_ops.py b/nipype/algorithms/tests/test_mesh_ops.py index a754a97c1f..38edb8ecef 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -8,12 +8,9 @@ from nipype.testing import (assert_equal, assert_raises, skipif, assert_almost_equal, example_data) - import numpy as np - from nipype.algorithms import mesh as m - -import platform +from ...interfaces import vtkbase as VTKInfo def test_ident_distances(): @@ -21,7 +18,7 @@ def test_ident_distances(): curdir = os.getcwd() os.chdir(tempdir) - if m.Info.no_tvtk(): + if VTKInfo.no_tvtk(): yield assert_raises, ImportError, m.ComputeMeshWarp else: in_surf = example_data('surf01.vtk') @@ -45,24 +42,23 @@ def test_trans_distances(): curdir = os.getcwd() os.chdir(tempdir) - if m.Info.no_tvtk(): + if VTKInfo.no_tvtk(): yield assert_raises, ImportError, m.ComputeMeshWarp else: - from nipype.algorithms.mesh import tvtk - from tvtk.common import is_old_pipeline as vtk_old - from tvtk.common import configure_input_data + from ...interfaces.vtkbase import tvtk + in_surf = example_data('surf01.vtk') warped_surf = os.path.join(tempdir, 'warped.vtk') inc = np.array([0.7, 0.3, -0.2]) r1 = tvtk.PolyDataReader(file_name=in_surf) - vtk1 = r1.output if vtk_old() else r1.get_output() + vtk1 = VTKInfo.vtk_output(r1) r1.update() vtk1.points = np.array(vtk1.points) + inc writer = tvtk.PolyDataWriter(file_name=warped_surf) - configure_input_data(writer, vtk1) + VTKInfo.configure_input_data(writer, vtk1) writer.write() dist = m.ComputeMeshWarp() @@ -84,7 +80,7 @@ def test_warppoints(): curdir = os.getcwd() os.chdir(tempdir) - if m.Info.no_tvtk(): + if VTKInfo.no_tvtk(): yield assert_raises, ImportError, m.WarpPoints # TODO: include regression tests for when tvtk is installed @@ -98,7 +94,7 @@ def test_meshwarpmaths(): curdir = os.getcwd() os.chdir(tempdir) - if m.Info.no_tvtk(): + if VTKInfo.no_tvtk(): yield assert_raises, ImportError, m.MeshWarpMaths # TODO: include regression tests for when tvtk is installed diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 55467b613c..ce86446cdc 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -34,8 +34,6 @@ from ...utils.filemanip import (load_json, save_json, split_filename, fname_presuffix, copyfile) -from ...algorithms.mesh import Info as VTKInfo - warn = warnings.warn @@ -1895,17 
+1893,15 @@ def _parse_inputs(self, skip=None): return first_args + [second_args] def _vtk_to_coords(self, in_file, out_file=None): - # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var + from ..vtkbase import tvtk + from ...interfaces import vtkbase as VTKInfo + if VTKInfo.no_tvtk(): raise ImportError('TVTK is required and tvtk package was not found') - from ...algorithms.mesh import tvtk - from tvtk.common import is_old_pipeline as vtk_old - reader = tvtk.PolyDataReader(file_name=in_file + '.vtk') reader.update() - - mesh = reader.output if vtk_old() else reader.get_output() + mesh = VTKInfo.vtk_output(reader) points = mesh.points if out_file is None: @@ -1915,23 +1911,20 @@ def _vtk_to_coords(self, in_file, out_file=None): return out_file def _coords_to_vtk(self, points, out_file): - import os - # Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var + from ..vtkbase import tvtk + from ...interfaces import vtkbase as VTKInfo + if VTKInfo.no_tvtk(): raise ImportError('TVTK is required and tvtk package was not found') - from ...algorithms.mesh import tvtk - from tvtk.common import is_old_pipeline as vtk_old - from tvtk.common import configure_input_data - reader = tvtk.PolyDataReader(file_name=self.inputs.in_file) reader.update() - mesh = reader.output if vtk_old() else reader.get_output() + mesh = VTKInfo.vtk_output(reader) mesh.points = points writer = tvtk.PolyDataWriter(file_name=out_file) - configure_input_data(writer, mesh) + VTKInfo.configure_input_data(writer, mesh) writer.write() def _trk_to_coords(self, in_file, out_file=None): diff --git a/nipype/interfaces/vtkbase.py b/nipype/interfaces/vtkbase.py new file mode 100644 index 0000000000..452203367c --- /dev/null +++ b/nipype/interfaces/vtkbase.py @@ -0,0 +1,81 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +""" +vtkbase provides some helpers to use VTK through the tvtk package (mayavi) + +Code using tvtk should import it through this module +""" + +import os +from .. import logging + +iflogger = logging.getLogger('interface') + +# Check that VTK can be imported and get version +_vtk_version = None +try: + import vtk + _vtk_version = (vtk.vtkVersion.GetVTKMajorVersion(), + vtk.vtkVersion.GetVTKMinorVersion()) +except ImportError: + iflogger.warning('VTK was not found') + +# Ensure that tvtk is loaded with the appropriate ETS_TOOLKIT env var +old_ets = os.getenv('ETS_TOOLKIT') +os.environ['ETS_TOOLKIT'] = 'null' +_have_tvtk = False +try: + from tvtk.api import tvtk + _have_tvtk = True +except ImportError: + iflogger.warning('tvtk wasn\'t found') + tvtk = None +finally: + if old_ets is not None: + os.environ['ETS_TOOLKIT'] = old_ets + else: + del os.environ['ETS_TOOLKIT'] + + +def vtk_version(): + """ Get VTK version """ + global _vtk_version + return _vtk_version + + +def no_vtk(): + """ Checks if VTK is installed and the python wrapper is functional """ + global _vtk_version + return _vtk_version is None + + +def no_tvtk(): + """ Checks if tvtk was found """ + global _have_tvtk + return not _have_tvtk + + +def vtk_old(): + """ Checks if VTK uses the old-style pipeline (VTK<6.0) """ + global _vtk_version + if _vtk_version is None: + raise RuntimeException('VTK is not correctly installed.') + return _vtk_version[0] < 6 + + +def configure_input_data(obj, data): + """ + Configure the input data for vtk pipeline object obj. 
+ Copied from latest version of mayavi + """ + if vtk_old(): + obj.input = data + else: + obj.set_input_data(data) + + +def vtk_output(obj): + """ Configure the input data for vtk pipeline object obj.""" + if vtk_old(): + return obj.output + return obj.get_output() From 90ad893343851194649f3783bba3aa79b2a626a4 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Sun, 31 Jan 2016 23:29:35 -0800 Subject: [PATCH 36/56] fixed outdated code based on xvfbwrapper --- .travis.yml | 1 + circle.yml | 2 +- nipype/interfaces/base.py | 23 ++++------------------- 3 files changed, 6 insertions(+), 20 deletions(-) diff --git a/.travis.yml b/.travis.yml index e6ade72ead..0c0b3886a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,6 +18,7 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update +- sudo apt-get install xvfb - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; diff --git a/circle.yml b/circle.yml index ffa1c640aa..972aae6206 100644 --- a/circle.yml +++ b/circle.yml @@ -14,7 +14,7 @@ dependencies: - sudo apt-get update override: # Install apt packages - - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni swig python-vtk + - sudo apt-get install -y fsl-core fsl-atlases fsl-mni152-templates fsl-feeds afni swig python-vtk xvfb - echo 'source /etc/fsl/fsl.sh' >> $HOME/.profile - echo 'source /etc/afni/afni.sh' >> $HOME/.profile - mkdir -p ~/examples/ && ln -sf /usr/share/fsl-feeds/ ~/examples/feeds diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 098e6223bd..d2a0882c7a 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -64,17 +64,6 @@ def __init__(self, value): def __str__(self): return repr(self.value) - -def _unlock_display(ndisplay): - lockf = os.path.join('/tmp', '.X%d-lock' % ndisplay) - try: - os.remove(lockf) - except: - return False - - return True - - def _exists_in_path(cmd, environ): ''' Based on a code snippet from @@ -987,7 +976,10 @@ def _run_wrapper(self, runtime): vdisp = Xvfb(nolisten='tcp') vdisp.start() - vdisp_num = vdisp.vdisplay_num + try: + vdisp_num = vdisp.new_display + except AttributeError: # outdated version of xvfbwrapper + vdisp_num = vdisp.vdisplay_num iflogger.info('Redirecting X to :%d' % vdisp_num) runtime.environ['DISPLAY'] = ':%d' % vdisp_num @@ -995,14 +987,7 @@ def _run_wrapper(self, runtime): runtime = self._run_interface(runtime) if self._redirect_x: - if sysdisplay is None: - os.unsetenv('DISPLAY') - else: - os.environ['DISPLAY'] = sysdisplay - - iflogger.info('Freeing X :%d' % vdisp_num) vdisp.stop() - _unlock_display(vdisp_num) return runtime From 1ce68502e78035e903cce70d4c57faf0d6f36c6d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 2 Feb 2016 08:25:48 -0800 Subject: [PATCH 37/56] fixing lint problems --- nipype/interfaces/base.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index d2a0882c7a..8ce06d2271 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -858,7 +858,7 @@ def _outputs_help(cls): """ Prints description for output parameters """ helpstr = ['Outputs::', ''] - if cls.output_spec: + if cls.output_spec is not None: outputs = cls.output_spec() for name, spec in 
sorted(outputs.traits(transient=None).items()): helpstr += cls._get_trait_desc(outputs, name, spec) @@ -870,7 +870,7 @@ def _outputs(self): """ Returns a bunch containing output fields for the class """ outputs = None - if self.output_spec: + if self.output_spec is not None: outputs = self.output_spec() return outputs @@ -1091,7 +1091,7 @@ def run(self, **inputs): def _list_outputs(self): """ List the expected outputs """ - if self.output_spec: + if self.output_spec is not None: raise NotImplementedError else: return None @@ -1450,8 +1450,8 @@ def _get_environ(self): def version_from_command(self, flag='-v'): cmdname = self.cmd.split()[0] - if _exists_in_path(cmdname): - env = dict(os.environ) + env = dict(os.environ) + if _exists_in_path(cmdname, env): out_environ = self._get_environ() env.update(out_environ) proc = subprocess.Popen(' '.join((cmdname, flag)), @@ -1612,7 +1612,7 @@ def _overload_extension(self, value, name=None): def _list_outputs(self): metadata = dict(name_source=lambda t: t is not None) traits = self.inputs.traits(**metadata) - if traits: + if traits and self.output_spec is not None: outputs = self.output_spec().get() for name, trait_spec in traits.items(): out_name = name From 01bd281f3bdfa28b65edc597b99aca7a7724d940 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 3 Feb 2016 10:18:55 -0800 Subject: [PATCH 38/56] fixing output_spec is not callable --- nipype/interfaces/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 8ce06d2271..13f9a00b66 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -858,7 +858,7 @@ def _outputs_help(cls): """ Prints description for output parameters """ helpstr = ['Outputs::', ''] - if cls.output_spec is not None: + if getattr(cls, 'output_spec', None) is not None: outputs = cls.output_spec() for name, spec in sorted(outputs.traits(transient=None).items()): helpstr += cls._get_trait_desc(outputs, name, spec) @@ -870,7 +870,7 @@ def _outputs(self): """ Returns a bunch containing output fields for the class """ outputs = None - if self.output_spec is not None: + if getattr(self, 'output_spec', None) is not None: outputs = self.output_spec() return outputs @@ -1091,7 +1091,7 @@ def run(self, **inputs): def _list_outputs(self): """ List the expected outputs """ - if self.output_spec is not None: + if getattr(self, 'output_spec', None) is not None: raise NotImplementedError else: return None @@ -1612,7 +1612,7 @@ def _overload_extension(self, value, name=None): def _list_outputs(self): metadata = dict(name_source=lambda t: t is not None) traits = self.inputs.traits(**metadata) - if traits and self.output_spec is not None: + if traits and getattr(self, 'output_spec', None) is not None: outputs = self.output_spec().get() for name, trait_spec in traits.items(): out_name = name From edf906e9b473cfa0a36f0ffeb3a3016663ebf6e5 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 4 Feb 2016 10:44:29 -0800 Subject: [PATCH 39/56] revert codacy fixes --- nipype/interfaces/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 13f9a00b66..6dfe257654 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -858,7 +858,7 @@ def _outputs_help(cls): """ Prints description for output parameters """ helpstr = ['Outputs::', ''] - if getattr(cls, 'output_spec', None) is not None: + if cls.output_spec: outputs = cls.output_spec() for name, spec 
in sorted(outputs.traits(transient=None).items()): helpstr += cls._get_trait_desc(outputs, name, spec) @@ -870,7 +870,7 @@ def _outputs(self): """ Returns a bunch containing output fields for the class """ outputs = None - if getattr(self, 'output_spec', None) is not None: + if self.output_spec: outputs = self.output_spec() return outputs @@ -1091,7 +1091,7 @@ def run(self, **inputs): def _list_outputs(self): """ List the expected outputs """ - if getattr(self, 'output_spec', None) is not None: + if self.output_spec: raise NotImplementedError else: return None @@ -1612,7 +1612,7 @@ def _overload_extension(self, value, name=None): def _list_outputs(self): metadata = dict(name_source=lambda t: t is not None) traits = self.inputs.traits(**metadata) - if traits and getattr(self, 'output_spec', None) is not None: + if traits: outputs = self.output_spec().get() for name, trait_spec in traits.items(): out_name = name From 94eeea20501e51076d77a624e001052579f1a951 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 11:15:01 -0800 Subject: [PATCH 40/56] update travis --- .travis.yml | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7feeaf7e9d..94be534a9e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,7 +18,7 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update -- sudo apt-get install xvfb +- sudo apt-get install -qq xvfb libvtk6-dev python-vtk libx11-dev swig - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; @@ -26,33 +26,31 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi - export FSLOUTPUTTYPE=NIFTI_GZ -# Install vtk and fix numpy installation problem # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then travis_retry sudo apt-get install -qq libx11-dev swig; +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then echo '[x11]' >> $HOME/.numpy-site.cfg; echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; fi install: - conda update --yes conda -- conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION +- conda create -n testenv --yes pip numpy scipy nose networkx dateutil python=$TRAVIS_PYTHON_VERSION - source activate testenv -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi -- conda install --yes numpy scipy nose networkx dateutil -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes vtk; fi +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then + pip install ordereddict; fi + conda install --yes traits; else + pip install traits; + fi - pip install python-coveralls - pip install nose-cov # Add tvtk (PIL is required by blockcanvas) # Install mayavi (see https://github.com/enthought/mayavi/issues/271) -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then - pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz; - pip install -e 
git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools; - pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas; - pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy; - pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip; - pip install -e git+https://github.com/enthought/ets.git#egg=ets; - fi +- pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz +- pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools +- pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas +- pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy +- pip install https://github.com/enthought/mayavi/archive/a811639986fc1babecea68656f301c9a68d1ec07.zip +- pip install -e git+https://github.com/enthought/ets.git#egg=ets - pip install -r requirements.txt # finish remaining requirements - python setup.py install script: From a6ec41ea7318385ab4ecd215196004a20ba76a83 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 11:20:42 -0800 Subject: [PATCH 41/56] remove empty line before docstring --- nipype/algorithms/mesh.py | 20 +++++--------------- nipype/interfaces/fsl/utils.py | 26 -------------------------- 2 files changed, 5 insertions(+), 41 deletions(-) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 55f4f95cb4..9e18a96b90 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -12,25 +12,23 @@ """ from __future__ import division -from builtins import zip import os.path as op -from warnings import warn - import numpy as np from numpy import linalg as nla +from builtins import zip + from .. import logging from ..external.six import string_types from ..interfaces.base import (BaseInterface, traits, TraitedSpec, File, BaseInterfaceInputSpec) from ..interfaces.vtkbase import tvtk from ..interfaces import vtkbase as VTKInfo -iflogger = logging.getLogger('interface') +IFLOGGER = logging.getLogger('interface') class TVTKBaseInterface(BaseInterface): - """ A base class for interfaces using VTK """ _redirect_x = True @@ -58,7 +56,6 @@ class WarpPointsOutputSpec(TraitedSpec): class WarpPoints(TVTKBaseInterface): - """ Applies a displacement field to a point set given in vtk format. Any discrete deformation field, given in physical coordinates and @@ -79,8 +76,6 @@ class WarpPoints(TVTKBaseInterface): output_spec = WarpPointsOutputSpec def _gen_fname(self, in_file, suffix='generated', ext=None): - import os.path as op - fname, fext = op.splitext(op.basename(in_file)) if fext == '.gz': @@ -96,7 +91,6 @@ def _gen_fname(self, in_file, suffix='generated', ext=None): def _run_interface(self, runtime): import nibabel as nb - import numpy as np from scipy import ndimage r = tvtk.PolyDataReader(file_name=self.inputs.points) @@ -170,7 +164,6 @@ class ComputeMeshWarpOutputSpec(TraitedSpec): class ComputeMeshWarp(TVTKBaseInterface): - """ Calculates a the vertex-wise warping to get surface2 from surface1. It also reports the average distance of vertices, using the norm specified @@ -295,7 +288,6 @@ class MeshWarpMathsOutputSpec(TraitedSpec): class MeshWarpMaths(TVTKBaseInterface): - """ Performs the most basic mathematical operations on the warping field defined at each vertex of the input surface. 
A surface with scalar @@ -386,7 +378,6 @@ def _list_outputs(self): class P2PDistance(ComputeMeshWarp): - """ Calculates a point-to-point (p2p) distance between two corresponding VTK-readable meshes or contours. @@ -399,6 +390,5 @@ class P2PDistance(ComputeMeshWarp): def __init__(self, **inputs): super(P2PDistance, self).__init__(**inputs) - warn(('This interface has been deprecated since 1.0, please use ' - 'ComputeMeshWarp'), - DeprecationWarning) + IFLOGGER.warn('This interface has been deprecated since 1.0, please use ' + 'ComputeMeshWarp') diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 9fecad18c2..8e58b0d8cf 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -52,7 +52,6 @@ class CopyGeomOutputSpec(TraitedSpec): class CopyGeom(FSLCommand): - """Use fslcpgeom to copy the header geometry information to another image. Copy certain parts of the header information (image dimensions, voxel dimensions, voxel dimensions units string, image orientation/origin or qform/sform info) @@ -114,7 +113,6 @@ class ImageMeantsOutputSpec(TraitedSpec): class ImageMeants(FSLCommand): - """ Use fslmeants for printing the average timeseries (intensities) to the screen (or saves to a file). The average is taken over all voxels in the mask (or all voxels in the image if no mask is specified) @@ -158,7 +156,6 @@ class SmoothOutputSpec(TraitedSpec): class Smooth(FSLCommand): - """ Use fslmaths to smooth the image @@ -223,7 +220,6 @@ class MergeOutputSpec(TraitedSpec): class Merge(FSLCommand): - """Use fslmerge to concatenate images Images can be concatenated across time, x, y, or z dimensions. Across the @@ -291,7 +287,6 @@ class ExtractROIOutputSpec(TraitedSpec): class ExtractROI(FSLCommand): - """Uses FSL Fslroi command to extract region of interest (ROI) from an image. @@ -372,7 +367,6 @@ class SplitOutputSpec(TraitedSpec): class Split(FSLCommand): - """Uses FSL Fslsplit command to separate a volume into images in time, x, y or z dimension. """ @@ -424,7 +418,6 @@ class ImageMathsOutputSpec(TraitedSpec): class ImageMaths(FSLCommand): - """Use FSL fslmaths command to allow mathematical manipulation of images `FSL info `_ @@ -499,7 +492,6 @@ class FilterRegressorOutputSpec(TraitedSpec): class FilterRegressor(FSLCommand): - """Data de-noising by regressing out part of a design matrix Uses simple OLS regression on 4D images @@ -555,7 +547,6 @@ class ImageStatsOutputSpec(TraitedSpec): class ImageStats(FSLCommand): - """Use FSL fslstats command to calculate stats from images `FSL info `_ @@ -633,7 +624,6 @@ class AvScaleOutputSpec(TraitedSpec): class AvScale(FSLCommand): - """Use FSL avscale command to extract info from mat file output of FLIRT Examples @@ -728,7 +718,6 @@ class OverlayOutputSpec(TraitedSpec): class Overlay(FSLCommand): - """ Use FSL's overlay command to combine background and statistical images into one volume @@ -845,7 +834,6 @@ class SlicerOutputSpec(TraitedSpec): class Slicer(FSLCommand): - """Use FSL's slicer command to output a png image from a volume. @@ -937,7 +925,6 @@ class PlotTimeSeriesOutputSpec(TraitedSpec): class PlotTimeSeries(FSLCommand): - """Use fsl_tsplot to create images of time course plots. Examples @@ -1021,7 +1008,6 @@ class PlotMotionParamsOutputSpec(TraitedSpec): class PlotMotionParams(FSLCommand): - """Use fsl_tsplot to plot the estimated motion parameters from a realignment program. 
@@ -1135,7 +1121,6 @@ class ConvertXFMOutputSpec(TraitedSpec): class ConvertXFM(FSLCommand): - """Use the FSL utility convert_xfm to modify FLIRT transformation matrices. Examples @@ -1207,7 +1192,6 @@ class SwapDimensionsOutputSpec(TraitedSpec): class SwapDimensions(FSLCommand): - """Use fslswapdim to alter the orientation of an image. This interface accepts a three-tuple corresponding to the new @@ -1250,7 +1234,6 @@ class PowerSpectrumOutputSpec(TraitedSpec): class PowerSpectrum(FSLCommand): - """Use FSL PowerSpectrum command for power spectrum estimation. Examples @@ -1311,7 +1294,6 @@ class SigLossOuputSpec(TraitedSpec): class SigLoss(FSLCommand): - """Estimates signal loss from a field map (in rad/s) Examples @@ -1353,7 +1335,6 @@ class Reorient2StdOutputSpec(TraitedSpec): class Reorient2Std(FSLCommand): - """fslreorient2std is a tool for reorienting the image to match the approximate orientation of the standard template images (MNI152). @@ -1442,7 +1423,6 @@ class InvWarpOutputSpec(TraitedSpec): class InvWarp(FSLCommand): - """ Use FSL Invwarp to invert a FNIRT warp @@ -1529,7 +1509,6 @@ class ComplexOuputSpec(TraitedSpec): class Complex(FSLCommand): - """fslcomplex is a tool for converting complex data Examples @@ -1665,7 +1644,6 @@ class WarpUtilsOutputSpec(TraitedSpec): class WarpUtils(FSLCommand): - """Use FSL `fnirtfileutils `_ to convert field->coefficients, coefficients->field, coefficients->other_coefficients etc @@ -1798,7 +1776,6 @@ class ConvertWarpOutputSpec(TraitedSpec): class ConvertWarp(FSLCommand): - """Use FSL `convertwarp `_ for combining multiple transforms into one. @@ -1854,7 +1831,6 @@ class WarpPointsOutputSpec(TraitedSpec): class WarpPoints(CommandLine): - """Use FSL `img2imgcoord `_ to transform point sets. Accepts plain text files and vtk files. @@ -2011,7 +1987,6 @@ class WarpPointsToStdInputSpec(WarpPointsBaseInputSpec): class WarpPointsToStd(WarpPoints): - """ Use FSL `img2stdcoord `_ to transform point sets to standard space coordinates. Accepts plain text files and @@ -2073,7 +2048,6 @@ class MotionOutliersOutputSpec(TraitedSpec): class MotionOutliers(FSLCommand): - """ Use FSL fsl_motion_outliers`http://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FSLMotionOutliers`_ to find outliers in timeseries (4d) data. 
Examples From a17422e417a0d9be05ced27f34c58cb5133df27d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 11:28:41 -0800 Subject: [PATCH 42/56] add multiverse for ubuntu --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 94be534a9e..cae76dcd56 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,6 +16,9 @@ before_install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; else export PATH=/home/travis/miniconda3/bin:$PATH; fi - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi +# Enable universe and multiverse +- sudo add-apt-repository "deb http://us.archive.ubuntu.com/ubuntu/ trusty universe multiverse" +- sudo add-apt-repository "deb http://us.archive.ubuntu.com/ubuntu/ trusty-updates universe multiverse" - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update - sudo apt-get install -qq xvfb libvtk6-dev python-vtk libx11-dev swig From c9968ae0bcc4550b0b63edf3fc550874be3ed3c9 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 11:51:51 -0800 Subject: [PATCH 43/56] travis set to pin libvtk6-dev from trusty --- .travis.yml | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index cae76dcd56..282275a638 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,6 +8,7 @@ env: - INSTALL_DEB_DEPENDECIES=true - INSTALL_DEB_DEPENDECIES=false before_install: +- echo 'APT::Default-Release "precise";' | sudo tee /etc/apt/apt.conf.d/01ubuntu - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then wget http://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; else wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; fi @@ -17,18 +18,10 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi # Enable universe and multiverse -- sudo add-apt-repository "deb http://us.archive.ubuntu.com/ubuntu/ trusty universe multiverse" -- sudo add-apt-repository "deb http://us.archive.ubuntu.com/ubuntu/ trusty-updates universe multiverse" +- sudo add-apt-repository "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" +- sudo add-apt-repository "deb-src http://archive.ubuntu.com/ubuntu trusty main universe multiverse" - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update -- sudo apt-get install -qq xvfb libvtk6-dev python-vtk libx11-dev swig -- if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends - fsl afni elastix; fi -- if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; - fi -- if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi -- if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi -- export FSLOUTPUTTYPE=NIFTI_GZ # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then echo '[x11]' >> $HOME/.numpy-site.cfg; @@ -36,6 +29,16 @@ before_install: echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; fi install: +- sudo apt-get build-dep -y -qq libvtk6-dev +- sudo apt-get -b source -t trusty -y -qq libvtk6-dev +- sudo apt-get install -y -qq xvfb libx11-dev swig +- if $INSTALL_DEB_DEPENDECIES; then 
travis_retry sudo apt-get install -qq --no-install-recommends + fsl afni elastix; fi +- if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; + fi +- if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi +- if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi +- export FSLOUTPUTTYPE=NIFTI_GZ - conda update --yes conda - conda create -n testenv --yes pip numpy scipy nose networkx dateutil python=$TRAVIS_PYTHON_VERSION - source activate testenv From 807708b5554708cc6db6d379fb2a9b75c939007c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 11:59:51 -0800 Subject: [PATCH 44/56] fix error in travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 282275a638..4f0f380a16 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,7 +19,7 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi # Enable universe and multiverse - sudo add-apt-repository "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" -- sudo add-apt-repository "deb-src http://archive.ubuntu.com/ubuntu trusty main universe multiverse" +- sudo add-apt-repository "deb-src http://archive.ubuntu.com/ubuntu trusty universe multiverse" - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 From 39b84f7dab9086a208b3756c544b8587db7fa78b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 12:14:18 -0800 Subject: [PATCH 45/56] fix error in travis --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4f0f380a16..7b09a12568 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,8 +18,7 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi # Enable universe and multiverse -- sudo add-apt-repository "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" -- sudo add-apt-repository "deb-src http://archive.ubuntu.com/ubuntu trusty universe multiverse" +- sudo add-apt-repository -s "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 From f809b412556c48884ac000ef5fa26dc539832d4b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 13:19:46 -0800 Subject: [PATCH 46/56] trying to fix travis --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7b09a12568..db42945798 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,7 +18,8 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi # Enable universe and multiverse -- sudo add-apt-repository -s "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" +- echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list +- echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update # Fix numpy problem: 
https://github.com/enthought/enable/issues/34#issuecomment-2029381 From 76e64cc41fb2fbbabb7768e54a45389e239026f4 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 13:53:32 -0800 Subject: [PATCH 47/56] add trusty-updates- --- .travis.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index db42945798..bea478132d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,6 +20,8 @@ before_install: # Enable universe and multiverse - echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list - echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list +- echo "deb http://us.archive.ubuntu.com/ubuntu/ trusty-updates universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list +- echo "deb-src deb-src http://us.archive.ubuntu.com/ubuntu/ trusty-updates universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 @@ -29,6 +31,8 @@ before_install: echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; fi install: +- sudo apt-get build-dep -y -qq dh-python +- sudo apt-get -b source -t trusty -y -qq python - sudo apt-get build-dep -y -qq libvtk6-dev - sudo apt-get -b source -t trusty -y -qq libvtk6-dev - sudo apt-get install -y -qq xvfb libx11-dev swig From bf8192da8ee24de234a3cd6af1276c4cde0869e6 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 14:17:26 -0800 Subject: [PATCH 48/56] still fixing VTK6 installation in travis --- .travis.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index bea478132d..02bfe48093 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,10 +18,8 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi # Enable universe and multiverse -- echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list -- echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list -- echo "deb http://us.archive.ubuntu.com/ubuntu/ trusty-updates universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list -- echo "deb-src deb-src http://us.archive.ubuntu.com/ubuntu/ trusty-updates universe multiverse" | sudo tee /etc/apt/sources.list.d/trusty.list +- sudo -c sh 'echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' +- sudo -c sh 'echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 From 584359117d436311439bcbeae6ac70534fdeac10 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 14:35:52 -0800 Subject: [PATCH 49/56] fixed command as sudo --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 02bfe48093..76d56ec067 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,8 +18,8 @@ before_install: - if 
$INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi # Enable universe and multiverse -- sudo -c sh 'echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' -- sudo -c sh 'echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' +- sudo bash -c 'echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' +- sudo bash -c 'echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 From b9e051daa17ccca9e955088ca59cf1a38fe28db2 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 14:55:23 -0800 Subject: [PATCH 50/56] use ubuntu trusty --- .travis.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 76d56ec067..9680ce65a3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,8 @@ cache: - apt language: python +sudo: required +dist: trusty python: - 2.7 - 3.4 @@ -8,7 +10,7 @@ env: - INSTALL_DEB_DEPENDECIES=true - INSTALL_DEB_DEPENDECIES=false before_install: -- echo 'APT::Default-Release "precise";' | sudo tee /etc/apt/apt.conf.d/01ubuntu +# - echo 'APT::Default-Release "precise";' | sudo tee /etc/apt/apt.conf.d/01ubuntu - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then wget http://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; else wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; fi @@ -18,8 +20,8 @@ before_install: - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi # Enable universe and multiverse -- sudo bash -c 'echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' -- sudo bash -c 'echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' +# - sudo bash -c 'echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' +# - sudo bash -c 'echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' - bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 From 62a23e27ab476c50aa5626e221e58a963b8ab8cb Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 15:14:11 -0800 Subject: [PATCH 51/56] modify the neurodebian script for travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9680ce65a3..16e8a038cd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,7 +22,7 @@ before_install: # Enable universe and multiverse # - sudo bash -c 'echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' # - sudo bash -c 'echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' -- bash <(wget -q -O- 
http://neuro.debian.net/_files/neurodebian-travis.sh) +- bash <(wget -q -O- https://gist.github.com/oesteban/a0d8972f978339910bf4/raw) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then From 664a839cc10a7d9f3c79eca049c104b86c2ffbc9 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 10 Feb 2016 15:26:52 -0800 Subject: [PATCH 52/56] remove ubuntu pinning --- .travis.yml | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/.travis.yml b/.travis.yml index 16e8a038cd..95dd0a4e3b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,23 +19,16 @@ before_install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; else export PATH=/home/travis/miniconda3/bin:$PATH; fi - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi -# Enable universe and multiverse -# - sudo bash -c 'echo "deb http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' -# - sudo bash -c 'echo "deb-src http://archive.ubuntu.com/ubuntu/ trusty main universe multiverse" >> /etc/apt/sources.list.d/trusty.list' - bash <(wget -q -O- https://gist.github.com/oesteban/a0d8972f978339910bf4/raw) - sudo apt-get update # Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then - echo '[x11]' >> $HOME/.numpy-site.cfg; - echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; - echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; - fi +#- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then +# echo '[x11]' >> $HOME/.numpy-site.cfg; +# echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; +# echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; +# fi install: -- sudo apt-get build-dep -y -qq dh-python -- sudo apt-get -b source -t trusty -y -qq python -- sudo apt-get build-dep -y -qq libvtk6-dev -- sudo apt-get -b source -t trusty -y -qq libvtk6-dev -- sudo apt-get install -y -qq xvfb libx11-dev swig +- travis_retry sudo apt-get install -y -qq xvfb libx11-dev swig libvtk6-dev python-vtk - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; From 4a778c8cdbabd69a8bb7e1bae6e414dd1b64f9a0 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Thu, 11 Feb 2016 08:26:37 -0800 Subject: [PATCH 53/56] remove python-vtk from travis --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 95dd0a4e3b..f55988458b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,7 +28,7 @@ before_install: # echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; # fi install: -- travis_retry sudo apt-get install -y -qq xvfb libx11-dev swig libvtk6-dev python-vtk +- travis_retry sudo apt-get install -y -qq xvfb libx11-dev swig libvtk6-dev - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; From 0087bd99f2ac4be6fde01abe645cd3f0a0d037b0 Mon Sep 17 00:00:00 2001 From: Oscar Esteban 
Date: Thu, 11 Feb 2016 14:11:10 -0800 Subject: [PATCH 54/56] added disable flag in codacy errors --- nipype/interfaces/base.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 6dfe257654..906768f99f 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -859,7 +859,7 @@ def _outputs_help(cls): """ helpstr = ['Outputs::', ''] if cls.output_spec: - outputs = cls.output_spec() + outputs = cls.output_spec() #pylint: disable=E1102 for name, spec in sorted(outputs.traits(transient=None).items()): helpstr += cls._get_trait_desc(outputs, name, spec) if len(helpstr) == 2: @@ -871,7 +871,8 @@ def _outputs(self): """ outputs = None if self.output_spec: - outputs = self.output_spec() + outputs = self.output_spec() #pylint: disable=E1102 + return outputs @classmethod @@ -1613,7 +1614,7 @@ def _list_outputs(self): metadata = dict(name_source=lambda t: t is not None) traits = self.inputs.traits(**metadata) if traits: - outputs = self.output_spec().get() + outputs = self.output_spec().get() #pylint: disable=E1102 for name, trait_spec in traits.items(): out_name = name if trait_spec.output_name is not None: From a2e52ee99daad6a2ea29b4b2bcda93889d4ac7e0 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 12 Feb 2016 09:33:10 -0800 Subject: [PATCH 55/56] revert back travis --- .travis.yml | 47 +++++++++++++++++++++++------------------------ 1 file changed, 23 insertions(+), 24 deletions(-) diff --git a/.travis.yml b/.travis.yml index f55988458b..7feeaf7e9d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,8 +1,6 @@ cache: - apt language: python -sudo: required -dist: trusty python: - 2.7 - 3.4 @@ -10,7 +8,6 @@ env: - INSTALL_DEB_DEPENDECIES=true - INSTALL_DEB_DEPENDECIES=false before_install: -# - echo 'APT::Default-Release "precise";' | sudo tee /etc/apt/apt.conf.d/01ubuntu - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then wget http://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; else wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; fi @@ -19,16 +16,9 @@ before_install: - if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; else export PATH=/home/travis/miniconda3/bin:$PATH; fi - if $INSTALL_DEB_DEPENDECIES; then sudo rm -rf /dev/shm; fi - if $INSTALL_DEB_DEPENDECIES; then sudo ln -s /run/shm /dev/shm; fi -- bash <(wget -q -O- https://gist.github.com/oesteban/a0d8972f978339910bf4/raw) +- bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh) - sudo apt-get update -# Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 -#- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then -# echo '[x11]' >> $HOME/.numpy-site.cfg; -# echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; -# echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; -# fi -install: -- travis_retry sudo apt-get install -y -qq xvfb libx11-dev swig libvtk6-dev +- sudo apt-get install xvfb - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix; fi - if $INSTALL_DEB_DEPENDECIES; then travis_retry sudo apt-get install -qq fsl-atlases; @@ -36,24 +26,33 @@ install: - if $INSTALL_DEB_DEPENDECIES; then source /etc/fsl/fsl.sh; fi - if $INSTALL_DEB_DEPENDECIES; then source /etc/afni/afni.sh; fi - export FSLOUTPUTTYPE=NIFTI_GZ +# Install vtk and fix numpy 
installation problem +# Fix numpy problem: https://github.com/enthought/enable/issues/34#issuecomment-2029381 +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then travis_retry sudo apt-get install -qq libx11-dev swig; + echo '[x11]' >> $HOME/.numpy-site.cfg; + echo 'library_dirs = /usr/lib64:/usr/lib:/usr/lib/x86_64-linux-gnu' >> $HOME/.numpy-site.cfg; + echo 'include_dirs = /usr/include:/usr/include/X11' >> $HOME/.numpy-site.cfg; + fi +install: - conda update --yes conda -- conda create -n testenv --yes pip numpy scipy nose networkx dateutil python=$TRAVIS_PYTHON_VERSION +- conda create -n testenv --yes pip python=$TRAVIS_PYTHON_VERSION - source activate testenv -- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then - pip install ordereddict; fi - conda install --yes traits; else - pip install traits; - fi +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then pip install ordereddict; fi +- conda install --yes numpy scipy nose networkx dateutil +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes traits; else pip install traits; fi +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then conda install --yes vtk; fi - pip install python-coveralls - pip install nose-cov # Add tvtk (PIL is required by blockcanvas) # Install mayavi (see https://github.com/enthought/mayavi/issues/271) -- pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz -- pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools -- pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas -- pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy -- pip install https://github.com/enthought/mayavi/archive/a811639986fc1babecea68656f301c9a68d1ec07.zip -- pip install -e git+https://github.com/enthought/ets.git#egg=ets +- if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then + pip install http://effbot.org/downloads/Imaging-1.1.7.tar.gz; + pip install -e git+https://github.com/enthought/etsdevtools.git#egg=etsdevtools; + pip install -e git+https://github.com/enthought/blockcanvas.git#egg=blockcanvas; + pip install -e git+https://github.com/enthought/etsproxy.git#egg=etsproxy; + pip install https://github.com/dmsurti/mayavi/archive/4d4aaf315a29d6a86707dd95149e27d9ed2225bf.zip; + pip install -e git+https://github.com/enthought/ets.git#egg=ets; + fi - pip install -r requirements.txt # finish remaining requirements - python setup.py install script: From 24488216c256dd06e730d528b568c1515bd6242d Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Fri, 12 Feb 2016 13:47:16 -0800 Subject: [PATCH 56/56] add disable flag for codacy --- nipype/interfaces/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index 906768f99f..eb2d406532 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -1732,7 +1732,7 @@ class SEMLikeCommandLine(CommandLine): """ def _list_outputs(self): - outputs = self.output_spec().get() + outputs = self.output_spec().get() #pylint: disable=E1102 return self._outputs_from_inputs(outputs) def _outputs_from_inputs(self, outputs):
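
---

The series above replaces the ad-hoc ETS_TOOLKIT/tvtk handling with the shared nipype/interfaces/vtkbase.py helper. As a minimal usage sketch (not part of the patches; it assumes the helper names introduced in PATCH 35 -- no_tvtk(), vtk_output() and configure_input_data() -- and a working tvtk/VTK installation), reading and re-writing a VTK polydata file through the new module would look roughly like this:

    # Illustrative only: load a .vtk mesh, shift its points, and write it back,
    # letting vtkbase hide the old-vs-new VTK pipeline differences.
    import numpy as np
    from nipype.interfaces import vtkbase
    from nipype.interfaces.vtkbase import tvtk

    if vtkbase.no_tvtk():
        raise ImportError('tvtk is required for this example')

    reader = tvtk.PolyDataReader(file_name='surf01.vtk')
    reader.update()
    mesh = vtkbase.vtk_output(reader)           # reader.output vs reader.get_output()

    # apply a constant displacement to every vertex
    mesh.points = np.array(mesh.points) + np.array([0.7, 0.3, -0.2])

    writer = tvtk.PolyDataWriter(file_name='warped.vtk')
    vtkbase.configure_input_data(writer, mesh)  # writer.input vs writer.set_input_data()
    writer.write()

This mirrors the calls used in nipype/algorithms/mesh.py, nipype/algorithms/tests/test_mesh_ops.py and nipype/interfaces/fsl/utils.py after the refactor, so individual interfaces no longer need to branch on the VTK version or touch the ETS_TOOLKIT environment variable themselves.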