diff --git a/build_docs.py b/build_docs.py
index f50281fce0..77b55eba53 100644
--- a/build_docs.py
+++ b/build_docs.py
@@ -70,7 +70,7 @@ class APIDocs(TempInstall):
     user_options = [
         ('None', None, 'this command has no options'),
-        ]
+    ]
 
     def run(self):
         # First build the project and install it to a temporary location.
@@ -193,5 +193,3 @@ def run(self):
     'api_docs': APIDocs,
     'clean': Clean,
 }
-
-
diff --git a/doc/conf.py b/doc/conf.py
index 5bfb1f402f..6066689c44 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -12,7 +12,8 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 
-import sys, os
+import sys
+import os
 
 nipypepath = os.path.abspath('..')
 sys.path.insert(1, nipypepath)
@@ -211,8 +212,8 @@
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('documentation', 'nipype.tex', u'nipype Documentation',
-   u'Neuroimaging in Python team', 'manual'),
+    ('documentation', 'nipype.tex', u'nipype Documentation',
+     u'Neuroimaging in Python team', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
diff --git a/doc/sphinxext/autosummary_generate.py b/doc/sphinxext/autosummary_generate.py
index 0b562f9975..aefd8552d8 100755
--- a/doc/sphinxext/autosummary_generate.py
+++ b/doc/sphinxext/autosummary_generate.py
@@ -18,7 +18,12 @@
 """
 from __future__ import print_function
 
-import glob, re, inspect, os, optparse, pydoc
+import glob
+import re
+import inspect
+import os
+import optparse
+import pydoc
 from autosummary import import_by_name
 
 try:
@@ -76,7 +81,7 @@ def main():
 
         f = open(fn, 'w')
         try:
-            f.write('%s\n%s\n\n' % (name, '='*len(name)))
+            f.write('%s\n%s\n\n' % (name, '=' * len(name)))
 
             if inspect.isclass(obj):
                 if issubclass(obj, Exception):
@@ -205,7 +210,7 @@ def get_documented_in_lines(lines, module=None, filename=None):
             current_module = name
             documented.update(get_documented_in_docstring(
                 name, filename=filename))
-        elif current_module and not name.startswith(current_module+'.'):
+        elif current_module and not name.startswith(current_module + '.'):
             name = "%s.%s" % (current_module, name)
             documented.setdefault(name, []).append(
                 (filename, current_title, "auto" + m.group(1), None))
diff --git a/doc/sphinxext/numpy_ext/docscrape.py b/doc/sphinxext/numpy_ext/docscrape.py
index 71beb6bbc9..affb1ba6b5 100644
--- a/doc/sphinxext/numpy_ext/docscrape.py
+++ b/doc/sphinxext/numpy_ext/docscrape.py
@@ -67,7 +67,7 @@ def read_to_condition(self, condition_func):
                 return self[start:self._l]
             self._l += 1
             if self.eof():
-                return self[start:self._l+1]
+                return self[start:self._l + 1]
         return []
 
     def read_to_next_empty_line(self):
@@ -114,7 +114,7 @@ def __init__(self, docstring, config={}):
             'References': '',
             'Examples': '',
             'index': {}
-            }
+        }
 
         self._parse()
 
@@ -139,18 +139,20 @@ def _is_at_section(self):
             return True
 
         l2 = self._doc.peek(1).strip()  # ---------- or ==========
-        return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))
+        return l2.startswith('-' * len(l1)) or l2.startswith('=' * len(l1))
 
     def _strip(self, doc):
         i = 0
         j = 0
         for i, line in enumerate(doc):
-            if line.strip(): break
+            if line.strip():
+                break
 
         for j, line in enumerate(doc[::-1]):
-            if line.strip(): break
+            if line.strip():
+                break
 
-        return doc[i:len(doc)-j]
+        return doc[i:len(doc) - j]
 
     def _read_to_next_section(self):
         section = self._doc.read_to_next_empty_line()
@@ -227,7 +229,8 @@ def push_item(name, rest):
             rest = []
 
         for line in content:
-            if not line.strip(): continue
+            if not line.strip():
+                continue
 
             m = self._name_rgx.match(line)
             if m and line[m.end():].strip().startswith(':'):
@@ -306,12 +309,12 @@ def _parse(self):
     # string conversion routines
 
     def _str_header(self, name, symbol='-'):
-        return [name, len(name)*symbol]
+        return [name, len(name) * symbol]
 
     def _str_indent(self, doc, indent=4):
         out = []
         for line in doc:
-            out += [' '*indent + line]
+            out += [' ' * indent + line]
         return out
 
     def _str_signature(self):
@@ -351,7 +354,8 @@ def _str_section(self, name):
         return out
 
     def _str_see_also(self, func_role):
-        if not self['See Also']: return []
+        if not self['See Also']:
+            return []
         out = []
         out += self._str_header("See Also")
         last_had_desc = True
@@ -404,7 +408,7 @@ def __str__(self, func_role=''):
 
 
 def indent(str, indent=4):
-    indent_str = ' '*indent
+    indent_str = ' ' * indent
     if str is None:
         return indent_str
     lines = str.split('\n')
@@ -417,7 +421,7 @@ def dedent_lines(lines):
 
 
 def header(text, style='-'):
-    return text + '\n' + style*len(text) + '\n'
+    return text + '\n' + style * len(text) + '\n'
 
 
 class FunctionDoc(NumpyDocString):
@@ -504,9 +508,9 @@ def methods(self):
         if self._cls is None:
             return []
         return [name for name, func in inspect.getmembers(self._cls)
-                if ((not name.startswith('_')
-                     or name in self.extra_public_methods)
-                    and callable(func))]
+                if ((not name.startswith('_') or
+                     name in self.extra_public_methods) and
+                    callable(func))]
 
     @property
     def properties(self):
diff --git a/doc/sphinxext/numpy_ext/docscrape_sphinx.py b/doc/sphinxext/numpy_ext/docscrape_sphinx.py
index 70f7ecd695..783f3be85a 100644
--- a/doc/sphinxext/numpy_ext/docscrape_sphinx.py
+++ b/doc/sphinxext/numpy_ext/docscrape_sphinx.py
@@ -1,5 +1,8 @@
 from __future__ import absolute_import
-import re, inspect, textwrap, pydoc
+import re
+import inspect
+import textwrap
+import pydoc
 import sphinx
 from .docscrape import NumpyDocString, FunctionDoc, ClassDoc
 from nipype.external.six import string_types
@@ -20,7 +23,7 @@ def _str_field_list(self, name):
     def _str_indent(self, doc, indent=4):
         out = []
         for line in doc:
-            out += [' '*indent + line]
+            out += [' ' * indent + line]
         return out
 
     def _str_signature(self):
@@ -87,7 +90,7 @@ def _str_member_list(self, name):
             if others:
                 maxlen_0 = max([len(x[0]) for x in others])
                 maxlen_1 = max([len(x[1]) for x in others])
-                hdr = "="*maxlen_0 + "  " + "="*maxlen_1 + "  " + "="*10
+                hdr = "=" * maxlen_0 + "  " + "=" * maxlen_1 + "  " + "=" * 10
                 fmt = '%%%ds  %%%ds  ' % (maxlen_0, maxlen_1)
                 n_indent = maxlen_0 + maxlen_1 + 4
                 out += [hdr]
@@ -164,8 +167,8 @@ def _str_references(self):
     def _str_examples(self):
         examples_str = "\n".join(self['Examples'])
 
-        if (self.use_plots and 'import matplotlib' in examples_str
-                and 'plot::' not in examples_str):
+        if (self.use_plots and 'import matplotlib' in examples_str and
+                'plot::' not in examples_str):
             out = []
             out += self._str_header('Examples')
             out += ['.. plot::', '']
diff --git a/doc/sphinxext/numpy_ext/numpydoc.py b/doc/sphinxext/numpy_ext/numpydoc.py
index 981c1d0270..ab375c112f 100644
--- a/doc/sphinxext/numpy_ext/numpydoc.py
+++ b/doc/sphinxext/numpy_ext/numpydoc.py
@@ -24,7 +24,9 @@
 if sphinx.__version__ < '1.0.1':
     raise RuntimeError("Sphinx 1.0.1 or newer is required")
 
-import os, re, pydoc
+import os
+import re
+import pydoc
 from .docscrape_sphinx import get_doc_object, SphinxDocString
 from sphinx.util.compat import Directive
 import inspect
@@ -39,14 +41,14 @@ def mangle_docstrings(app, what, name, obj, options, lines,
     if what == 'module':
         # Strip top title
         title_re = re.compile(ur'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
-                              re.I |re.S)
+                              re.I | re.S)
         lines[:] = title_re.sub(u'', u"\n".join(lines)).split(u"\n")
     else:
         doc = get_doc_object(obj, what, u"\n".join(lines), config=cfg)
         lines[:] = str(doc).split(u"\n")
 
     if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
-           obj.__name__:
+            obj.__name__:
         if hasattr(obj, '__module__'):
             v = dict(full_name=u"%s.%s" % (obj.__module__, obj.__name__))
         else:
@@ -87,8 +89,10 @@ def mangle_signature(app, what, name, obj, options, sig, retann):
             'initializes x; see ' in pydoc.getdoc(obj.__init__))):
         return '', ''
 
-    if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return
-    if not hasattr(obj, '__doc__'): return
+    if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')):
+        return
+    if not hasattr(obj, '__doc__'):
+        return
 
     doc = SphinxDocString(pydoc.getdoc(obj))
     if doc['Signature']:
@@ -176,4 +180,3 @@ def run(self):
             return base_directive.run(self)
 
         return directive
-
diff --git a/examples/dmri_camino_dti.py b/examples/dmri_camino_dti.py
index 4c81eebf09..bf94ebdae9 100755
--- a/examples/dmri_camino_dti.py
+++ b/examples/dmri_camino_dti.py
@@ -268,7 +268,7 @@ def get_affine(volume):
 tractography.connect([(dtifit, fa, [("tensor_fitted", "in_file")])])
 tractography.connect([(fa, analyzeheader_fa, [("fa", "in_file")])])
 tractography.connect([(inputnode, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'),
-    (('dwi', get_data_dims), 'data_dims')])])
+                                                     (('dwi', get_data_dims), 'data_dims')])])
 tractography.connect([(fa, fa2nii, [('fa', 'data_file')])])
 tractography.connect([(inputnode, fa2nii, [(('dwi', get_affine), 'affine')])])
 tractography.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])])
@@ -277,7 +277,7 @@ def get_affine(volume):
 tractography.connect([(dtifit, trace, [("tensor_fitted", "in_file")])])
 tractography.connect([(trace, analyzeheader_trace, [("trace", "in_file")])])
 tractography.connect([(inputnode, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'),
-    (('dwi', get_data_dims), 'data_dims')])])
+                                                        (('dwi', get_data_dims), 'data_dims')])])
 tractography.connect([(trace, trace2nii, [('trace', 'data_file')])])
 tractography.connect([(inputnode, trace2nii, [(('dwi', get_affine), 'affine')])])
 tractography.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])])
@@ -287,10 +287,10 @@ def get_affine(volume):
 tractography.connect([(trackpico, cam2trk_pico, [('tracked', 'in_file')])])
 tractography.connect([(trackdt, cam2trk_dt, [('tracked', 'in_file')])])
 tractography.connect([(inputnode, cam2trk_pico, [(('dwi', get_vox_dims), 'voxel_dims'),
-    (('dwi', get_data_dims), 'data_dims')])])
+                                                 (('dwi', get_data_dims), 'data_dims')])])
 tractography.connect([(inputnode, cam2trk_dt, [(('dwi', get_vox_dims), 'voxel_dims'),
-    (('dwi', get_data_dims), 'data_dims')])])
+                                               (('dwi', get_data_dims), 'data_dims')])])
 
 
 """
@@ -303,9 +303,9 @@ def get_affine(volume):
 workflow.base_dir = os.path.abspath('camino_dti_tutorial')
 workflow.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
                   (datasource, tractography, [('dwi', 'inputnode.dwi'),
-                      ('bvals', 'inputnode.bvals'),
-                      ('bvecs', 'inputnode.bvecs')
-                      ])
+                                              ('bvals', 'inputnode.bvals'),
+                                              ('bvecs', 'inputnode.bvecs')
+                                              ])
                   ])
 """
 The following functions run the whole workflow and produce a .dot and .png graph of the processing pipeline.
diff --git a/examples/dmri_connectivity.py b/examples/dmri_connectivity.py
index 1206e1821e..35c7ee3c39 100755
--- a/examples/dmri_connectivity.py
+++ b/examples/dmri_connectivity.py
@@ -464,7 +464,7 @@ def select_aparc_annot(list_of_files):
 mapping.connect([(dtifit, fa, [("tensor_fitted", "in_file")])])
 mapping.connect([(fa, analyzeheader_fa, [("fa", "in_file")])])
 mapping.connect([(inputnode, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'),
-    (('dwi', get_data_dims), 'data_dims')])])
+                                                (('dwi', get_data_dims), 'data_dims')])])
 mapping.connect([(fa, fa2nii, [('fa', 'data_file')])])
 mapping.connect([(inputnode, fa2nii, [(('dwi', get_affine), 'affine')])])
 mapping.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])])
@@ -473,7 +473,7 @@ def select_aparc_annot(list_of_files):
 mapping.connect([(dtifit, trace, [("tensor_fitted", "in_file")])])
 mapping.connect([(trace, analyzeheader_trace, [("trace", "in_file")])])
 mapping.connect([(inputnode, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'),
-    (('dwi', get_data_dims), 'data_dims')])])
+                                                   (('dwi', get_data_dims), 'data_dims')])])
 mapping.connect([(trace, trace2nii, [('trace', 'data_file')])])
 mapping.connect([(inputnode, trace2nii, [(('dwi', get_affine), 'affine')])])
 mapping.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])])
@@ -490,7 +490,7 @@ def select_aparc_annot(list_of_files):
                 (camino2trackvis, trk2camino, [['trackvis', 'in_file']])
                 ])
 mapping.connect([(inputnode, camino2trackvis, [(('dwi', get_vox_dims), 'voxel_dims'),
-    (('dwi', get_data_dims), 'data_dims')])])
+                                               (('dwi', get_data_dims), 'data_dims')])])
 
 """
 Here the CMTK connectivity mapping nodes are connected.
@@ -571,11 +571,11 @@ def select_aparc_annot(list_of_files):
 connectivity.connect([
                     (infosource, datasource, [('subject_id', 'subject_id')]),
                     (datasource, mapping, [('dwi', 'inputnode.dwi'),
-                        ('bvals', 'inputnode.bvals'),
-                        ('bvecs', 'inputnode.bvecs')
-                        ]),
-                    (infosource, mapping, [('subject_id', 'inputnode.subject_id')])
-                    ])
+                                           ('bvals', 'inputnode.bvals'),
+                                           ('bvecs', 'inputnode.bvecs')
+                                           ]),
+    (infosource, mapping, [('subject_id', 'inputnode.subject_id')])
+])
 
 """
 The following functions run the whole workflow and produce graphs describing the processing pipeline.
diff --git a/examples/dmri_connectivity_advanced.py b/examples/dmri_connectivity_advanced.py
index bbf648aeb7..b1d3ae692f 100755
--- a/examples/dmri_connectivity_advanced.py
+++ b/examples/dmri_connectivity_advanced.py
@@ -56,7 +56,8 @@
 import nipype.interfaces.cmtk as cmtk
 import nipype.interfaces.dipy as dipy
 import inspect
-import os, os.path as op  # system functions
+import os
+import os.path as op  # system functions
 from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline
 from nipype.workflows.dmri.camino.connectivity_mapping import select_aparc_annot
 from nipype.utils.misc import package_check
@@ -580,11 +581,11 @@
 connectivity.connect([
                     (infosource, datasource, [('subject_id', 'subject_id')]),
                     (datasource, mapping, [('dwi', 'inputnode.dwi'),
-                        ('bvals', 'inputnode.bvals'),
-                        ('bvecs', 'inputnode.bvecs')
-                        ]),
-                    (infosource, mapping, [('subject_id', 'inputnode.subject_id')])
-                    ])
+                                           ('bvals', 'inputnode.bvals'),
+                                           ('bvecs', 'inputnode.bvecs')
+                                           ]),
+    (infosource, mapping, [('subject_id', 'inputnode.subject_id')])
+])
 
 """
 The following functions run the whole workflow and produce a .dot and .png graph of the processing pipeline.
diff --git a/examples/dmri_dtk_dti.py b/examples/dmri_dtk_dti.py
index f3c5b3e0dc..e71d519912 100755
--- a/examples/dmri_dtk_dti.py
+++ b/examples/dmri_dtk_dti.py
@@ -154,9 +154,9 @@
 """
 
 computeTensor.connect([
-                      (fslroi, bet, [('roi_file', 'in_file')]),
-                      (eddycorrect, dtifit, [('outputnode.eddy_corrected', 'DWI')])
-                      ])
+    (fslroi, bet, [('roi_file', 'in_file')]),
+    (eddycorrect, dtifit, [('outputnode.eddy_corrected', 'DWI')])
+])
 
 
 """
@@ -177,8 +177,8 @@
 """
 
 tractography.connect([
-                     (dtk_tracker, smooth_trk, [('track_file', 'track_file')])
-                     ])
+    (dtk_tracker, smooth_trk, [('track_file', 'track_file')])
+])
 
 
 """
@@ -201,18 +201,16 @@ def getstripdir(subject_id):
 dwiproc = pe.Workflow(name="dwiproc")
 dwiproc.base_dir = os.path.abspath('dtk_dti_tutorial')
 dwiproc.connect([
-                (infosource, datasource, [('subject_id', 'subject_id')]),
-                (datasource, computeTensor, [('dwi', 'fslroi.in_file'),
-                                             ('bvals', 'dtifit.bvals'),
-                                             ('bvecs', 'dtifit.bvecs'),
-                                             ('dwi', 'eddycorrect.inputnode.in_file')]),
-                (computeTensor, tractography, [('bet.mask_file', 'dtk_tracker.mask1_file'),
-                                               ('dtifit.tensor', 'dtk_tracker.tensor_file')
-                                               ])
-                ])
+    (infosource, datasource, [('subject_id', 'subject_id')]),
+    (datasource, computeTensor, [('dwi', 'fslroi.in_file'),
+                                 ('bvals', 'dtifit.bvals'),
+                                 ('bvecs', 'dtifit.bvecs'),
+                                 ('dwi', 'eddycorrect.inputnode.in_file')]),
+    (computeTensor, tractography, [('bet.mask_file', 'dtk_tracker.mask1_file'),
+                                   ('dtifit.tensor', 'dtk_tracker.tensor_file')
+                                   ])
+])
 
 if __name__ == '__main__':
     dwiproc.run()
     dwiproc.write_graph()
-
-
diff --git a/examples/dmri_dtk_odf.py b/examples/dmri_dtk_odf.py
index 2cc4e6ea1d..ff295b1d9f 100755
--- a/examples/dmri_dtk_odf.py
+++ b/examples/dmri_dtk_odf.py
@@ -153,11 +153,11 @@
 """
 
 compute_ODF.connect([
-                    (fslroi, bet, [('roi_file', 'in_file')]),
-                    (eddycorrect, odf_recon, [('outputnode.eddy_corrected', 'DWI')]),
-                    (eddycorrect, hardi_mat, [('outputnode.eddy_corrected', 'reference_file')]),
-                    (hardi_mat, odf_recon, [('out_file', 'matrix')])
-                    ])
+    (fslroi, bet, [('roi_file', 'in_file')]),
+    (eddycorrect, odf_recon, [('outputnode.eddy_corrected', 'DWI')]),
+    (eddycorrect, hardi_mat, [('outputnode.eddy_corrected', 'reference_file')]),
+    (hardi_mat, odf_recon, [('out_file', 'matrix')])
+])
 
 
 """
@@ -177,8 +177,8 @@
 """
 
 tractography.connect([
-                     (odf_tracker, smooth_trk, [('track_file', 'track_file')])
-                     ])
+    (odf_tracker, smooth_trk, [('track_file', 'track_file')])
+])
 
 
 """
@@ -189,16 +189,16 @@
 dwiproc = pe.Workflow(name="dwiproc")
 dwiproc.base_dir = os.path.abspath('dtk_odf_tutorial')
 dwiproc.connect([
-                (infosource, datasource, [('subject_id', 'subject_id')]),
-                (datasource, compute_ODF, [('dwi', 'fslroi.in_file'),
-                                           ('bvals', 'hardi_mat.bvals'),
-                                           ('bvecs', 'hardi_mat.bvecs'),
-                                           ('dwi', 'eddycorrect.inputnode.in_file')]),
-                (compute_ODF, tractography, [('bet.mask_file', 'odf_tracker.mask1_file'),
-                                             ('odf_recon.ODF', 'odf_tracker.ODF'),
-                                             ('odf_recon.max', 'odf_tracker.max')
-                                             ])
-                ])
+    (infosource, datasource, [('subject_id', 'subject_id')]),
+    (datasource, compute_ODF, [('dwi', 'fslroi.in_file'),
+                               ('bvals', 'hardi_mat.bvals'),
+                               ('bvecs', 'hardi_mat.bvecs'),
+                               ('dwi', 'eddycorrect.inputnode.in_file')]),
+    (compute_ODF, tractography, [('bet.mask_file', 'odf_tracker.mask1_file'),
+                                 ('odf_recon.ODF', 'odf_tracker.ODF'),
+                                 ('odf_recon.max', 'odf_tracker.max')
+                                 ])
+])
 
 dwiproc.inputs.compute_ODF.hardi_mat.oblique_correction = True
 dwiproc.inputs.compute_ODF.odf_recon.n_directions = 31
@@ -208,5 +208,3 @@
 if __name__ == '__main__':
     dwiproc.run()
     dwiproc.write_graph()
-
-
diff --git a/examples/dmri_fsl_dti.py b/examples/dmri_fsl_dti.py
index 879c5b8ec5..1eb3c99bdd 100755
--- a/examples/dmri_fsl_dti.py
+++ b/examples/dmri_fsl_dti.py
@@ -76,12 +76,12 @@
             bvals=[['subject_id', 'bvals']],
             seed_file=[['subject_id', 'MASK_average_thal_right']],
             target_masks=[['subject_id', ['MASK_average_M1_right',
-                'MASK_average_S1_right',
-                'MASK_average_occipital_right',
-                'MASK_average_pfc_right',
-                'MASK_average_pmc_right',
-                'MASK_average_ppc_right',
-                'MASK_average_temporal_right']]])
+                                          'MASK_average_S1_right',
+                                          'MASK_average_occipital_right',
+                                          'MASK_average_pfc_right',
+                                          'MASK_average_pmc_right',
+                                          'MASK_average_ppc_right',
+                                          'MASK_average_temporal_right']]])
 
 infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
                      name="infosource")
@@ -165,11 +165,11 @@
 """
 
 computeTensor.connect([
-                      (fslroi, bet, [('roi_file', 'in_file')]),
-                      (eddycorrect, dtifit, [('outputnode.eddy_corrected', 'dwi')]),
-                      (infosource, dtifit, [['subject_id', 'base_name']]),
-                      (bet, dtifit, [('mask_file', 'mask')])
-                      ])
+    (fslroi, bet, [('roi_file', 'in_file')]),
+    (eddycorrect, dtifit, [('outputnode.eddy_corrected', 'dwi')]),
+    (infosource, dtifit, [['subject_id', 'base_name']]),
+    (bet, dtifit, [('mask_file', 'mask')])
+])
 
 
 """
@@ -223,9 +223,9 @@
 tractography.add_nodes([bedpostx, flirt])
 tractography.connect([(bedpostx, probtrackx, [('outputnode.thsamples', 'thsamples'),
-                          ('outputnode.phsamples', 'phsamples'),
-                          ('outputnode.fsamples', 'fsamples')
-                          ]),
+                                              ('outputnode.phsamples', 'phsamples'),
+                                              ('outputnode.fsamples', 'fsamples')
+                                              ]),
                       (probtrackx, findthebiggest, [('targets', 'in_files')]),
                       (flirt, probtrackx, [('out_matrix_file', 'xfm')])
                       ])
@@ -252,27 +252,25 @@ def getstripdir(subject_id):
 dwiproc = pe.Workflow(name="dwiproc")
 dwiproc.base_dir = os.path.abspath('fsl_dti_tutorial')
 dwiproc.connect([
-                (infosource, datasource, [('subject_id', 'subject_id')]),
-                (datasource, computeTensor, [('dwi', 'fslroi.in_file'),
-                                             ('bvals', 'dtifit.bvals'),
-                                             ('bvecs', 'dtifit.bvecs'),
-                                             ('dwi', 'eddycorrect.inputnode.in_file')]),
-                (datasource, tractography, [('bvals', 'bedpostx.inputnode.bvals'),
-                                            ('bvecs', 'bedpostx.inputnode.bvecs'),
-                                            ('seed_file', 'probtrackx.seed'),
-                                            ('target_masks', 'probtrackx.target_masks')
-                                            ]),
-                (computeTensor, tractography, [('eddycorrect.outputnode.eddy_corrected', 'bedpostx.inputnode.dwi'),
-                                               ('bet.mask_file', 'bedpostx.inputnode.mask'),
-                                               ('bet.mask_file', 'probtrackx.mask'),
-                                               ('fslroi.roi_file', 'flirt.reference')]),
-                (infosource, datasink, [('subject_id', 'container'),
-                                        (('subject_id', getstripdir), 'strip_dir')]),
-                (tractography, datasink, [('findthebiggest.out_file', 'fbiggest.@biggestsegmentation')])
-                ])
+    (infosource, datasource, [('subject_id', 'subject_id')]),
+    (datasource, computeTensor, [('dwi', 'fslroi.in_file'),
+                                 ('bvals', 'dtifit.bvals'),
+                                 ('bvecs', 'dtifit.bvecs'),
+                                 ('dwi', 'eddycorrect.inputnode.in_file')]),
+    (datasource, tractography, [('bvals', 'bedpostx.inputnode.bvals'),
+                                ('bvecs', 'bedpostx.inputnode.bvecs'),
+                                ('seed_file', 'probtrackx.seed'),
+                                ('target_masks', 'probtrackx.target_masks')
+                                ]),
+    (computeTensor, tractography, [('eddycorrect.outputnode.eddy_corrected', 'bedpostx.inputnode.dwi'),
+                                   ('bet.mask_file', 'bedpostx.inputnode.mask'),
+                                   ('bet.mask_file', 'probtrackx.mask'),
+                                   ('fslroi.roi_file', 'flirt.reference')]),
+    (infosource, datasink, [('subject_id', 'container'),
+                            (('subject_id', getstripdir), 'strip_dir')]),
+    (tractography, datasink, [('findthebiggest.out_file', 'fbiggest.@biggestsegmentation')])
+])
 
 if __name__ == '__main__':
     dwiproc.run()
     dwiproc.write_graph()
-
-
diff --git a/examples/dmri_mrtrix_dti.py b/examples/dmri_mrtrix_dti.py
index d010518874..36b8f5508e 100755
--- a/examples/dmri_mrtrix_dti.py
+++ b/examples/dmri_mrtrix_dti.py
@@ -25,7 +25,8 @@
 import nipype.interfaces.mrtrix as mrtrix  # <---- The important new part!
 import nipype.interfaces.fsl as fsl
 import nipype.algorithms.misc as misc
-import os, os.path as op  # system functions
+import os
+import os.path as op  # system functions
 
 fsl.FSLCommand.set_default_output_type('NIFTI')
 
@@ -249,12 +250,12 @@
 dwiproc = pe.Workflow(name="dwiproc")
 dwiproc.base_dir = os.path.abspath('dmri_mrtrix_dti')
 dwiproc.connect([
-                (infosource, datasource, [('subject_id', 'subject_id')]),
-                (datasource, tractography, [('dwi', 'inputnode.dwi'),
-                                            ('bvals', 'inputnode.bvals'),
-                                            ('bvecs', 'inputnode.bvecs')
-                                            ])
-                ])
+    (infosource, datasource, [('subject_id', 'subject_id')]),
+    (datasource, tractography, [('dwi', 'inputnode.dwi'),
+                                ('bvals', 'inputnode.bvals'),
+                                ('bvecs', 'inputnode.bvecs')
+                                ])
+])
 
 if __name__ == '__main__':
     dwiproc.run()
diff --git a/examples/dmri_preprocessing.py b/examples/dmri_preprocessing.py
index 22676d6093..d814e4facc 100644
--- a/examples/dmri_preprocessing.py
+++ b/examples/dmri_preprocessing.py
@@ -81,7 +81,7 @@
 """
 
 datasource = pe.Node(nio.DataGrabber(infields=['subject_id'],
-                     outfields=list(info.keys())), name='datasource')
+                                     outfields=list(info.keys())), name='datasource')
 
 datasource.inputs.template = "%s/%s"
 
@@ -100,7 +100,7 @@
 """
 
 inputnode = pe.Node(niu.IdentityInterface(fields=["dwi", "bvecs", "bvals",
-                    "dwi_rev"]), name="inputnode")
+                                                  "dwi_rev"]), name="inputnode")
 
 
 """
@@ -155,14 +155,14 @@
 wf = pe.Workflow(name="dMRI_Preprocessing")
 wf.base_dir = os.path.abspath('preprocessing_dmri_tutorial')
 wf.connect([
-           (infosource, datasource, [('subject_id', 'subject_id')]),
-           (datasource, prep, [('dwi', 'inputnode.in_file'),
-                               ('dwi_rev', 'inputnode.alt_file'),
-                               ('bvals', 'inputnode.in_bval'),
-                               ('bvecs', 'inputnode.in_bvec')]),
-           (prep, bias, [('outputnode.out_file', 'inputnode.in_file'),
-                         ('outputnode.out_mask', 'inputnode.in_mask')]),
-           (datasource, bias, [('bvals', 'inputnode.in_bval')])
+    (infosource, datasource, [('subject_id', 'subject_id')]),
+    (datasource, prep, [('dwi', 'inputnode.in_file'),
+                        ('dwi_rev', 'inputnode.alt_file'),
+                        ('bvals', 'inputnode.in_bval'),
+                        ('bvecs', 'inputnode.in_bvec')]),
+    (prep, bias, [('outputnode.out_file', 'inputnode.in_file'),
+                  ('outputnode.out_mask', 'inputnode.in_mask')]),
+    (datasource, bias, [('bvals', 'inputnode.in_bval')])
 ])
diff --git a/examples/fmri_ants_openfmri.py b/examples/fmri_ants_openfmri.py
index 7a42ee8432..8c9cbdcdd0 100755
--- a/examples/fmri_ants_openfmri.py
+++ b/examples/fmri_ants_openfmri.py
@@ -217,7 +217,7 @@ def create_reg_workflow(name='registration'):
     reg.inputs.convergence_window_size = [20] * 2 + [5]
     reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
     reg.inputs.sigma_units = ['vox'] * 3
-    reg.inputs.shrink_factors = [[3, 2, 1]]*2 + [[4, 2, 1]]
+    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
     reg.inputs.use_estimate_learning_rate_once = [True] * 3
     reg.inputs.use_histogram_matching = [False] * 2 + [True]
     reg.inputs.winsorize_lower_quantile = 0.005
@@ -428,7 +428,7 @@ def create_fs_reg_workflow(name='registration'):
     reg.inputs.convergence_window_size = [20] * 2 + [5]
     reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
     reg.inputs.sigma_units = ['vox'] * 3
-    reg.inputs.shrink_factors = [[3, 2, 1]]*2 + [[4, 2, 1]]
+    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
     reg.inputs.use_estimate_learning_rate_once = [True] * 3
     reg.inputs.use_histogram_matching = [False] * 2 + [True]
     reg.inputs.winsorize_lower_quantile = 0.005
@@ -903,7 +903,7 @@ def merge_files(copes, varcopes, zstats):
                        [('copes', 'copes'),
                         ('varcopes', 'varcopes'),
                         ('zstats', 'zstats'),
-                        ])])
+                       ])])
     wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')
 
     def split_files(in_files, splits):
@@ -1007,7 +1007,7 @@ def get_subs(subject_id, conds, run_id, model_id, task_id):
                        [('design_cov', 'qa.model'),
                         ('design_image', 'qa.model.@matrix_image'),
                         ('design_file', 'qa.model.@matrix'),
-                        ])])
+                       ])])
     wf.connect([(preproc, datasink, [('outputspec.motion_parameters',
                                       'qa.motion'),
                                      ('outputspec.motion_plots',
diff --git a/examples/fmri_freesurfer_smooth.py b/examples/fmri_freesurfer_smooth.py
index 315030d3e6..06c9eef847 100755
--- a/examples/fmri_freesurfer_smooth.py
+++ b/examples/fmri_freesurfer_smooth.py
@@ -177,7 +177,7 @@
                  (realign, ApplyVolTransform, [('mean_image', 'source_file')]),
                  (ApplyVolTransform, Threshold, [('transformed_file', 'in_file')]),
                  (realign, art, [('realignment_parameters', 'realignment_parameters'),
-                     ('realigned_files', 'realigned_files')]),
+                                 ('realigned_files', 'realigned_files')]),
                  (Threshold, art, [('binary_file', 'mask_file')]),
                  (realign, volsmooth, [('realigned_files', 'in_files')]),
                  (realign, surfsmooth, [('realigned_files', 'in_file')]),
@@ -227,9 +227,9 @@
 volanalysis.connect([(modelspec, level1design, [('session_info', 'session_info')]),
                      (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
                      (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'),
-                         ('beta_images', 'beta_images'),
-                         ('residual_image', 'residual_image')]),
-                     ])
+                                                         ('beta_images', 'beta_images'),
+                                                         ('residual_image', 'residual_image')]),
+                     ])
 
 """
 Set up surface analysis workflow
@@ -327,13 +327,13 @@
 
 l1pipeline = pe.Workflow(name='firstlevel')
 l1pipeline.connect([(inputnode, preproc, [('func', 'realign.in_files'),
-                        ('subject_id', 'surfregister.subject_id'),
-                        ('subject_id', 'fssource.subject_id'),
-                        ]),
+                                          ('subject_id', 'surfregister.subject_id'),
+                                          ('subject_id', 'fssource.subject_id'),
+                                          ]),
                     (inputnode, volanalysis, [('session_info', 'modelspec.subject_info'),
-                        ('contrasts', 'contrastestimate.contrasts')]),
-                    (inputnode, surfanalysis, [('session_info', 'modelspec.subject_info'),
-                        ('contrasts', 'contrastestimate.contrasts')]),
+                                              ('contrasts', 'contrastestimate.contrasts')]),
+                    (inputnode, surfanalysis, [('session_info', 'modelspec.subject_info'),
+                                               ('contrasts', 'contrastestimate.contrasts')]),
                     ])
 
 # attach volume and surface model specification and estimation components
@@ -450,7 +450,7 @@ def subjectinfo(subject_id):
     from nipype.interfaces.base import Bunch
     from copy import deepcopy
-    print("Subject ID: %s\n" %str(subject_id))
+    print("Subject ID: %s\n" % str(subject_id))
     output = []
     names = ['Task-Odd', 'Task-Even']
     for r in range(4):
@@ -521,8 +521,8 @@ def subjectinfo(subject_id):
 level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
                 (datasource, l1pipeline, [('func', 'inputnode.func')]),
                 (infosource, l1pipeline, [('subject_id', 'inputnode.subject_id'),
-                    (('subject_id', subjectinfo),
-                     'inputnode.session_info')]),
+                                          (('subject_id', subjectinfo),
+                                           'inputnode.session_info')]),
                 ])
@@ -539,16 +539,16 @@ def subjectinfo(subject_id):
 
 
 def getsubs(subject_id):
-    subs = [('_subject_id_%s/' %subject_id, '')]
+    subs = [('_subject_id_%s/' % subject_id, '')]
    return subs
 
 # store relevant outputs from various stages of the 1st level analysis
 level1.connect([(infosource, datasink, [('subject_id', 'container'),
-                    (('subject_id', getsubs), 'substitutions')
-                    ]),
+                                        (('subject_id', getsubs), 'substitutions')
+                                        ]),
                 (l1pipeline, datasink, [('surfanalysis.contrastestimate.con_images', 'contrasts'),
-                    ('preproc.surfregister.out_reg_file', 'registrations'),
-                    ])
+                                        ('preproc.surfregister.out_reg_file', 'registrations'),
+                                        ])
                 ])
@@ -579,7 +579,7 @@ def getsubs(subject_id):
 l2inputnode = pe.Node(interface=util.IdentityInterface(fields=['contrasts',
                                                                'hemi']),
                       name='inputnode')
-l2inputnode.iterables = [('contrasts', list(range(1, len(contrasts)+1))),
+l2inputnode.iterables = [('contrasts', list(range(1, len(contrasts) + 1))),
                          ('hemi', ['lh', 'rh'])]
 
 """
@@ -610,7 +610,7 @@ def ordersubjects(files, subj_list):
     outlist = []
     for s in subj_list:
         for f in files:
-            if '/%s/' %s in f:
+            if '/%s/' % s in f:
                 outlist.append(f)
                 continue
     print(outlist)
@@ -648,4 +648,3 @@ def list2tuple(listoflist):
 if __name__ == '__main__':
     l2flow.run()
     l2flow.write_graph(graph2use='flat')
-
diff --git a/examples/fmri_fsl.py b/examples/fmri_fsl.py
index 285fc81d78..d33deaab24 100755
--- a/examples/fmri_fsl.py
+++ b/examples/fmri_fsl.py
@@ -191,7 +191,7 @@ def getmiddlevolume(func):
 
 
 def getthreshop(thresh):
-    return '-thr %.10f -Tmin -bin' %(0.1*thresh[0][1])
+    return '-thr %.10f -Tmin -bin' % (0.1 * thresh[0][1])
 preproc.connect(getthresh, ('out_stat', getthreshop), threshold, 'op_string')
 
 """
@@ -259,11 +259,11 @@ def getthreshop(thresh):
 
 
 def getbtthresh(medianvals):
-    return [0.75*val for val in medianvals]
+    return [0.75 * val for val in medianvals]
 
 
 def getusans(x):
-    return [[tuple([val[0], 0.75*val[1]])] for val in x]
+    return [[tuple([val[0], 0.75 * val[1]])] for val in x]
 
 preproc.connect(maskfunc2, 'out_file', smooth, 'in_file')
 preproc.connect(medianval, ('out_stat', getbtthresh), smooth, 'brightness_threshold')
@@ -367,7 +367,7 @@ def getinormscale(medianvals):
 Use :class:`nipype.algorithms.modelgen.SpecifyModel` to generate design information.
""" -modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec") +modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec") """ Use :class:`nipype.interfaces.fsl.Level1Design` to generate a run specific fsf @@ -402,20 +402,20 @@ def getinormscale(medianvals): conestimate = pe.MapNode(interface=fsl.ContrastMgr(), name='conestimate', iterfield=['tcon_file', 'param_estimates', - 'sigmasquareds', 'corrections', - 'dof_file']) + 'sigmasquareds', 'corrections', + 'dof_file']) modelfit.connect([ - (modelspec, level1design, [('session_info', 'session_info')]), - (level1design, modelgen, [('fsf_files', 'fsf_file'), - ('ev_files', 'ev_files')]), - (modelgen, modelestimate, [('design_file', 'design_file')]), - (modelgen, conestimate, [('con_file', 'tcon_file')]), - (modelestimate, conestimate, [('param_estimates', 'param_estimates'), - ('sigmasquareds', 'sigmasquareds'), - ('corrections', 'corrections'), - ('dof_file', 'dof_file')]), - ]) + (modelspec, level1design, [('session_info', 'session_info')]), + (level1design, modelgen, [('fsf_files', 'fsf_file'), + ('ev_files', 'ev_files')]), + (modelgen, modelestimate, [('design_file', 'design_file')]), + (modelgen, conestimate, [('con_file', 'tcon_file')]), + (modelestimate, conestimate, [('param_estimates', 'param_estimates'), + ('sigmasquareds', 'sigmasquareds'), + ('corrections', 'corrections'), + ('dof_file', 'dof_file')]), +]) """ Set up fixed-effects workflow @@ -431,8 +431,8 @@ def getinormscale(medianvals): """ copemerge = pe.MapNode(interface=fsl.Merge(dimension='t'), - iterfield=['in_files'], - name="copemerge") + iterfield=['in_files'], + name="copemerge") varcopemerge = pe.MapNode(interface=fsl.Merge(dimension='t'), iterfield=['in_files'], @@ -456,8 +456,8 @@ def getinormscale(medianvals): fixed_fx.connect([(copemerge, flameo, [('merged_file', 'cope_file')]), (varcopemerge, flameo, [('merged_file', 'var_cope_file')]), (level2model, flameo, [('design_mat', 'design_file'), - ('design_con', 't_con_file'), - ('design_grp', 'cov_split_file')]), + ('design_con', 't_con_file'), + ('design_grp', 'cov_split_file')]), ]) @@ -487,9 +487,9 @@ def num_copes(files): ('highpass.out_file', 'modelestimate.in_file')]), (preproc, fixed_fx, [('coregister.out_file', 'flameo.mask_file')]), (modelfit, fixed_fx, [(('conestimate.copes', sort_copes), 'copemerge.in_files'), - (('conestimate.varcopes', sort_copes), 'varcopemerge.in_files'), - (('conestimate.copes', num_copes), 'l2model.num_copes'), - ]) + (('conestimate.varcopes', sort_copes), 'varcopemerge.in_files'), + (('conestimate.copes', num_copes), 'l2model.num_copes'), + ]) ]) @@ -648,5 +648,3 @@ def subjectinfo(subject_id): l1pipeline.write_graph() outgraph = l1pipeline.run() # l1pipeline.run(plugin='MultiProc', plugin_args={'n_procs':2}) - - diff --git a/examples/fmri_fsl_feeds.py b/examples/fmri_fsl_feeds.py index f564074d20..88fb6a312c 100755 --- a/examples/fmri_fsl_feeds.py +++ b/examples/fmri_fsl_feeds.py @@ -79,7 +79,7 @@ modelspec.inputs.time_repetition = TR modelspec.inputs.high_pass_filter_cutoff = 100 modelspec.inputs.subject_info = [Bunch(conditions=['Visual', 'Auditory'], - onsets=[list(range(0, int(180*TR), 60)), list(range(0, int(180*TR), 90))], + onsets=[list(range(0, int(180 * TR), 60)), list(range(0, int(180 * TR), 90))], durations=[[30], [45]], amplitudes=None, tmod=None, diff --git a/examples/fmri_fsl_reuse.py b/examples/fmri_fsl_reuse.py index 8dd05b19ab..272068d229 100755 --- a/examples/fmri_fsl_reuse.py +++ b/examples/fmri_fsl_reuse.py @@ -65,7 +65,7 @@ 
iterfield=['realigned_files', 'realignment_parameters', 'mask_file'], name="art") -modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec") +modelspec = pe.Node(interface=model.SpecifyModel(), name="modelspec") level1_workflow.connect([(preproc, art, [('outputspec.motion_parameters', 'realignment_parameters'), @@ -202,7 +202,7 @@ def num_copes(files): def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n" %str(subject_id)) + print("Subject ID: %s\n" % str(subject_id)) output = [] names = ['Task-Odd', 'Task-Even'] for r in range(4): @@ -259,5 +259,3 @@ def subjectinfo(subject_id): # level1_workflow.write_graph() level1_workflow.run() # level1_workflow.run(plugin='MultiProc', plugin_args={'n_procs':2}) - - diff --git a/examples/fmri_nipy_glm.py b/examples/fmri_nipy_glm.py index f97d7e7189..ca149a248e 100755 --- a/examples/fmri_nipy_glm.py +++ b/examples/fmri_nipy_glm.py @@ -159,7 +159,7 @@ def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n" %str(subject_id)) + print("Subject ID: %s\n" % str(subject_id)) output = [] names = ['Task-Odd', 'Task-Even'] for r in range(4): @@ -245,7 +245,7 @@ def subjectinfo(subject_id): (datasource, realign, [('func', 'in_files')]), (realign, compute_mask, [('mean_image', 'mean_volume')]), (realign, coregister, [('mean_image', 'source'), - ('realigned_files', 'apply_to_files')]), + ('realigned_files', 'apply_to_files')]), (datasource, coregister, [('struct', 'target')]), (coregister, smooth, [('coregistered_files', 'in_files')]), (realign, modelspec, [('realignment_parameters', 'realignment_parameters')]), @@ -264,8 +264,7 @@ def subjectinfo(subject_id): ("axis", "axis"), ("constants", "constants"), ("reg_names", "reg_names")]) - ]) + ]) if __name__ == '__main__': l1pipeline.run() - diff --git a/examples/fmri_openfmri.py b/examples/fmri_openfmri.py index dddfcda605..af41f11833 100755 --- a/examples/fmri_openfmri.py +++ b/examples/fmri_openfmri.py @@ -332,7 +332,7 @@ def merge_files(copes, varcopes, zstats): [('copes', 'copes'), ('varcopes', 'varcopes'), ('zstats', 'zstats'), - ])]) + ])]) wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files') def split_files(in_files, splits): diff --git a/examples/fmri_spm.py b/examples/fmri_spm.py index b3fe92beb3..7d3f735a0c 100755 --- a/examples/fmri_spm.py +++ b/examples/fmri_spm.py @@ -183,7 +183,7 @@ def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy - print("Subject ID: %s\n" %str(subject_id)) + print("Subject ID: %s\n" % str(subject_id)) output = [] names = ['Task-Odd', 'Task-Even'] for r in range(4): @@ -271,13 +271,13 @@ def subjectinfo(subject_id): l1pipeline.connect([(infosource, datasource, [('subject_id', 'subject_id')]), (datasource, realign, [('func', 'in_files')]), (realign, coregister, [('mean_image', 'source'), - ('realigned_files', 'apply_to_files')]), + ('realigned_files', 'apply_to_files')]), (datasource, coregister, [('struct', 'target')]), (datasource, normalize, [('struct', 'source')]), (coregister, normalize, [('coregistered_files', 'apply_to_files')]), (normalize, smooth, [('normalized_files', 'in_files')]), (infosource, modelspec, [(('subject_id', subjectinfo), - 'subject_info')]), + 'subject_info')]), (realign, modelspec, [('realignment_parameters', 'realignment_parameters')]), (smooth, modelspec, [('smoothed_files', 'functional_runs')]), (normalize, skullstrip, [('normalized_source', 
'in_file')]), @@ -289,9 +289,9 @@ def subjectinfo(subject_id): (skullstrip, level1design, [('mask_file', 'mask_image')]), (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), - ('beta_images', 'beta_images'), - ('residual_image', 'residual_image')]), - ]) + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), + ]) """ @@ -324,19 +324,19 @@ def getstripdir(subject_id): # store relevant outputs from various stages of the 1st level analysis l1pipeline.connect([(infosource, datasink, [('subject_id', 'container'), - (('subject_id', getstripdir), 'strip_dir')]), + (('subject_id', getstripdir), 'strip_dir')]), (realign, datasink, [('mean_image', 'realign.@mean'), - ('realignment_parameters', 'realign.@param')]), + ('realignment_parameters', 'realign.@param')]), (art, datasink, [('outlier_files', 'art.@outliers'), - ('statistic_files', 'art.@stats')]), + ('statistic_files', 'art.@stats')]), (level1design, datasink, [('spm_mat_file', 'model.pre-estimate')]), (level1estimate, datasink, [('spm_mat_file', 'model.@spm'), - ('beta_images', 'model.@beta'), - ('mask_image', 'model.@mask'), - ('residual_image', 'model.@res'), - ('RPVimage', 'model.@rpv')]), + ('beta_images', 'model.@beta'), + ('mask_image', 'model.@mask'), + ('residual_image', 'model.@res'), + ('RPVimage', 'model.@rpv')]), (contrastestimate, datasink, [('con_images', 'contrasts.@con'), - ('spmT_images', 'contrasts.@T')]), + ('spmT_images', 'contrasts.@T')]), ]) @@ -351,7 +351,7 @@ def getstripdir(subject_id): """ # collect all the con images for each contrast. -contrast_ids = list(range(1, len(contrasts)+1)) +contrast_ids = list(range(1, len(contrasts) + 1)) l2source = pe.Node(nio.DataGrabber(infields=['fwhm', 'con']), name="l2source") # we use .*i* to capture both .img (SPM8) and .nii (SPM12) l2source.inputs.template = os.path.abspath('spm_tutorial/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*') @@ -385,8 +385,8 @@ def getstripdir(subject_id): l2pipeline.connect([(l2source, onesamplettestdes, [('outfiles', 'in_files')]), (onesamplettestdes, l2estimate, [('spm_mat_file', 'spm_mat_file')]), (l2estimate, l2conestimate, [('spm_mat_file', 'spm_mat_file'), - ('beta_images', 'beta_images'), - ('residual_image', 'residual_image')]), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), ]) """ diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index 1b34cb6b49..9933277f0f 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -187,11 +187,11 @@ def get_vox_dims(volume): l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]), (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]), (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'), - ('beta_images', 'beta_images'), - ('residual_image', 'residual_image')]), + ('beta_images', 'beta_images'), + ('residual_image', 'residual_image')]), (contrastestimate, threshold, [('spm_mat_file', 'spm_mat_file'), - ('spmT_images', 'stat_image')]), - ]) + ('spmT_images', 'stat_image')]), + ]) """ Preproc + Analysis pipeline @@ -376,9 +376,9 @@ def getstripdir(subject_id): # store relevant outputs from various stages of the 1st level analysis level1.connect([(infosource, datasink, [('subject_id', 'container'), - (('subject_id', getstripdir), 'strip_dir')]), + (('subject_id', getstripdir), 'strip_dir')]), (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'), - 
-                    ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]),
+                                        ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]),
                 ])
@@ -396,4 +396,3 @@ def getstripdir(subject_id):
 if __name__ == '__main__':
     level1.run()
     level1.write_graph()
-
diff --git a/examples/fmri_spm_dartel.py b/examples/fmri_spm_dartel.py
index 0c150999ca..a2e7d7fccc 100755
--- a/examples/fmri_spm_dartel.py
+++ b/examples/fmri_spm_dartel.py
@@ -113,7 +113,7 @@
 normalize_struct.inputs.fwhm = 2
 
 preproc.connect([(realign, coregister, [('mean_image', 'source'),
-                     ('realigned_files', 'apply_to_files')]),
+                                        ('realigned_files', 'apply_to_files')]),
                  (coregister, normalize_and_smooth_func, [('coregistered_files', 'apply_to_files')]),
                  (normalize_struct, skullstrip, [('normalized_files', 'in_file')]),
                  (realign, art, [('realignment_parameters', 'realignment_parameters')]),
@@ -183,12 +183,12 @@
 l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]),
                     (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
                     (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'),
-                        ('beta_images', 'beta_images'),
-                        ('residual_image', 'residual_image')]),
+                                                        ('beta_images', 'beta_images'),
+                                                        ('residual_image', 'residual_image')]),
                     (contrastestimate, selectcontrast, [('spmT_images', 'inlist')]),
                     (selectcontrast, overlaystats, [('out', 'stat_image')]),
                     (overlaystats, slicestats, [('out_file', 'in_file')])
-                    ])
+                    ])
 
 """
 Preproc + Analysis pipeline
@@ -302,7 +302,7 @@ def pickFieldFlow(dartel_flow_fields, subject_id):
     from nipype.utils.filemanip import split_filename
     for f in dartel_flow_fields:
         _, name, _ = split_filename(f)
-        if name.find("subject_id_%s" %subject_id):
+        if name.find("subject_id_%s" % subject_id):
             return f
 
     raise Exception
@@ -328,7 +328,7 @@ def pickFieldFlow(dartel_flow_fields, subject_id):
 def subjectinfo(subject_id):
     from nipype.interfaces.base import Bunch
     from copy import deepcopy
-    print("Subject ID: %s\n" %str(subject_id))
+    print("Subject ID: %s\n" % str(subject_id))
     output = []
     names = ['Task-Odd', 'Task-Even']
     for r in range(4):
@@ -404,8 +404,8 @@ def subjectinfo(subject_id):
                 (infosource, datasource, [('subject_id', 'subject_id')]),
                 (datasource, l1pipeline, [('func', 'preproc.realign.in_files'),
-                    ('struct', 'preproc.coregister.target'),
-                    ('struct', 'preproc.normalize_struct.apply_to_files')]),
+                                          ('struct', 'preproc.coregister.target'),
+                                          ('struct', 'preproc.normalize_struct.apply_to_files')]),
                 (dartel_workflow, l1pipeline, [('outputspec.template_file', 'preproc.normalize_struct.template_file'),
                                                ('outputspec.template_file', 'preproc.normalize_and_smooth_func.template_file')]),
                 (infosource, pick_flow, [('subject_id', 'subject_id')]),
@@ -413,7 +413,7 @@ def subjectinfo(subject_id):
                 (pick_flow, l1pipeline, [('dartel_flow_field', 'preproc.normalize_struct.flowfield_files'),
                                          ('dartel_flow_field', 'preproc.normalize_and_smooth_func.flowfield_files')]),
                 (infosource, l1pipeline, [(('subject_id', subjectinfo),
-                    'analysis.modelspec.subject_info')]),
+                                           'analysis.modelspec.subject_info')]),
                 ])
@@ -450,11 +450,11 @@ def getstripdir(subject_id):
 # store relevant outputs from various stages of the 1st level analysis
 level1.connect([(infosource, datasink, [('subject_id', 'container'),
-                    (('subject_id', getstripdir), 'strip_dir')]),
+                                        (('subject_id', getstripdir), 'strip_dir')]),
                 (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'),
-                    ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]),
+                                        ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]),
                 (infosource, report, [('subject_id', 'container'),
-                    (('subject_id', getstripdir), 'strip_dir')]),
+                                      (('subject_id', getstripdir), 'strip_dir')]),
                 (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]),
                 ])
@@ -485,7 +485,7 @@ def getstripdir(subject_id):
 """
 
 # collect all the con images for each contrast.
-contrast_ids = list(range(1, len(contrasts)+1))
+contrast_ids = list(range(1, len(contrasts) + 1))
 l2source = pe.Node(nio.DataGrabber(infields=['fwhm', 'con']), name="l2source")
 # we use .*i* to capture both .img (SPM8) and .nii (SPM12)
 l2source.inputs.template = os.path.abspath('spm_dartel_tutorial/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*')
@@ -519,8 +519,8 @@ def getstripdir(subject_id):
 l2pipeline.connect([(l2source, onesamplettestdes, [('outfiles', 'in_files')]),
                     (onesamplettestdes, l2estimate, [('spm_mat_file', 'spm_mat_file')]),
                     (l2estimate, l2conestimate, [('spm_mat_file', 'spm_mat_file'),
-                        ('beta_images', 'beta_images'),
-                        ('residual_image', 'residual_image')]),
+                                                 ('beta_images', 'beta_images'),
+                                                 ('residual_image', 'residual_image')]),
                     ])
 
 """
@@ -531,5 +531,3 @@ def getstripdir(subject_id):
 
 if __name__ == '__main__':
     l2pipeline.run()
-
-
diff --git a/examples/fmri_spm_face.py b/examples/fmri_spm_face.py
index 948475e8f5..7f918aafa7 100755
--- a/examples/fmri_spm_face.py
+++ b/examples/fmri_spm_face.py
@@ -182,11 +182,11 @@ def pickfirst(l):
 l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]),
                     (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
                     (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'),
-                        ('beta_images', 'beta_images'),
-                        ('residual_image', 'residual_image')]),
+                                                        ('beta_images', 'beta_images'),
+                                                        ('residual_image', 'residual_image')]),
                     (contrastestimate, threshold, [('spm_mat_file', 'spm_mat_file'),
-                        (('spmT_images', pickfirst), 'stat_image')]),
-                    ])
+                                                   (('spmT_images', pickfirst), 'stat_image')]),
+                    ])
 
 """
 Preproc + Analysis pipeline
@@ -228,7 +228,7 @@ def makelist(item):
 # Specify the subject directories
 subject_list = ['M03953']
 # Map field names to individual subject runs.
-info = dict(func=[['RawEPI', 'subject_id', 5, ["_%04d" %i for i in range(6, 357)]]],
+info = dict(func=[['RawEPI', 'subject_id', 5, ["_%04d" % i for i in range(6, 357)]]],
             struct=[['Structural', 'subject_id', 7, '']])
 
 infosource = pe.Node(interface=util.IdentityInterface(fields=['subject_id']),
@@ -402,7 +402,7 @@ def makelist(item):
 l1pipeline.connect([(preproc, paramanalysis,
                      [('realign.realignment_parameters', 'modelspec.realignment_parameters'),
                       (('smooth.smoothed_files', makelist),
-                       'modelspec.functional_runs')])])
+                          'modelspec.functional_runs')])])
 
 """
 Setup the pipeline
@@ -432,7 +432,7 @@ def makelist(item):
 level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
                 (datasource, l1pipeline, [('struct', 'preproc.coregister.source'),
-                    ('func', 'preproc.realign.in_files')])
+                                          ('func', 'preproc.realign.in_files')])
                 ])
@@ -466,11 +466,11 @@ def getstripdir(subject_id):
 # store relevant outputs from various stages of the 1st level analysis
 level1.connect([(infosource, datasink, [('subject_id', 'container'),
-                    (('subject_id', getstripdir), 'strip_dir')]),
+                                        (('subject_id', getstripdir), 'strip_dir')]),
                 (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'),
-                    ('analysis.contrastestimate.spmT_images', 'contrasts.@T'),
-                    ('paramanalysis.contrastestimate.con_images', 'paramcontrasts.@con'),
-                    ('paramanalysis.contrastestimate.spmT_images', 'paramcontrasts.@T')]),
+                                        ('analysis.contrastestimate.spmT_images', 'contrasts.@T'),
+                                        ('paramanalysis.contrastestimate.con_images', 'paramcontrasts.@con'),
+                                        ('paramanalysis.contrastestimate.spmT_images', 'paramcontrasts.@T')]),
                 ])
@@ -488,4 +488,3 @@ def getstripdir(subject_id):
 if __name__ == '__main__':
     level1.run()
     level1.write_graph()
-
diff --git a/examples/fmri_spm_nested.py b/examples/fmri_spm_nested.py
index 28d3d0e755..b73b6b8c2d 100755
--- a/examples/fmri_spm_nested.py
+++ b/examples/fmri_spm_nested.py
@@ -118,7 +118,7 @@
 smooth.iterables = ('fwhm', fwhmlist)
 
 preproc.connect([(realign, coregister, [('mean_image', 'source'),
-                     ('realigned_files', 'apply_to_files')]),
+                                        ('realigned_files', 'apply_to_files')]),
                  (coregister, normalize, [('coregistered_files', 'apply_to_files')]),
                  (normalize, smooth, [('normalized_files', 'in_files')]),
                  (normalize, skullstrip, [('normalized_source', 'in_file')]),
@@ -189,12 +189,12 @@
 l1analysis.connect([(modelspec, level1design, [('session_info', 'session_info')]),
                     (level1design, level1estimate, [('spm_mat_file', 'spm_mat_file')]),
                     (level1estimate, contrastestimate, [('spm_mat_file', 'spm_mat_file'),
-                        ('beta_images', 'beta_images'),
-                        ('residual_image', 'residual_image')]),
+                                                        ('beta_images', 'beta_images'),
+                                                        ('residual_image', 'residual_image')]),
                     (contrastestimate, selectcontrast, [('spmT_images', 'inlist')]),
                     (selectcontrast, overlaystats, [('out', 'stat_image')]),
                     (overlaystats, slicestats, [('out_file', 'in_file')])
-                    ])
+                    ])
 
 """
 Preproc + Analysis pipeline
@@ -290,7 +290,7 @@ def subjectinfo(subject_id):
     from nipype.interfaces.base import Bunch
     from copy import deepcopy
-    print("Subject ID: %s\n" %str(subject_id))
+    print("Subject ID: %s\n" % str(subject_id))
     output = []
     names = ['Task-Odd', 'Task-Even']
     for r in range(4):
@@ -363,10 +363,10 @@ def subjectinfo(subject_id):
 level1.connect([(infosource, datasource, [('subject_id', 'subject_id')]),
                 (datasource, l1pipeline, [('func', 'preproc.realign.in_files'),
-                    ('struct', 'preproc.coregister.target'),
-                    ('struct', 'preproc.normalize.source')]),
+                                          ('struct', 'preproc.coregister.target'),
+                                          ('struct', 'preproc.normalize.source')]),
                 (infosource, l1pipeline, [(('subject_id', subjectinfo),
-                    'analysis.modelspec.subject_info')]),
+                                           'analysis.modelspec.subject_info')]),
                 ])
@@ -403,11 +403,11 @@ def getstripdir(subject_id):
 # store relevant outputs from various stages of the 1st level analysis
 level1.connect([(infosource, datasink, [('subject_id', 'container'),
-                    (('subject_id', getstripdir), 'strip_dir')]),
+                                        (('subject_id', getstripdir), 'strip_dir')]),
                 (l1pipeline, datasink, [('analysis.contrastestimate.con_images', 'contrasts.@con'),
-                    ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]),
+                                        ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]),
                 (infosource, report, [('subject_id', 'container'),
-                    (('subject_id', getstripdir), 'strip_dir')]),
+                                      (('subject_id', getstripdir), 'strip_dir')]),
                 (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]),
                 ])
@@ -438,7 +438,7 @@ def getstripdir(subject_id):
 """
 
 # collect all the con images for each contrast.
-contrast_ids = list(range(1, len(contrasts)+1))
+contrast_ids = list(range(1, len(contrasts) + 1))
 l2source = pe.Node(nio.DataGrabber(infields=['fwhm', 'con']), name="l2source")
 # we use .*i* to capture both .img (SPM8) and .nii (SPM12)
 l2source.inputs.template = os.path.abspath('spm_tutorial2/l1output/*/con*/*/_fwhm_%d/con_%04d.*i*')
@@ -472,8 +472,8 @@ def getstripdir(subject_id):
 l2pipeline.connect([(l2source, onesamplettestdes, [('outfiles', 'in_files')]),
                     (onesamplettestdes, l2estimate, [('spm_mat_file', 'spm_mat_file')]),
                     (l2estimate, l2conestimate, [('spm_mat_file', 'spm_mat_file'),
-                        ('beta_images', 'beta_images'),
-                        ('residual_image', 'residual_image')]),
+                                                 ('beta_images', 'beta_images'),
+                                                 ('residual_image', 'residual_image')]),
                     ])
 
 """
@@ -484,4 +484,3 @@ def getstripdir(subject_id):
 if __name__ == '__main__':
     l2pipeline.run('MultiProc')
-
diff --git a/examples/frontiers_paper/smoothing_comparison.py b/examples/frontiers_paper/smoothing_comparison.py
index 79bf3e2aeb..b16b229f6d 100644
--- a/examples/frontiers_paper/smoothing_comparison.py
+++ b/examples/frontiers_paper/smoothing_comparison.py
@@ -28,8 +28,8 @@
 iter_smoothing_method = pe.Node(interface=util.IdentityInterface(fields=["smoothing_method"]),
                                 name="iter_smoothing_method")
 iter_smoothing_method.iterables = [('smoothing_method', ['isotropic_voxel',
-                                   'anisotropic_voxel',
-                                   'isotropic_surface'])]
+                                                         'anisotropic_voxel',
+                                                         'isotropic_surface'])]
 
 realign = pe.Node(interface=spm.Realign(), name="realign")
 realign.inputs.register_to_mean = True
@@ -111,7 +111,7 @@ def chooseindex(roi):
 specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd', 'Task-Even'],
                                            onsets=[list(range(15, 240, 60)), list(range(45, 240, 60))],
-                                           durations=[[15], [15]])]*4
+                                           durations=[[15], [15]])] * 4
 
 level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
 level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
diff --git a/examples/frontiers_paper/workflow_from_scratch.py b/examples/frontiers_paper/workflow_from_scratch.py
index 533da455fc..fa9bbd4cca 100644
--- a/examples/frontiers_paper/workflow_from_scratch.py
+++ b/examples/frontiers_paper/workflow_from_scratch.py
@@ -69,7 +69,7 @@
 specify_model.inputs.subject_info = [Bunch(conditions=['Task-Odd', 'Task-Even'],
                                            onsets=[list(range(15, 240, 60)), list(range(45, 240, 60))],
-                                           durations=[[15], [15]])]*4
+                                           durations=[[15], [15]])] * 4
 
 level1design = pe.Node(interface=spm.Level1Design(), name="level1design")
 level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}}
diff --git a/examples/howto_caching_example.py b/examples/howto_caching_example.py
index 41f9bb5ccd..b5c548fc76 100644
--- a/examples/howto_caching_example.py
+++ b/examples/howto_caching_example.py
@@ -46,6 +46,3 @@
 
 # or what wasn't used since the start of 2011
 # mem.clear_runs_since(year=2011)
-
-
-
diff --git a/examples/rsfmri_vol_surface_preprocessing.py b/examples/rsfmri_vol_surface_preprocessing.py
index 35de677abe..953d07595f 100644
--- a/examples/rsfmri_vol_surface_preprocessing.py
+++ b/examples/rsfmri_vol_surface_preprocessing.py
@@ -514,7 +514,7 @@ def create_reg_workflow(name='registration'):
     reg.inputs.convergence_window_size = [20] * 2 + [5]
     reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
     reg.inputs.sigma_units = ['vox'] * 3
-    reg.inputs.shrink_factors = [[3, 2, 1]]*2 + [[4, 2, 1]]
+    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
     reg.inputs.use_estimate_learning_rate_once = [True] * 3
     reg.inputs.use_histogram_matching = [False] * 2 + [True]
     reg.inputs.winsorize_lower_quantile = 0.005
diff --git a/examples/rsfmri_vol_surface_preprocessing_nipy.py b/examples/rsfmri_vol_surface_preprocessing_nipy.py
index 12abda6987..d3ace02535 100644
--- a/examples/rsfmri_vol_surface_preprocessing_nipy.py
+++ b/examples/rsfmri_vol_surface_preprocessing_nipy.py
@@ -496,7 +496,7 @@ def create_reg_workflow(name='registration'):
     reg.inputs.convergence_window_size = [20] * 2 + [5]
     reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 2 + [[1, 0.5, 0]]
     reg.inputs.sigma_units = ['vox'] * 3
-    reg.inputs.shrink_factors = [[3, 2, 1]]*2 + [[4, 2, 1]]
+    reg.inputs.shrink_factors = [[3, 2, 1]] * 2 + [[4, 2, 1]]
     reg.inputs.use_estimate_learning_rate_once = [True] * 3
     reg.inputs.use_histogram_matching = [False] * 2 + [True]
     reg.inputs.winsorize_lower_quantile = 0.005
diff --git a/examples/smri_ants_build_template.py b/examples/smri_ants_build_template.py
index 49100be967..21603c956c 100644
--- a/examples/smri_ants_build_template.py
+++ b/examples/smri_ants_build_template.py
@@ -28,7 +28,9 @@
 2. Download T1 volumes into home directory
 """
 
-import urllib.request, urllib.error, urllib.parse
+import urllib.request
+import urllib.error
+import urllib.parse
 homeDir = os.getenv("HOME")
 requestedPath = os.path.join(homeDir, 'nipypeTestPath')
 mydatadir = os.path.realpath(requestedPath)
@@ -37,13 +39,13 @@
 print(mydatadir)
 
 MyFileURLs = [
-        ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13124', '03_T1_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13128', '01_T1_inv_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13123', '02_T1_inv_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13125', '03_T1_inv_half.nii.gz'),
-    ]
+    ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13124', '03_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13128', '01_T1_inv_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13123', '02_T1_inv_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13125', '03_T1_inv_half.nii.gz'),
+]
 for tt in MyFileURLs:
     myURL = tt[0]
     localFilename = os.path.join(mydatadir, tt[1])
diff --git a/examples/smri_ants_registration.py b/examples/smri_ants_registration.py
index d77fd1b250..f2558d560d 100644
--- a/examples/smri_ants_registration.py
+++ b/examples/smri_ants_registration.py
@@ -17,7 +17,9 @@
 standard_library.install_aliases()
 
 import os
-import urllib.request, urllib.error, urllib.parse
+import urllib.request
+import urllib.error
+import urllib.parse
 
 from nipype.interfaces.ants import Registration
 
 """
@@ -32,9 +34,9 @@
 print(mydatadir)
 
 MyFileURLs = [
-        ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'),
-    ]
+    ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'),
+]
 for tt in MyFileURLs:
     myURL = tt[0]
     localFilename = os.path.join(mydatadir, tt[1])
@@ -66,7 +68,7 @@
 reg.inputs.output_transform_prefix = "output_"
 reg.inputs.transforms = ['Translation', 'Rigid', 'Affine', 'SyN']
 reg.inputs.transform_parameters = [(0.1,), (0.1,), (0.1,), (0.2, 3.0, 0.0)]
-reg.inputs.number_of_iterations = ([[10000, 111110, 11110]]*3 +
+reg.inputs.number_of_iterations = ([[10000, 111110, 11110]] * 3 +
                                    [[100, 50, 30]])
 reg.inputs.dimension = 3
 reg.inputs.write_composite_transform = True
@@ -80,7 +82,7 @@
 reg.inputs.convergence_window_size = [20] * 3 + [5]
 reg.inputs.smoothing_sigmas = [[4, 2, 1]] * 3 + [[1, 0.5, 0]]
 reg.inputs.sigma_units = ['vox'] * 4
-reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]]*2 + [[4, 2, 1]]
+reg.inputs.shrink_factors = [[6, 4, 2]] + [[3, 2, 1]] * 2 + [[4, 2, 1]]
 reg.inputs.use_estimate_learning_rate_once = [True] * 4
 reg.inputs.use_histogram_matching = [False] * 3 + [True]
 reg.inputs.initial_moving_transform_com = True
diff --git a/examples/smri_antsregistration_build_template.py b/examples/smri_antsregistration_build_template.py
index 1e49a2c0bb..ad4e0470ef 100644
--- a/examples/smri_antsregistration_build_template.py
+++ b/examples/smri_antsregistration_build_template.py
@@ -28,7 +28,9 @@
 2. Download T1 volumes into home directory
 """
 
-import urllib.request, urllib.error, urllib.parse
+import urllib.request
+import urllib.error
+import urllib.parse
 homeDir = os.getenv("HOME")
 requestedPath = os.path.join(homeDir, 'nipypeTestPath')
 mydatadir = os.path.realpath(requestedPath)
@@ -37,13 +39,13 @@
 print(mydatadir)
 
 MyFileURLs = [
-        ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13124', '03_T1_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13128', '01_T1_inv_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13123', '02_T1_inv_half.nii.gz'),
-        ('http://slicer.kitware.com/midas3/download?bitstream=13125', '03_T1_inv_half.nii.gz'),
-    ]
+    ('http://slicer.kitware.com/midas3/download?bitstream=13121', '01_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13122', '02_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13124', '03_T1_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13128', '01_T1_inv_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13123', '02_T1_inv_half.nii.gz'),
+    ('http://slicer.kitware.com/midas3/download?bitstream=13125', '03_T1_inv_half.nii.gz'),
+]
 for tt in MyFileURLs:
     myURL = tt[0]
     localFilename = os.path.join(mydatadir, tt[1])
@@ -106,7 +108,7 @@
 InitialTemplateInputs = [mdict['T1'] for mdict in ListOfImagesDictionaries]
 
 datasource = pe.Node(interface=util.IdentityInterface(fields=['InitialTemplateInputs', 'ListOfImagesDictionaries',
-                     'registrationImageTypes', 'interpolationMapping']),
+                                                              'registrationImageTypes', 'interpolationMapping']),
                      run_without_submitting=True,
                      name='InputImages')
 datasource.inputs.InitialTemplateInputs = InitialTemplateInputs
diff --git a/examples/smri_cbs_skullstripping.py b/examples/smri_cbs_skullstripping.py
index 0fd80926bb..16fe3de000 100644
--- a/examples/smri_cbs_skullstripping.py
+++ b/examples/smri_cbs_skullstripping.py
@@ -29,4 +29,4 @@
 skullstrip.inputs.xDefaultMem = 6000
 
 wf.connect(mask, 'outMasked', skullstrip, 'inInput')
-wf.run()
\ No newline at end of file
+wf.run()
diff --git a/examples/tessellation_tutorial.py b/examples/tessellation_tutorial.py
index 6ca738e0d3..dcd0328120 100644
--- a/examples/tessellation_tutorial.py
+++ b/examples/tessellation_tutorial.py
@@ -37,7 +37,8 @@
 import nipype.pipeline.engine as pe  # pypeline engine
 import nipype.interfaces.cmtk as cmtk
 import nipype.interfaces.io as nio  # Data i/o
-import os, os.path as op
+import os
+import os.path as op
 from nipype.workflows.smri.freesurfer import create_tessellation_flow
 
 """
diff --git a/examples/test_spm.py b/examples/test_spm.py
index da35185329..9a5e1f83aa 100644
--- a/examples/test_spm.py
+++ b/examples/test_spm.py
@@ -37,7 +37,7 @@
 workflow3d.connect([(split, stc, [("out_files", "in_files")]),
                     (stc, realign_estimate, [('timecorrected_files', 'in_files')]),
                     (realign_estimate, realign_write, [('modified_in_files', 'in_files')]),
-                    (stc, realign_estwrite, [('timecorrected_files', 'in_files')]), 
+                    (stc, realign_estwrite, [('timecorrected_files', 'in_files')]),
                     (realign_write, smooth, [('realigned_files', 'in_files')])])
 
 workflow3d.run()
@@ -71,7 +71,7 @@
 workflow4d.connect([(gunzip, stc, [("out_file", "in_files")]),
                     (stc, realign_estimate, [('timecorrected_files', 'in_files')]),
                     (realign_estimate, realign_write, [('modified_in_files', 'in_files')]),
-                    (stc, realign_estwrite, [('timecorrected_files', 'in_files')]), 
+                    (stc, realign_estwrite, [('timecorrected_files', 'in_files')]),
                     (realign_write, smooth, [('realigned_files', 'in_files')])])
 
 workflow4d.run()
diff --git a/examples/workshop_dartmouth_2010.py b/examples/workshop_dartmouth_2010.py
index 779fec9045..1c9c587ae9 100644
--- a/examples/workshop_dartmouth_2010.py
+++ b/examples/workshop_dartmouth_2010.py
@@ -133,8 +133,8 @@
 motion_correct_and_smooth = pe.Workflow(name="motion_correct_and_smooth")
 motion_correct_and_smooth.base_dir = os.path.abspath('.')  # define where will be the root folder for the workflow
 motion_correct_and_smooth.connect([
-                                  (motion_correct, smooth, [('out_file', 'in_file')])
-                                  ])
+    (motion_correct, smooth, [('out_file', 'in_file')])
+])
 # we are connecting 'out_file' output of motion_correct to 'in_file' input of smooth
 motion_correct_and_smooth.run()
@@ -154,8 +154,8 @@
 demean = pe.Workflow(name="demean")
 demean.base_dir = os.path.abspath('.')
 demean.connect([
-              (calc_mean, subtract, [('out_file', 'in_file2')])
-              ])
+    (calc_mean, subtract, [('out_file', 'in_file2')])
+])
 demean.inputs.calc_mean.in_file = os.path.abspath('data/s1/f3.nii')
 demean.inputs.subtract.in_file = os.path.abspath('data/s1/f3.nii')
@@ -216,9 +216,9 @@
 preprocess = pe.Workflow(name='preprocout')
 preprocess.base_dir = os.path.abspath('.')
 preprocess.connect([
-                   (preproc, datasink, [('meanfunc2.out_file', 'meanfunc'),
-                                        ('maskfunc3.out_file', 'funcruns')])
-                   ])
+    (preproc, datasink, [('meanfunc2.out_file', 'meanfunc'),
+                         ('maskfunc3.out_file', 'funcruns')])
+])
 preprocess.run()
 
 """
diff --git a/nipype/__init__.py b/nipype/__init__.py
index bc86315987..86a75a37ca 100644
--- a/nipype/__init__.py
+++ b/nipype/__init__.py
@@ -55,7 +55,7 @@ def _test_local_install():
     imported locally is a bad idea.
""" if os.getcwd() == os.sep.join( - os.path.abspath(__file__).split(os.sep)[:-2]): + os.path.abspath(__file__).split(os.sep)[:-2]): import warnings warnings.warn('Running the tests from the install directory may ' 'trigger some failures') diff --git a/nipype/algorithms/__init__.py b/nipype/algorithms/__init__.py index c8f2549532..0aa096f1f9 100644 --- a/nipype/algorithms/__init__.py +++ b/nipype/algorithms/__init__.py @@ -7,5 +7,3 @@ """ __docformat__ = 'restructuredtext' - - diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py index f30697d584..98758dfcbf 100644 --- a/nipype/algorithms/icc.py +++ b/nipype/algorithms/icc.py @@ -46,7 +46,7 @@ def _run_interface(self, runtime): for x in range(icc.shape[0]): Y = all_data[x, :, :] - icc[x], subject_var[x], session_var[x], session_F[x], _, _ = ICC_rep_anova(Y) + icc[x], subject_var[x], session_var[x], session_F[x], _, _ = ICC_rep_anova(Y) nim = nb.load(self.inputs.subjects_sessions[0][0]) new_data = np.zeros(nim.get_shape()) diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index e8bd62585d..ed51d7bf13 100644 --- a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -71,7 +71,7 @@ def _gen_fname(self, in_file, suffix='generated', ext=None): if fext == '.gz': fname, fext2 = op.splitext(fname) - fext = fext2+fext + fext = fext2 + fext if ext is None: ext = fext @@ -117,7 +117,7 @@ def _run_interface(self, runtime): ras2vox = np.linalg.inv(vox2ras) origin = affine[0:3, 3] voxpoints = np.array([np.dot(ras2vox, - (p-origin)) for p in points]) + (p - origin)) for p in points]) warps = [] for axis in warp_dims: @@ -132,7 +132,7 @@ def _run_interface(self, runtime): warps.append(warp) disps = np.squeeze(np.dstack(warps)) - newpoints = [p+d for p, d in zip(points, disps)] + newpoints = [p + d for p, d in zip(points, disps)] mesh.points = newpoints w = tvtk.PolyDataWriter() if vtk_major <= 5: diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py index 5284b1f201..746919e06e 100644 --- a/nipype/algorithms/metrics.py +++ b/nipype/algorithms/metrics.py @@ -284,7 +284,7 @@ def _run_interface(self, runtime): if self.inputs.vol_units == 'mm': voxvol = nii1.get_header().get_zooms() - for i in range(nii1.get_data().ndim-1): + for i in range(nii1.get_data().ndim - 1): scale = scale * voxvol[i] data1 = nii1.get_data() @@ -318,7 +318,7 @@ def _run_interface(self, runtime): results = dict(jaccard=[], dice=[]) results['jaccard'] = np.array(res) - results['dice'] = 2.0*results['jaccard'] / (results['jaccard'] + 1.0) + results['dice'] = 2.0 * results['jaccard'] / (results['jaccard'] + 1.0) weights = np.ones((len(volumes1),), dtype=np.float32) if self.inputs.weighting != 'none': @@ -335,12 +335,12 @@ def _run_interface(self, runtime): self._labels = labels self._ove_rois = results - self._vol_rois = (np.array(volumes1) - - np.array(volumes2)) / np.array(volumes1) + self._vol_rois = (np.array(volumes1) - + np.array(volumes2)) / np.array(volumes1) - self._dice = round(np.sum(weights*results['dice']), 5) - self._jaccard = round(np.sum(weights*results['jaccard']), 5) - self._volume = np.sum(weights*self._vol_rois) + self._dice = round(np.sum(weights * results['dice']), 5) + self._jaccard = round(np.sum(weights * results['jaccard']), 5) + self._volume = np.sum(weights * self._vol_rois) return runtime @@ -360,9 +360,9 @@ def _list_outputs(self): class FuzzyOverlapInputSpec(BaseInterfaceInputSpec): in_ref = InputMultiPath(File(exists=True), mandatory=True, - desc='Reference image. 
Requires the same dimensions as in_tst.') + desc='Reference image. Requires the same dimensions as in_tst.') in_tst = InputMultiPath(File(exists=True), mandatory=True, - desc='Test image. Requires the same dimensions as in_ref.') + desc='Test image. Requires the same dimensions as in_ref.') weighting = traits.Enum('none', 'volume', 'squared_vol', usedefault=True, desc=('\'none\': no class-overlap weighting is ' 'performed. \'volume\': computed class-' @@ -443,7 +443,7 @@ def _run_interface(self, runtime): weights = weights / np.sum(weights) - setattr(self, '_jaccard', np.sum(weights * self._jaccards)) + setattr(self, '_jaccard', np.sum(weights * self._jaccards)) setattr(self, '_dice', np.sum(weights * self._dices)) diff = np.zeros(diff_im[0].shape) @@ -463,8 +463,8 @@ def _list_outputs(self): outputs[method] = getattr(self, '_' + method) # outputs['volume_difference'] = self._volume outputs['diff_file'] = os.path.abspath(self.inputs.out_file) - outputs['class_fji'] = np.array(self._jaccards).astype(float).tolist(); - outputs['class_fdi'] = self._dices.astype(float).tolist(); + outputs['class_fji'] = np.array(self._jaccards).astype(float).tolist() + outputs['class_fdi'] = self._dices.astype(float).tolist() return outputs @@ -529,7 +529,7 @@ def _run_interface(self, runtime): msk_idxs = np.where(mskvector == 1) refvector = ref_data.reshape(-1, comps)[msk_idxs].astype(np.float32) tstvector = tst_data.reshape(-1, comps)[msk_idxs].astype(np.float32) - diffvector = (refvector-tstvector) + diffvector = (refvector - tstvector) # Scale the difference if self.inputs.metric == 'sqeuclidean': diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index 6d5aaf1dc0..e4e41326ce 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -391,7 +391,7 @@ def matlab2csv(in_array, name, reshape): if reshape: if len(np.shape(output_array)) > 1: output_array = np.reshape(output_array, ( - np.shape(output_array)[0]*np.shape(output_array)[1], 1)) + np.shape(output_array)[0] * np.shape(output_array)[1], 1)) iflogger.info(np.shape(output_array)) output_name = op.abspath(name + '.csv') np.savetxt(output_name, output_array, delimiter=',') @@ -512,7 +512,7 @@ def merge_csvs(in_list): ) except ValueError as ex: in_array = np.loadtxt( - in_file, delimiter=',', skiprows=1, usecols=list(range(1, n_cols-1))) + in_file, delimiter=',', skiprows=1, usecols=list(range(1, n_cols - 1))) if idx == 0: out_array = in_array else: @@ -530,7 +530,7 @@ def remove_identical_paths(in_files): out_names = list() commonprefix = op.commonprefix(in_files) lastslash = commonprefix.rfind('/') - commonpath = commonprefix[0:(lastslash+1)] + commonpath = commonprefix[0:(lastslash + 1)] for fileidx, in_file in enumerate(in_files): path, name, ext = split_filename(in_file) in_file = op.join(path, name) @@ -548,10 +548,10 @@ def maketypelist(rowheadings, shape, extraheadingBool, extraheading): if rowheadings: typelist.append(('heading', 'a40')) if len(shape) > 1: - for idx in range(1, (min(shape)+1)): + for idx in range(1, (min(shape) + 1)): typelist.append((str(idx), float)) else: - for idx in range(1, (shape[0]+1)): + for idx in range(1, (shape[0] + 1)): typelist.append((str(idx), float)) if extraheadingBool: typelist.append((extraheading, 'a40')) @@ -566,13 +566,13 @@ def makefmtlist(output_array, typelist, rowheadingsBool, fmtlist.append('%s') if len(shape) > 1: output = np.zeros(max(shape), typelist) - for idx in range(1, min(shape)+1): - output[str(idx)] = output_array[:, idx-1] + for idx in range(1, min(shape) + 1): 
+ output[str(idx)] = output_array[:, idx - 1] fmtlist.append('%f') else: output = np.zeros(1, typelist) - for idx in range(1, len(output_array)+1): - output[str(idx)] = output_array[idx-1] + for idx in range(1, len(output_array) + 1): + output[str(idx)] = output_array[idx - 1] fmtlist.append('%f') if extraheadingBool: fmtlist.append('%s') @@ -1056,7 +1056,7 @@ def gen_noise(self, image, mask=None, snr_db=10.0, dist='normal', bg_dist='norma mask[mask < 1] = 0 if mask.ndim < image.ndim: - mask = np.rollaxis(np.array([mask]*image.shape[3]), 0, 4) + mask = np.rollaxis(np.array([mask] * image.shape[3]), 0, 4) signal = image[mask > 0].reshape(-1) @@ -1271,7 +1271,7 @@ def normalize_tpms(in_files, in_mask=None, out_files=[]): msk = np.ones_like(imgs[0].get_data()) msk[weights <= 0] = 0 - if not in_mask is None: + if in_mask is not None: msk = nib.load(in_mask).get_data() msk[msk <= 0] = 0 msk[msk > 0] = 1 @@ -1334,7 +1334,7 @@ def split_rois(in_file, mask=None, roishape=None): for i in range(nrois): first = i * roisize - last = (i+1) * roisize + last = (i + 1) * roisize fill = 0 if last > els: @@ -1343,7 +1343,7 @@ def split_rois(in_file, mask=None, roishape=None): droi = data[first:last, ...] iname = op.abspath('roi%010d_idx' % i) - out_idxs.append(iname+'.npz') + out_idxs.append(iname + '.npz') np.savez(iname, (nzels[0][first:last],)) if fill > 0: diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index cb7e2ae4e8..26439e5951 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -50,7 +50,8 @@ def gcd(a, b): 11 """ - while b > 0: a, b = b, a % b + while b > 0: + a, b = b, a % b return a @@ -155,7 +156,7 @@ def gen_info(run_event_files): for event_file in event_files: _, name = os.path.split(event_file) if '.run' in name: - name, _ = name.split('.run%03d' % (i+1)) + name, _ = name.split('.run%03d' % (i + 1)) elif '.txt' in name: name, _ = name.split('.txt') runinfo.conditions.append(name) @@ -315,10 +316,10 @@ def _generate_standard_design(self, infolist, sessinfo[i]['cond'][cid]['amplitudes'] = \ info.amplitudes[cid] if hasattr(info, 'tmod') and info.tmod and \ - len(info.tmod) > cid: + len(info.tmod) > cid: sessinfo[i]['cond'][cid]['tmod'] = info.tmod[cid] if hasattr(info, 'pmod') and info.pmod and \ - len(info.pmod) > cid: + len(info.pmod) > cid: if info.pmod[cid]: sessinfo[i]['cond'][cid]['pmod'] = [] for j, name in enumerate(info.pmod[cid].name): @@ -334,11 +335,11 @@ def _generate_standard_design(self, infolist, for j, r in enumerate(info.regressors): sessinfo[i]['regress'].insert(j, dict(name='', val=[])) if hasattr(info, 'regressor_names') and \ - info.regressor_names is not None: + info.regressor_names is not None: sessinfo[i]['regress'][j]['name'] = \ info.regressor_names[j] else: - sessinfo[i]['regress'][j]['name'] = 'UR%d' % (j+1) + sessinfo[i]['regress'][j]['name'] = 'UR%d' % (j + 1) sessinfo[i]['regress'][j]['val'] = info.regressors[j] sessinfo[i]['scans'] = functional_runs[i] if realignment_parameters is not None: @@ -364,7 +365,7 @@ def _generate_standard_design(self, infolist, for j, scanno in enumerate(out): colidx = len(sessinfo[i]['regress']) sessinfo[i]['regress'].insert(colidx, dict(name='', val=[])) - sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d' %(j+1) + sessinfo[i]['regress'][colidx]['name'] = 'Outlier%d' % (j + 1) sessinfo[i]['regress'][colidx]['val'] = \ np.zeros((1, numscans))[0].tolist() sessinfo[i]['regress'][colidx]['val'][int(scanno)] = 1 @@ -475,12 +476,12 @@ def _concatenate_info(self, infolist): for j, 
val in enumerate(info.onsets): if self.inputs.input_units == 'secs': onsets = np.array(info.onsets[j]) +\ - self.inputs.time_repetition * \ - sum(nscans[0:(i + 1)]) + self.inputs.time_repetition * \ + sum(nscans[0:(i + 1)]) infoout.onsets[j].extend(onsets.tolist()) else: onsets = np.array(info.onsets[j]) + \ - sum(nscans[0:(i + 1)]) + sum(nscans[0:(i + 1)]) infoout.onsets[j].extend(onsets.tolist()) for j, val in enumerate(info.durations): if len(info.onsets[j]) > 1 and len(val) == 1: @@ -491,7 +492,7 @@ def _concatenate_info(self, infolist): else: raise ValueError('Mismatch in number of onsets and \ durations for run {0}, condition \ - {1}'.format(i+2, j+1)) + {1}'.format(i + 2, j + 1)) if hasattr(info, 'amplitudes') and info.amplitudes: for j, val in enumerate(info.amplitudes): infoout.amplitudes[j].extend(info.amplitudes[j]) @@ -560,7 +561,7 @@ class SpecifySparseModelInputSpec(SpecifyModelInputSpec): time_acquisition = traits.Float(0, mandatory=True, desc="Time in seconds to acquire a single image volume") volumes_in_cluster = traits.Range(1, usedefault=True, - desc="Number of scan volumes in a cluster") + desc="Number of scan volumes in a cluster") model_hrf = traits.Bool(desc="model sparse events with hrf") stimuli_as_impulses = traits.Bool(True, desc="Treat each stimulus to be impulse like.", @@ -633,7 +634,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): dt = TA / 10.0 durations = np.round(np.array(i_durations) * 1000) if len(durations) == 1: - durations = durations*np.ones((len(i_onsets))) + durations = durations * np.ones((len(i_onsets))) onsets = np.round(np.array(i_onsets) * 1000) dttemp = gcd(TA, gcd(SILENCE, TR)) if dt < dttemp: @@ -702,7 +703,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): regderiv = [] for i, trial in enumerate(np.arange(nscans) / nvol): scanstart = int((SCANONSET + trial * TR + (i % nvol) * TA) / dt) - scanidx = scanstart+np.arange(int(TA / dt)) + scanidx = scanstart + np.arange(int(TA / dt)) timeline2[scanidx] = np.max(timeline) reg.insert(i, np.mean(timeline[scanidx]) * reg_scale) if isdefined(self.inputs.use_temporal_deriv) and \ @@ -758,7 +759,7 @@ def _cond_to_regress(self, info, nscans): # for sparse-clustered acquisitions enter T1-effect regressors nvol = self.inputs.volumes_in_cluster if nvol > 1: - for i in range(nvol-1): + for i in range(nvol - 1): treg = np.zeros((nscans / nvol, nvol)) treg[:, i] = 1 reg.insert(len(reg), treg.ravel().tolist()) @@ -782,7 +783,7 @@ def _generate_clustered_design(self, infolist): if hasattr(infoout[i], 'regressors') and infoout[i].regressors: if not infoout[i].regressor_names: infoout[i].regressor_names = \ - ['R%d' %j for j in range(len(infoout[i].regressors))] + ['R%d' % j for j in range(len(infoout[i].regressors))] else: infoout[i].regressors = [] infoout[i].regressor_names = [] diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index 6f90c70e75..435b3c99e5 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -119,11 +119,11 @@ def _calc_norm(mc, use_differences, source, brain_pts=None): all_pts)[0:3, :].ravel() if brain_pts is not None: displacement[i, :] = \ - np.sqrt(np.sum(np.power(np.reshape(newpos[i, :], - (3, all_pts.shape[1])) - - all_pts[0:3, :], - 2), - axis=0)) + np.sqrt(np.sum(np.power(np.reshape(newpos[i, :], + (3, all_pts.shape[1])) - + all_pts[0:3, :], + 2), + axis=0)) # np.savez('displacement.npz', newpos=newpos, pts=all_pts) normdata = np.zeros(mc.shape[0]) if use_differences: @@ -131,8 +131,8 
@@ def _calc_norm(mc, use_differences, source, brain_pts=None): np.diff(newpos, n=1, axis=0)), axis=0) for i in range(newpos.shape[0]): normdata[i] = \ - np.max(np.sqrt(np.sum(np.reshape(np.power(np.abs(newpos[i, :]), 2), - (3, all_pts.shape[1])), axis=0))) + np.max(np.sqrt(np.sum(np.reshape(np.power(np.abs(newpos[i, :]), 2), + (3, all_pts.shape[1])), axis=0))) else: newpos = np.abs(signal.detrend(newpos, axis=0, type='constant')) normdata = np.sqrt(np.mean(np.power(newpos, 2), axis=1)) @@ -319,7 +319,7 @@ def _list_outputs(self): for i, f in enumerate(filename_to_list(self.inputs.realigned_files)): (outlierfile, intensityfile, statsfile, normfile, plotfile, displacementfile, maskfile) = \ - self._get_output_filenames(f, os.getcwd()) + self._get_output_filenames(f, os.getcwd()) outputs['outlier_files'].insert(i, outlierfile) outputs['intensity_files'].insert(i, intensityfile) outputs['statistic_files'].insert(i, statsfile) @@ -461,8 +461,8 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): traval = mc[:, 0:3] # translation parameters (mm) rotval = mc[:, 3:6] # rotation parameters (rad) tidx = find_indices(np.sum(abs(traval) > - self.inputs.translation_threshold, 1) - > 0) + self.inputs.translation_threshold, 1) > + 0) ridx = find_indices(np.sum(abs(rotval) > self.inputs.rotation_threshold, 1) > 0) @@ -514,13 +514,13 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): 'min': np.min(mc_in, axis=0).tolist(), 'max': np.max(mc_in, axis=0).tolist(), 'std': np.std(mc_in, axis=0).tolist()}, - ]}, + ]}, {'intensity': [{'using differences': self.inputs.use_differences[1]}, {'mean': np.mean(gz, axis=0).tolist(), 'min': np.min(gz, axis=0).tolist(), 'max': np.max(gz, axis=0).tolist(), 'std': np.std(gz, axis=0).tolist()}, - ]}, + ]}, ] if self.inputs.use_norm: stats.insert(3, {'motion_norm': @@ -528,7 +528,7 @@ def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None): 'min': np.min(normval, axis=0).tolist(), 'max': np.max(normval, axis=0).tolist(), 'std': np.std(normval, axis=0).tolist(), - }}) + }}) save_json(statsfile, stats) def _run_interface(self, runtime): diff --git a/nipype/algorithms/tests/test_auto_ErrorMap.py b/nipype/algorithms/tests/test_auto_ErrorMap.py new file mode 100644 index 0000000000..69484529dd --- /dev/null +++ b/nipype/algorithms/tests/test_auto_ErrorMap.py @@ -0,0 +1,35 @@ +# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT +from ...testing import assert_equal +from ..metrics import ErrorMap + + +def test_ErrorMap_inputs(): + input_map = dict(ignore_exception=dict(nohash=True, + usedefault=True, + ), + in_ref=dict(mandatory=True, + ), + in_tst=dict(mandatory=True, + ), + mask=dict(), + metric=dict(mandatory=True, + usedefault=True, + ), + out_map=dict(), + ) + inputs = ErrorMap.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + + +def test_ErrorMap_outputs(): + output_map = dict(distance=dict(), + out_map=dict(), + ) + outputs = ErrorMap.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(outputs.traits()[key], metakey), value diff --git a/nipype/algorithms/tests/test_auto_Overlap.py b/nipype/algorithms/tests/test_auto_Overlap.py new file mode 100644 index 0000000000..a5a3874bd1 --- /dev/null +++ b/nipype/algorithms/tests/test_auto_Overlap.py @@ -0,0 +1,47 @@ +# AUTO-GENERATED by tools/checkspecs.py 
- DO NOT EDIT +from ...testing import assert_equal +from ..misc import Overlap + + +def test_Overlap_inputs(): + input_map = dict(bg_overlap=dict(mandatory=True, + usedefault=True, + ), + ignore_exception=dict(nohash=True, + usedefault=True, + ), + mask_volume=dict(), + out_file=dict(usedefault=True, + ), + vol_units=dict(mandatory=True, + usedefault=True, + ), + volume1=dict(mandatory=True, + ), + volume2=dict(mandatory=True, + ), + weighting=dict(usedefault=True, + ), + ) + inputs = Overlap.input_spec() + + for key, metadata in list(input_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(inputs.traits()[key], metakey), value + + +def test_Overlap_outputs(): + output_map = dict(dice=dict(), + diff_file=dict(), + jaccard=dict(), + labels=dict(), + roi_di=dict(), + roi_ji=dict(), + roi_voldiff=dict(), + volume_difference=dict(), + ) + outputs = Overlap.output_spec() + + for key, metadata in list(output_map.items()): + for metakey, value in list(metadata.items()): + yield assert_equal, getattr(outputs.traits()[key], metakey), value diff --git a/nipype/algorithms/tests/test_errormap.py b/nipype/algorithms/tests/test_errormap.py index 3dea795ee0..361646add0 100644 --- a/nipype/algorithms/tests/test_errormap.py +++ b/nipype/algorithms/tests/test_errormap.py @@ -11,66 +11,66 @@ def test_errormap(): - tempdir = mkdtemp() - # Single-Spectual - # Make two fake 2*2*2 voxel volumes - volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) # John von Neumann's birthday - volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) # Alan Turing's birthday - mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]]) - - img1 = nib.Nifti1Image(volume1, np.eye(4)) - img2 = nib.Nifti1Image(volume2, np.eye(4)) - maskimg = nib.Nifti1Image(mask, np.eye(4)) - - nib.save(img1, os.path.join(tempdir, 'von.nii.gz')) - nib.save(img2, os.path.join(tempdir, 'alan.nii.gz')) - nib.save(maskimg, os.path.join(tempdir, 'mask.nii.gz')) - - # Default metric - errmap = ErrorMap() - errmap.inputs.in_tst = os.path.join(tempdir, 'von.nii.gz') - errmap.inputs.in_ref = os.path.join(tempdir, 'alan.nii.gz') - errmap.out_map = os.path.join(tempdir, 'out_map.nii.gz') - result = errmap.run() - yield assert_equal, result.outputs.distance, 1.125 - - # Square metric - errmap.inputs.metric = 'sqeuclidean' - result = errmap.run() - yield assert_equal, result.outputs.distance, 1.125 - - # Linear metric - errmap.inputs.metric = 'euclidean' - result = errmap.run() - yield assert_equal, result.outputs.distance, 0.875 - - # Masked - errmap.inputs.mask = os.path.join(tempdir, 'mask.nii.gz') - result = errmap.run() - yield assert_equal, result.outputs.distance, 1.0 - - # Multi-Spectual - volume3 = np.array([[[1.0, 6.0], [0.0, 3.0]], [[1.0, 9.0], [3.0, 6.0]]]) # Raymond Vahan Damadian's birthday - - msvolume1 = np.zeros(shape=(2, 2, 2, 2)) - msvolume1[:, :, :, 0] = volume1 - msvolume1[:, :, :, 1] = volume3 - msimg1 = nib.Nifti1Image(msvolume1, np.eye(4)) - - msvolume2 = np.zeros(shape=(2, 2, 2, 2)) - msvolume2[:, :, :, 0] = volume3 - msvolume2[:, :, :, 1] = volume1 - msimg2 = nib.Nifti1Image(msvolume2, np.eye(4)) - - nib.save(msimg1, os.path.join(tempdir, 'von-ray.nii.gz')) - nib.save(msimg2, os.path.join(tempdir, 'alan-ray.nii.gz')) - - errmap.inputs.in_tst = os.path.join(tempdir, 'von-ray.nii.gz') - errmap.inputs.in_ref = os.path.join(tempdir, 'alan-ray.nii.gz') - errmap.inputs.metric = 'sqeuclidean' - result = errmap.run() - yield assert_equal, result.outputs.distance, 5.5 - - 
errmap.inputs.metric = 'euclidean' - result = errmap.run() - yield assert_equal, result.outputs.distance, np.float32(1.25 * (2**0.5)) + tempdir = mkdtemp() + # Single-Spectral + # Make two fake 2*2*2 voxel volumes + volume1 = np.array([[[2.0, 8.0], [1.0, 2.0]], [[1.0, 9.0], [0.0, 3.0]]]) # John von Neumann's birthday + volume2 = np.array([[[0.0, 7.0], [2.0, 3.0]], [[1.0, 9.0], [1.0, 2.0]]]) # Alan Turing's birthday + mask = np.array([[[1, 0], [0, 1]], [[1, 0], [0, 1]]]) + + img1 = nib.Nifti1Image(volume1, np.eye(4)) + img2 = nib.Nifti1Image(volume2, np.eye(4)) + maskimg = nib.Nifti1Image(mask, np.eye(4)) + + nib.save(img1, os.path.join(tempdir, 'von.nii.gz')) + nib.save(img2, os.path.join(tempdir, 'alan.nii.gz')) + nib.save(maskimg, os.path.join(tempdir, 'mask.nii.gz')) + + # Default metric + errmap = ErrorMap() + errmap.inputs.in_tst = os.path.join(tempdir, 'von.nii.gz') + errmap.inputs.in_ref = os.path.join(tempdir, 'alan.nii.gz') + errmap.out_map = os.path.join(tempdir, 'out_map.nii.gz') + result = errmap.run() + yield assert_equal, result.outputs.distance, 1.125 + + # Square metric + errmap.inputs.metric = 'sqeuclidean' + result = errmap.run() + yield assert_equal, result.outputs.distance, 1.125 + + # Linear metric + errmap.inputs.metric = 'euclidean' + result = errmap.run() + yield assert_equal, result.outputs.distance, 0.875 + + # Masked + errmap.inputs.mask = os.path.join(tempdir, 'mask.nii.gz') + result = errmap.run() + yield assert_equal, result.outputs.distance, 1.0 + + # Multi-Spectral + volume3 = np.array([[[1.0, 6.0], [0.0, 3.0]], [[1.0, 9.0], [3.0, 6.0]]]) # Raymond Vahan Damadian's birthday + + msvolume1 = np.zeros(shape=(2, 2, 2, 2)) + msvolume1[:, :, :, 0] = volume1 + msvolume1[:, :, :, 1] = volume3 + msimg1 = nib.Nifti1Image(msvolume1, np.eye(4)) + + msvolume2 = np.zeros(shape=(2, 2, 2, 2)) + msvolume2[:, :, :, 0] = volume3 + msvolume2[:, :, :, 1] = volume1 + msimg2 = nib.Nifti1Image(msvolume2, np.eye(4)) + + nib.save(msimg1, os.path.join(tempdir, 'von-ray.nii.gz')) + nib.save(msimg2, os.path.join(tempdir, 'alan-ray.nii.gz')) + + errmap.inputs.in_tst = os.path.join(tempdir, 'von-ray.nii.gz') + errmap.inputs.in_ref = os.path.join(tempdir, 'alan-ray.nii.gz') + errmap.inputs.metric = 'sqeuclidean' + result = errmap.run() + yield assert_equal, result.outputs.distance, 5.5 + + errmap.inputs.metric = 'euclidean' + result = errmap.run() + yield assert_equal, result.outputs.distance, np.float32(1.25 * (2**0.5)) diff --git a/nipype/caching/__init__.py b/nipype/caching/__init__.py index b6815e9724..db0261ebea 100644 --- a/nipype/caching/__init__.py +++ b/nipype/caching/__init__.py @@ -1,2 +1 @@ from .memory import Memory - diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py index d3b842459d..67ef605e32 100644 --- a/nipype/caching/memory.py +++ b/nipype/caching/memory.py @@ -51,8 +51,8 @@ def __init__(self, interface, base_dir, callback=None): An optional callable called each time after the function is called. """ - if not (isinstance(interface, type) - and issubclass(interface, BaseInterface)): + if not (isinstance(interface, type) and + issubclass(interface, BaseInterface)): raise ValueError('the interface argument should be a nipype ' 'interface class, but %s (type %s) was passed.'
% (interface, type(interface))) @@ -300,4 +300,3 @@ def _clear_all_but(self, runs, warn=True): def __repr__(self): return '%s(base_dir=%s)' % (self.__class__.__name__, self.base_dir) - diff --git a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py index c48d23fec9..784eca1b93 100644 --- a/nipype/caching/tests/test_memory.py +++ b/nipype/caching/tests/test_memory.py @@ -50,4 +50,3 @@ def test_caching(): if __name__ == '__main__': test_caching() - diff --git a/nipype/external/provcopy.py b/nipype/external/provcopy.py index 2a55f3b4ce..42e250f967 100644 --- a/nipype/external/provcopy.py +++ b/nipype/external/provcopy.py @@ -90,25 +90,25 @@ ) PROV_N_MAP = { - PROV_REC_ENTITY: u'entity', - PROV_REC_ACTIVITY: u'activity', - PROV_REC_GENERATION: u'wasGeneratedBy', - PROV_REC_USAGE: u'used', - PROV_REC_COMMUNICATION: u'wasInformedBy', - PROV_REC_START: u'wasStartedBy', - PROV_REC_END: u'wasEndedBy', - PROV_REC_INVALIDATION: u'wasInvalidatedBy', - PROV_REC_DERIVATION: u'wasDerivedFrom', - PROV_REC_AGENT: u'agent', - PROV_REC_ATTRIBUTION: u'wasAttributedTo', - PROV_REC_ASSOCIATION: u'wasAssociatedWith', - PROV_REC_DELEGATION: u'actedOnBehalfOf', - PROV_REC_INFLUENCE: u'wasInfluencedBy', - PROV_REC_ALTERNATE: u'alternateOf', - PROV_REC_SPECIALIZATION: u'specializationOf', - PROV_REC_MENTION: u'mentionOf', - PROV_REC_MEMBERSHIP: u'hadMember', - PROV_REC_BUNDLE: u'bundle', + PROV_REC_ENTITY: u'entity', + PROV_REC_ACTIVITY: u'activity', + PROV_REC_GENERATION: u'wasGeneratedBy', + PROV_REC_USAGE: u'used', + PROV_REC_COMMUNICATION: u'wasInformedBy', + PROV_REC_START: u'wasStartedBy', + PROV_REC_END: u'wasEndedBy', + PROV_REC_INVALIDATION: u'wasInvalidatedBy', + PROV_REC_DERIVATION: u'wasDerivedFrom', + PROV_REC_AGENT: u'agent', + PROV_REC_ATTRIBUTION: u'wasAttributedTo', + PROV_REC_ASSOCIATION: u'wasAssociatedWith', + PROV_REC_DELEGATION: u'actedOnBehalfOf', + PROV_REC_INFLUENCE: u'wasInfluencedBy', + PROV_REC_ALTERNATE: u'alternateOf', + PROV_REC_SPECIALIZATION: u'specializationOf', + PROV_REC_MENTION: u'mentionOf', + PROV_REC_MEMBERSHIP: u'hadMember', + PROV_REC_BUNDLE: u'bundle', } # # Identifiers for PROV's attributes @@ -1820,7 +1820,7 @@ def add_bundle(self, bundle, identifier=None): '''Add a bundle to the current document ''' - if identifier == None: + if identifier is None: identifier = bundle.get_identifier() if not identifier: diff --git a/nipype/fixes/numpy/testing/noseclasses.py b/nipype/fixes/numpy/testing/noseclasses.py index 2a1c6900c9..9f69dc33db 100644 --- a/nipype/fixes/numpy/testing/noseclasses.py +++ b/nipype/fixes/numpy/testing/noseclasses.py @@ -87,8 +87,8 @@ def _find(self, tests, obj, name, module, source_lines, globs, seen): if ismodule(obj) and self._recurse: for valname, val in list(obj.__dict__.items()): valname1 = '%s.%s' % (name, valname) - if ((isroutine(val) or isclass(val)) - and self._from_module(module, val)): + if ((isroutine(val) or isclass(val)) and + self._from_module(module, val)): self._find(tests, val, valname1, module, source_lines, globs, seen) @@ -134,8 +134,8 @@ def check_output(self, want, got, optionflags): # try to normalize out 32 and 64 bit default int sizes for sz in [4, 8]: - got = got.replace("' 0: if len(parameters) > 1: @@ -654,7 +654,7 @@ def _formatRegistration(self): if isdefined(self.inputs.sigma_units): retval.append('--smoothing-sigmas %s%s' % (self._antsJoinList(self.inputs.smoothing_sigmas[ - ii]), + ii]), self.inputs.sigma_units[ii])) else: retval.append('--smoothing-sigmas %s' % diff --git 
a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 9473740a30..f1bcd4cda1 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -401,4 +401,3 @@ def _format_arg(self, opt, spec, val): if opt == "transforms": return self._getTransformFileNames() return super(ApplyTransformsToPoints, self)._format_arg(opt, spec, val) - diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 635c4be7f5..6780b79d8b 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -16,22 +16,22 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): dimension = traits.Enum(3, 2, argstr='%d', usedefault=True, - desc='image dimension (2 or 3)', mandatory=True, - position=0) + desc='image dimension (2 or 3)', mandatory=True, + position=0) input_image = File(argstr='%s', exists=True, - desc='Main input is a 3-D grayscale image.', mandatory=True, - position=1) + desc='Main input is a 3-D grayscale image.', mandatory=True, + position=1) output_image = traits.Str('rgb.nii.gz', argstr='%s', usedefault=True, - desc=('rgb output image'), position=2) + desc=('rgb output image'), position=2) mask_image = File('none', argstr='%s', exists=True, - desc='mask image', position=3, usedefault=True) + desc='mask image', position=3, usedefault=True) colormap = traits.Str(argstr='%s', usedefault=True, - desc=('Possible colormaps: grey, red, green, ' - 'blue, copper, jet, hsv, spring, summer, ' - 'autumn, winter, hot, cool, overunder, custom ' - ), mandatory=True, position=4) + desc=('Possible colormaps: grey, red, green, ' + 'blue, copper, jet, hsv, spring, summer, ' + 'autumn, winter, hot, cool, overunder, custom ' + ), mandatory=True, position=4) custom_color_map_file = traits.Str('none', argstr='%s', usedefault=True, - desc='custom color map file', position=5) + desc='custom color map file', position=5) minimum_input = traits.Int(argstr='%d', desc='minimum input', mandatory=True, position=6) maximum_input = traits.Int(argstr='%d', desc='maximum input', @@ -79,27 +79,27 @@ class CreateTiledMosaicInputSpec(ANTSCommandInputSpec): desc='Main input is a 3-D grayscale image.', mandatory=True) rgb_image = File(argstr='-r %s', exists=True, - desc=('An optional Rgb image can be added as an overlay.' + desc=('An optional Rgb image can be added as an overlay.' 'It must have the same image' 'geometry as the input grayscale image.'), - mandatory=True) + mandatory=True) mask_image = File(argstr='-x %s', exists=True, desc='Specifies the ROI of the RGB voxels used.') alpha_value = traits.Float(argstr='-a %.2f', desc=('If an Rgb image is provided, render the overlay ' - 'using the specified alpha parameter.')) + 'using the specified alpha parameter.')) output_image = traits.Str('output.png', argstr='-o %s', desc='The output consists of the tiled mosaic image.', usedefault=True) tile_geometry = traits.Str(argstr='-t %s', desc=( - 'The tile geometry specifies the number of rows and columns' - 'in the output image. For example, if the user specifies "5x10", ' - 'then 5 rows by 10 columns of slices are rendered. If R < 0 and C > ' - '0 (or vice versa), the negative value is selected' - 'based on direction.')) + 'The tile geometry specifies the number of rows and columns' + 'in the output image. For example, if the user specifies "5x10", ' + 'then 5 rows by 10 columns of slices are rendered. 
If R < 0 and C > ' + '0 (or vice versa), the negative value is selected' + 'based on direction.')) direction = traits.Int(argstr='-d %d', desc=('Specifies the direction of ' - 'the slices. If no direction is specified, the ' - 'direction with the coarsest spacing is chosen.')) + 'the slices. If no direction is specified, the ' + 'direction with the coarsest spacing is chosen.')) pad_or_crop = traits.Str(argstr='-p %s', desc='argument passed to -p flag:' '[padVoxelWidth,]' @@ -115,7 +115,7 @@ class CreateTiledMosaicInputSpec(ANTSCommandInputSpec): ) slices = traits.Str(argstr='-s %s', desc=('Number of slices to increment Slice1xSlice2xSlice3' - '[numberOfSlicesToIncrement,,]')) + '[numberOfSlicesToIncrement,,]')) flip_slice = traits.Str(argstr='-f %s', desc=('flipXxflipY')) permute_axes = traits.Bool(argstr='-g', desc='doPermute' diff --git a/nipype/interfaces/base.py b/nipype/interfaces/base.py index fd1f3e1dd5..f37f65fda7 100644 --- a/nipype/interfaces/base.py +++ b/nipype/interfaces/base.py @@ -27,7 +27,9 @@ import select import subprocess import sys -import random, time, fnmatch +import random +import time +import fnmatch from textwrap import wrap from datetime import datetime as dt from dateutil.parser import parse as parseutc @@ -514,8 +516,8 @@ def _clean_container(self, object, undefinedval=None, skipundefined=False): else: if not skipundefined: out[key] = undefinedval - elif (isinstance(object, TraitListObject) or isinstance(object, list) - or isinstance(object, tuple)): + elif (isinstance(object, TraitListObject) or + isinstance(object, list) or isinstance(object, tuple)): out = [] for val in object: if isdefined(val): @@ -564,9 +566,8 @@ def get_hashval(self, hash_method=None): if has_metadata(trait.trait_type, "nohash", True): continue hash_files = (not has_metadata(trait.trait_type, "hash_files", - False) - and not has_metadata(trait.trait_type, - "name_source")) + False) and not + has_metadata(trait.trait_type, "name_source")) dict_nofilename.append((name, self._get_sorteddict(val, hash_method=hash_method, hash_files=hash_files))) @@ -800,7 +801,7 @@ def _get_trait_desc(self, inputs, name, spec): line = "(%s%s)" % (type_info, default) manhelpstr = wrap(line, 70, - initial_indent=manhelpstr[0]+': ', + initial_indent=manhelpstr[0] + ': ', subsequent_indent='\t\t ') if desc: @@ -1500,7 +1501,7 @@ def _run_interface(self, runtime, correct_return_codes=[0]): runtime = run_command(runtime, output=self.inputs.terminal_output, redirect_x=self._redirect_x) if runtime.returncode is None or \ - runtime.returncode not in correct_return_codes: + runtime.returncode not in correct_return_codes: self.raise_exception(runtime) return runtime @@ -1522,8 +1523,8 @@ def _format_arg(self, name, trait_spec, value): # traits.Either turns into traits.TraitCompound and does not have any # inner_traits elif trait_spec.is_trait_type(traits.List) \ - or (trait_spec.is_trait_type(traits.TraitCompound) - and isinstance(value, list)): + or (trait_spec.is_trait_type(traits.TraitCompound) and + isinstance(value, list)): # This is a bit simple-minded at present, and should be # construed as the default. If more sophisticated behavior # is needed, it can be accomplished with metadata (e.g. 
@@ -1771,13 +1772,14 @@ def validate(self, object, name, value): newvalue = value if not isinstance(value, list) \ - or (self.inner_traits() - and isinstance(self.inner_traits()[0].trait_type, traits.List) - and not isinstance(self.inner_traits()[0].trait_type, - InputMultiPath) - and isinstance(value, list) - and value - and not isinstance(value[0], list)): + or (self.inner_traits() and + isinstance(self.inner_traits()[0].trait_type, + traits.List) and not + isinstance(self.inner_traits()[0].trait_type, + InputMultiPath) and + isinstance(value, list) and + value and not + isinstance(value[0], list)): newvalue = [value] value = super(MultiPath, self).validate(object, name, newvalue) diff --git a/nipype/interfaces/camino/__init__.py b/nipype/interfaces/camino/__init__.py index ec8a9f4c8e..e3f7271626 100644 --- a/nipype/interfaces/camino/__init__.py +++ b/nipype/interfaces/camino/__init__.py @@ -11,7 +11,7 @@ TrackBayesDirac, TrackDT, TrackBallStick, TrackBootstrap, TrackBedpostxDeter, TrackBedpostxProba, ComputeFractionalAnisotropy, ComputeMeanDiffusivity, - ComputeTensorTrace, ComputeEigensystem, DTMetric) + ComputeTensorTrace, ComputeEigensystem, DTMetric) from .calib import (SFPICOCalibData, SFLUTGen) from .odf import (QBallMX, LinRecon, SFPeaks, MESD) from .utils import ImageStats diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index fde488d13f..3a41c801e2 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -153,4 +153,3 @@ def _gen_filename(self, name): _, filename, _ = split_filename(self.inputs.in_file) filename = filename + "_" return filename - diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index 956ef9b474..cdde8a2b88 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -188,13 +188,13 @@ class ProcStreamlinesInputSpec(StdOutCommandLineInputSpec): desc='data file') maxtractpoints = traits.Int(argstr='-maxtractpoints %d', units='NA', - desc="maximum number of tract points") + desc="maximum number of tract points") mintractpoints = traits.Int(argstr='-mintractpoints %d', units='NA', - desc="minimum number of tract points") + desc="minimum number of tract points") maxtractlength = traits.Int(argstr='-maxtractlength %d', units='mm', - desc="maximum length of tracts") + desc="maximum length of tracts") mintractlength = traits.Int(argstr='-mintractlength %d', units='mm', - desc="minimum length of tracts") + desc="minimum length of tracts") datadims = traits.List(traits.Int, desc='data dimensions in voxels', argstr='-datadims %s', minlen=3, maxlen=3, units='voxels') @@ -288,7 +288,7 @@ def _run_interface(self, runtime): if not os.path.exists(base): os.makedirs(base) new_runtime = super(ProcStreamlines, self)._run_interface(runtime) - self.outputroot_files = glob.glob(os.path.join(os.getcwd(), actual_outputroot+'*')) + self.outputroot_files = glob.glob(os.path.join(os.getcwd(), actual_outputroot + '*')) return new_runtime else: new_runtime = super(ProcStreamlines, self)._run_interface(runtime) diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 5043b83ff8..8402fcf45f 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -262,29 +262,29 @@ def _gen_outfilename(self): class DTLUTGenInputSpec(StdOutCommandLineInputSpec): - lrange = traits.List(traits.Float, desc='Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3.' 
\ - 'The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition).' \ - 'The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, '\ - 'and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. The default range is 1 to 10.', \ + lrange = traits.List(traits.Float, desc='Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3.' + 'The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition).' + 'The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, ' + 'and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. The default range is 1 to 10.', argstr='-lrange %s', minlen=2, maxlen=2, position=1, units='NA') - frange = traits.List(traits.Float, desc='Index to two-tensor LUTs. This is the fractional anisotropy \ - of the two tensors. The default is 0.3 to 0.94', \ + frange = traits.List(traits.Float, desc='Index to two-tensor LUTs. This is the fractional anisotropy' + ' of the two tensors. The default is 0.3 to 0.94', argstr='-frange %s', minlen=2, maxlen=2, position=1, units='NA') step = traits.Float(argstr='-step %f', units='NA', - desc='Distance between points in the LUT.' \ - 'For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed ' \ - 'at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3.' \ + desc='Distance between points in the LUT.' + 'For example, if lrange is 1 to 10 and the step is 0.1, LUT entries will be computed ' + 'at L1 / L3 = 1, 1.1, 1.2 ... 10.0 and at L2 / L3 = 1.0, 1.1 ... L1 / L3.' 'For single tensor LUTs, the default step is 0.2, for two-tensor LUTs it is 0.02.') samples = traits.Int(argstr='-samples %d', units='NA', desc='The number of synthetic measurements to generate at each point in the LUT. The default is 2000.') snr = traits.Float(argstr='-snr %f', units='NA', - desc='The signal to noise ratio of the unweighted (q = 0) measurements.'\ + desc='The signal to noise ratio of the unweighted (q = 0) measurements.' 'This should match the SNR (in white matter) of the images that the LUTs are used with.') bingham = traits.Bool(argstr='-bingham', desc="Compute a LUT for the Bingham PDF. This is the default.") @@ -351,27 +351,27 @@ class PicoPDFsInputSpec(StdOutCommandLineInputSpec): argstr='-inputmodel %s', position=2, desc='input model type', usedefault=True) luts = InputMultiPath(File(exists=True), argstr='-luts %s', mandatory=True, - desc='Files containing the lookup tables.'\ - 'For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor).'\ - 'For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above).'\ + desc='Files containing the lookup tables.' + 'For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor).' + 'For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above).' 'These LUTs may be generated with dtlutgen.') pdf = traits.Enum('bingham', 'watson', 'acg', - argstr='-pdf %s', position=4, desc=' Specifies the PDF to use. There are three choices:'\ - 'watson - The Watson distribution. 
This distribution is rotationally symmetric.'\ - 'bingham - The Bingham distributionn, which allows elliptical probability density contours.'\ + argstr='-pdf %s', position=4, desc=' Specifies the PDF to use. There are three choices:' + 'watson - The Watson distribution. This distribution is rotationally symmetric.' + 'bingham - The Bingham distribution, which allows elliptical probability density contours.' 'acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours', usedefault=True) directmap = traits.Bool(argstr='-directmap', desc="Only applicable when using pds as the inputmodel. Use direct mapping between the eigenvalues and the distribution parameters instead of the log of the eigenvalues.") maxcomponents = traits.Int(argstr='-maxcomponents %d', units='NA', - desc='The maximum number of tensor components in a voxel (default 2) for multitensor data.'\ + desc='The maximum number of tensor components in a voxel (default 2) for multitensor data.' 'Currently, only the default is supported, but future releases may allow the input of three-tensor data using this option.') numpds = traits.Int(argstr='-numpds %d', units='NA', - desc='The maximum number of PDs in a voxel (default 3) for PD data.' \ - 'This option determines the size of the input and output voxels.' \ - 'This means that the data file may be large enough to accomodate three or more PDs,'\ + desc='The maximum number of PDs in a voxel (default 3) for PD data.' + 'This option determines the size of the input and output voxels.' + 'This means that the data file may be large enough to accommodate three or more PDs,' 'but does not mean that any of the voxels are classified as containing three or more PDs.') @@ -835,19 +835,19 @@ class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): inputmodel = traits.Enum('dt', 'twotensor', 'threetensor', argstr='-inputmodel %s', - desc='Specifies the model that the input tensor data contains parameters for.' \ - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ - '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ + desc='Specifies the model that the input tensor data contains parameters for.' + 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' + '"threetensor" (three-tensor data). By default, the program assumes that the input data ' 'contains a single diffusion tensor in each voxel.') inputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', argstr='-inputdatatype %s', - desc='Specifies the data type of the input file. The data type can be any of the' \ + desc='Specifies the data type of the input file. The data type can be any of the' 'following strings: "char", "short", "int", "long", "float" or "double".') outputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', argstr='-outputdatatype %s', - desc='Specifies the data type of the output data. The data type can be any of the' \ + desc='Specifies the data type of the output data. The data type can be any of the' 'following strings: "char", "short", "int", "long", "float" or "double".') @@ -891,19 +891,19 @@ class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum('dt', 'twotensor', 'threetensor', 'multitensor', argstr='-inputmodel %s', - desc='Specifies the model that the input tensor data contains parameters for.'
\ - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ - '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ + desc='Specifies the model that the input tensor data contains parameters for.' + 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' + '"threetensor" (three-tensor data). By default, the program assumes that the input data ' 'contains a single diffusion tensor in each voxel.') inputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', argstr='-inputdatatype %s', - desc='Specifies the data type of the input file. The data type can be any of the' \ + desc='Specifies the data type of the input file. The data type can be any of the' 'following strings: "char", "short", "int", "long", "float" or "double".') outputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', argstr='-outputdatatype %s', - desc='Specifies the data type of the output data. The data type can be any of the' \ + desc='Specifies the data type of the output data. The data type can be any of the' 'following strings: "char", "short", "int", "long", "float" or "double".') @@ -953,19 +953,19 @@ class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): inputmodel = traits.Enum('dt', 'twotensor', 'threetensor', 'multitensor', argstr='-inputmodel %s', - desc='Specifies the model that the input tensor data contains parameters for.' \ - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), '\ - '"threetensor" (three-tensor data). By default, the program assumes that the input data '\ + desc='Specifies the model that the input tensor data contains parameters for.' + 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' + '"threetensor" (three-tensor data). By default, the program assumes that the input data ' 'contains a single diffusion tensor in each voxel.') inputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', argstr='-inputdatatype %s', - desc='Specifies the data type of the input file. The data type can be any of the' \ + desc='Specifies the data type of the input file. The data type can be any of the' 'following strings: "char", "short", "int", "long", "float" or "double".') outputdatatype = traits.Enum('char', 'short', 'int', 'long', 'float', 'double', argstr='-outputdatatype %s', - desc='Specifies the data type of the output data. The data type can be any of the' \ + desc='Specifies the data type of the output data. 
The data type can be any of the' 'following strings: "char", "short", "int", "long", "float" or "double".') diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index bd39f06760..e39bc81117 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -438,5 +438,3 @@ def _list_outputs(self): def _gen_outfilename(self): _, name, _ = split_filename(self.inputs.in_file) return name + '_peaks.Bdouble' - - diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 0b539837eb..3da9c3bf2f 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -385,8 +385,8 @@ def cmat(track_file, roi_file, resolution_network_file, matrix_name, matrix_mat_ iflogger.info("Filtering tractography - keeping only no orphan fibers") finalfibers_fname = op.abspath(endpoint_name + '_streamline_final.trk') stats['endpoint_n_fib'] = save_fibers(hdr, fib, finalfibers_fname, final_fibers_idx) - stats['endpoints_percent'] = float(stats['endpoint_n_fib'])/float(stats['orig_n_fib'])*100 - stats['intersections_percent'] = float(stats['intersections_n_fib'])/float(stats['orig_n_fib'])*100 + stats['endpoints_percent'] = float(stats['endpoint_n_fib']) / float(stats['orig_n_fib']) * 100 + stats['intersections_percent'] = float(stats['intersections_n_fib']) / float(stats['orig_n_fib']) * 100 out_stats_file = op.abspath(endpoint_name + '_statistics.mat') iflogger.info("Saving matrix creation statistics as %s" % out_stats_file) diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index f874aa186f..33ee7616b9 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -7,7 +7,8 @@ """ -import os, os.path as op +import os +import os.path as op import datetime import string import warnings diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index 8a64ad95f6..8fd539691f 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -43,7 +43,7 @@ class NetworkBasedStatisticInputSpec(BaseInterfaceInputSpec): number_of_permutations = traits.Int(1000, usedefault=True, desc='Number of permutations to perform') threshold = traits.Float(3, usedefault=True, desc='T-statistic threshold') t_tail = traits.Enum('left', 'right', 'both', usedefault=True, desc='Can be one of "left", "right", or "both"') - edge_key = traits.Str('number_of_fibers', usedefault=True, desc='Usually "number_of_fibers, "fiber_length_mean", "fiber_length_std" for matrices made with CMTK' \ + edge_key = traits.Str('number_of_fibers', usedefault=True, desc='Usually "number_of_fibers, "fiber_length_mean", "fiber_length_std" for matrices made with CMTK' 'Sometimes "weight" or "value" for functional networks.') out_nbs_network = File(desc='Output network with edges identified by the NBS') out_nbs_pval_network = File(desc='Output network with p-values to weight the edges identified by the NBS') diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index 1716ad0c8d..64b817a746 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -169,7 +169,7 @@ def average_networks(in_files, ntwk_res_file, group_id): data[key] = data[key] / len(in_files) ntwk.edge[edge[0]][edge[1]] = data avg_ntwk.add_edge(edge[0], edge[1], data) - edge_dict['count'][edge[0]-1][edge[1]-1] = ntwk.edge[edge[0]][edge[1]]['count'] + edge_dict['count'][edge[0] - 1][edge[1] - 1] = ntwk.edge[edge[0]][edge[1]]['count'] iflogger.info('After thresholding, the average network has 
has {n} edges'.format(n=avg_ntwk.number_of_edges())) @@ -179,7 +179,7 @@ def average_networks(in_files, ntwk_res_file, group_id): for key in list(data.keys()): if not key == 'count': edge_dict[key] = np.zeros((avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes())) - edge_dict[key][edge[0]-1][edge[1]-1] = data[key] + edge_dict[key][edge[0] - 1][edge[1] - 1] = data[key] for key in list(edge_dict.keys()): tmp = {} @@ -505,7 +505,7 @@ def _gen_outfilename(self, name, ext): class AverageNetworksInputSpec(BaseInterfaceInputSpec): in_files = InputMultiPath(File(exists=True), mandatory=True, desc='Networks for a group of subjects') - resolution_network_file = File(exists=True, desc='Parcellation files from Connectome Mapping Toolkit. This is not necessary' \ + resolution_network_file = File(exists=True, desc='Parcellation files from Connectome Mapping Toolkit. This is not necessary' ', but if included, the interface will output the statistical maps as networkx graphs.') group_id = traits.Str('group1', usedefault=True, desc='ID for group') out_gpickled_groupavg = File(desc='Average network saved as a NetworkX .pck') diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index 140b7273ca..2297177e01 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -273,7 +273,7 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): iflogger.info("[ DONE ]") # dilate cortical regions - if (dilation == True): + if dilation is True: iflogger.info("Dilating cortical regions...") # loop throughout all the voxels belonging to the aseg GM volume for j in range(xx.size): @@ -463,7 +463,7 @@ def crop_and_move_datasets(subject_id, subjects_dir, fs_dir, parcellation_name, ds.append((op.abspath('ROI_%s.nii.gz' % parcellation_name), op.abspath('ROI_HR_th.nii.gz'))) - if(dilation == True): + if dilation is True: ds.append((op.abspath('ROIv_%s.nii.gz' % parcellation_name), op.abspath('ROIv_HR_th.nii.gz'))) orig = op.join(fs_dir, 'mri', 'orig', '001.mgz') @@ -588,7 +588,7 @@ def _list_outputs(self): else: outputs['roi_file'] = op.abspath( self._gen_outfilename('nii.gz', 'ROI')) - if(self.inputs.dilation == True): + if self.inputs.dilation is True: outputs['roiv_file'] = op.abspath(self._gen_outfilename( 'nii.gz', 'ROIv')) outputs['white_matter_mask_file'] = op.abspath('fsmask_1mm.nii.gz') @@ -597,7 +597,7 @@ def _list_outputs(self): outputs['aseg_file'] = op.abspath('aseg.nii.gz') outputs['roi_file_in_structural_space'] = op.abspath( 'ROI_HR_th.nii.gz') - if(self.inputs.dilation == True): + if self.inputs.dilation is True: outputs['dilated_roi_file_in_structural_space'] = op.abspath( 'ROIv_HR_th.nii.gz') return outputs diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index b3f7dc0c53..a06065fa41 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -37,7 +37,7 @@ def sanitize_path_comp(path_comp): result = [] for char in path_comp: - if not char in string.letters + string.digits + '-_.': + if char not in string.letters + string.digits + '-_.': result.append('_') else: result.append(char) @@ -63,7 +63,7 @@ def _get_out_path(self, meta, idx=None): # If no out_format is specified, use a sane default that will work # with the provided meta data. 
out_fmt = [] - if not idx is None: + if idx is not None: out_fmt.append('%03d' % idx) if 'SeriesNumber' in meta: out_fmt.append('%(SeriesNumber)03d') @@ -303,7 +303,7 @@ def _run_interface(self, runtime): if self.inputs.exclude_classes: classes = [cls for cls in classes - if not cls in self.inputs.exclude_classes + if cls not in self.inputs.exclude_classes ] for cls in classes: @@ -424,4 +424,3 @@ def _list_outputs(self): outputs = self._outputs().get() outputs['out_list'] = self.out_list return outputs - diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index 76e87c2c64..554f2bf38a 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -77,7 +77,7 @@ def _create_gradient_matrix(self, bvecs_file, bvals_file): bvecs_f.close() gradient_matrix_f = open(_gradient_matrix_file, 'w') for i in range(len(bvals)): - gradient_matrix_f.write("%s, %s, %s, %s\n" %(bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i])) + gradient_matrix_f.write("%s, %s, %s, %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i])) gradient_matrix_f.close() return _gradient_matrix_file @@ -92,18 +92,18 @@ def _list_outputs(self): output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['ADC'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_adc.' + output_type)) - outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.' + output_type)) - outputs['L1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e1.' + output_type)) - outputs['L2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e2.' + output_type)) - outputs['L3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e3.' + output_type)) - outputs['exp'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_exp.' + output_type)) - outputs['FA'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa.' + output_type)) - outputs['FA_color'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa_color.' + output_type)) - outputs['tensor'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_tensor.' + output_type)) - outputs['V1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v1.' + output_type)) - outputs['V2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v2.' + output_type)) - outputs['V3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v3.' + output_type)) + outputs['ADC'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_adc.' + output_type)) + outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.' + output_type)) + outputs['L1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e1.' + output_type)) + outputs['L2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e2.' + output_type)) + outputs['L3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_e3.' + output_type)) + outputs['exp'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_exp.' + output_type)) + outputs['FA'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa.' + output_type)) + outputs['FA_color'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_fa_color.' + output_type)) + outputs['tensor'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_tensor.' 
+ output_type)) + outputs['V1'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v1.' + output_type)) + outputs['V2'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v2.' + output_type)) + outputs['V3'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_v3.' + output_type)) return outputs diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 87495410f5..b2f0b2c6a7 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -75,7 +75,7 @@ def _create_gradient_matrix(self, bvecs_file, bvals_file): for i in range(len(bvals)): if int(bvals[i]) == 0: continue - gradient_matrix_f.write("%s %s %s\n" %(bvecs_x[i], bvecs_y[i], bvecs_z[i])) + gradient_matrix_f.write("%s %s %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i])) gradient_matrix_f.close() return _gradient_matrix_file @@ -143,12 +143,12 @@ def _list_outputs(self): output_type = self.inputs.output_type outputs = self.output_spec().get() - outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.' + output_type)) - outputs['DWI'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_dwi.' + output_type)) - outputs['max'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_max.' + output_type)) - outputs['ODF'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_odf.' + output_type)) + outputs['B0'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_b0.' + output_type)) + outputs['DWI'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_dwi.' + output_type)) + outputs['max'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_max.' + output_type)) + outputs['ODF'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_odf.' + output_type)) if isdefined(self.inputs.output_entropy): - outputs['entropy'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_entropy.' + output_type)) + outputs['entropy'] = os.path.abspath(fname_presuffix("", prefix=out_prefix, suffix='_entropy.' 
+ output_type)) return outputs @@ -233,4 +233,3 @@ def _list_outputs(self): outputs = self.output_spec().get() outputs['track_file'] = os.path.abspath(self.inputs.out_file) return outputs - diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 4c6a7e99c7..1c26ef4acf 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -22,12 +22,12 @@ class SlicerCommandLine(CommandLine): output_spec = DynamicTraitedSpec def _grab_xml(self, module): - cmd = CommandLine(command="Slicer3", args="--launch %s --xml" %module) + cmd = CommandLine(command="Slicer3", args="--launch %s --xml" % module) ret = cmd.run() if ret.runtime.returncode == 0: return xml.dom.minidom.parseString(ret.runtime.stdout) else: - raise Exception(cmd.cmdline + " failed:\n%s" %ret.runtime.stderr) + raise Exception(cmd.cmdline + " failed:\n%s" % ret.runtime.stderr) def _outputs(self): base = super(SlicerCommandLine, self)._outputs() @@ -42,7 +42,7 @@ def _outputs(self): def __init__(self, module, **inputs): warnings.warn('slicer is Not fully implemented', RuntimeWarning) - super(SlicerCommandLine, self).__init__(command="Slicer3 --launch %s " %module, name=module, **inputs) + super(SlicerCommandLine, self).__init__(command="Slicer3 --launch %s " % module, name=module, **inputs) dom = self._grab_xml(module) self._outputs_filenames = {} diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index 2c857136e5..541cfa1ec4 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -137,7 +137,7 @@ def _list_outputs(self): def _get_outfile(self): val = getattr(self, '_out_file') - if not val is None and not val == '': + if val is not None and val != '': return val if isdefined(self.inputs.output_file): diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index c2d634291b..619edde594 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -789,13 +789,13 @@ def cmdline(self): if all([os.path.exists(os.path.join(subjects_dir, self.inputs.subject_id, f)) for f in outfiles]): - flags.append('-no%s' %step) + flags.append('-no%s' % step) if idx > 4: directive = 'autorecon2' elif idx > 23: directive = 'autorecon3' else: - flags.append('-%s' %step) + flags.append('-%s' % step) cmd = cmd.replace(' -%s ' % self.inputs.directive, ' -%s ' % directive) cmd += ' ' + ' '.join(flags) iflogger.info('resume recon-all : %s' % cmd) @@ -1003,8 +1003,8 @@ class ApplyVolTransform(FSCommand): def _get_outfile(self): outfile = self.inputs.transformed_file if not isdefined(outfile): - if self.inputs.inverse == True: - if self.inputs.fs_target == True: + if self.inputs.inverse is True: + if self.inputs.fs_target is True: src = 'orig.mgz' else: src = self.inputs.target_file diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index 509964ddf5..3086c85bf7 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -55,7 +55,7 @@ def test_robustregister(): reg.inputs.target_file = filelist[1] reg.inputs.auto_sens = True yield assert_equal, reg.cmdline, ('mri_robust_register ' - '--satit --lta %s_robustreg.lta --mov %s --dst %s' %(filelist[0][:-4], filelist[0], filelist[1])) + '--satit --lta %s_robustreg.lta --mov %s --dst %s' % (filelist[0][:-4], filelist[0], filelist[1])) # constructor based 
parameter setting reg2 = freesurfer.RobustRegister(source_file=filelist[0], target_file=filelist[1], outlier_sens=3.0, @@ -81,7 +81,7 @@ def test_fitmsparams(): # .inputs based parameters setting fit.inputs.in_files = filelist fit.inputs.out_dir = outdir - yield assert_equal, fit.cmdline, 'mri_ms_fitparms %s %s %s' %(filelist[0], filelist[1], outdir) + yield assert_equal, fit.cmdline, 'mri_ms_fitparms %s %s %s' % (filelist[0], filelist[1], outdir) # constructor based parameter setting fit2 = freesurfer.FitMSParams(in_files=filelist, te_list=[1.5, 3.5], flip_list=[20, 30], out_dir=outdir) @@ -117,4 +117,3 @@ def test_synthesizeflash(): syn2 = freesurfer.SynthesizeFLASH(t1_image=filelist[0], pd_image=filelist[1], flip_angle=20, te=5, tr=25) yield assert_equal, syn2.cmdline, ('mri_synthesize 25.00 20.00 5.000 %s %s %s' % (filelist[0], filelist[1], os.path.join(outdir, 'synth-flash_20.mgz'))) - diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index a1ce930dc2..876d7c8c0a 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -193,12 +193,12 @@ def test_applymask(): masker.inputs.in_file = filelist[0] masker.inputs.mask_file = filelist[1] outfile = os.path.join(testdir, "a_masked.nii") - yield assert_equal, masker.cmdline, "mri_mask a.nii b.nii %s" %outfile + yield assert_equal, masker.cmdline, "mri_mask a.nii b.nii %s" % outfile # Now test that optional inputs get formatted properly masker.inputs.mask_thresh = 2 - yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 a.nii b.nii %s" %outfile + yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 a.nii b.nii %s" % outfile masker.inputs.use_abs = True - yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 -abs a.nii b.nii %s" %outfile + yield assert_equal, masker.cmdline, "mri_mask -T 2.0000 -abs a.nii b.nii %s" % outfile # Now clean up clean_directory(testdir, origdir) diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 7a79386c7e..5bc417d3ba 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -633,7 +633,7 @@ def _format_arg(self, name, spec, value): def _run_interface(self, runtime): if not isdefined(self.inputs.screenshot_stem): stem = "%s_%s_%s" % ( - self.inputs.subject_id, self.inputs.hemi, self.inputs.surface) + self.inputs.subject_id, self.inputs.hemi, self.inputs.surface) else: stem = self.inputs.screenshot_stem stem_args = self.inputs.stem_template_args @@ -641,7 +641,7 @@ def _run_interface(self, runtime): args = tuple([getattr(self.inputs, arg) for arg in stem_args]) stem = stem % args # Check if the DISPLAY variable is set -- should avoid crashes (might not?) 
- if not "DISPLAY" in os.environ: + if "DISPLAY" not in os.environ: raise RuntimeError("Graphics are not enabled -- cannot run tksurfer") runtime.environ["_SNAPSHOT_STEM"] = stem self._write_tcl_script() @@ -814,7 +814,7 @@ class MRIsConvertInputSpec(FSTraitedSpec): # Not really sure why the ./ is necessary but the module fails without it out_datatype = traits.Enum("ico", "tri", "stl", "vtk", "gii", "mgh", "mgz", mandatory=True, - desc="These file formats are supported: ASCII: .asc" \ + desc="These file formats are supported: ASCII: .asc" "ICO: .ico, .tri GEO: .geo STL: .stl VTK: .vtk GIFTI: .gii MGH surface-encoded 'volume': .mgh, .mgz") diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index 3e13ddccbe..84e44e4f7a 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -96,8 +96,8 @@ def _list_outputs(self): outputs = self.output_spec().get() for k in list(outputs.keys()): if k not in ('outputtype', 'environ', 'args'): - if k != 'tensor' or (isdefined(self.inputs.save_tensor) - and self.inputs.save_tensor): + if k != 'tensor' or (isdefined(self.inputs.save_tensor) and + self.inputs.save_tensor): outputs[k] = self._gen_fname(self.inputs.base_name, suffix='_' + k) return outputs @@ -177,9 +177,9 @@ class FSLXCommandOutputSpec(TraitedSpec): mean_fsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' 'distribution on f anisotropy')) mean_S0samples = File(exists=True, desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) + 'baseline signal intensity S0')) mean_tausamples = File(exists=True, desc=('Mean of distribution on ' - 'tau samples (only with rician noise)')) + 'tau samples (only with rician noise)')) phsamples = OutputMultiPath(File(exists=True), desc=('phi samples, per fiber')) thsamples = OutputMultiPath(File(exists=True), desc=('theta samples, per fiber')) @@ -273,7 +273,7 @@ class BEDPOSTX5OutputSpec(TraitedSpec): mean_fsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' 'distribution on f anisotropy')) mean_S0samples = File(exists=True, desc=('Mean of distribution on T2w' - 'baseline signal intensity S0')) + 'baseline signal intensity S0')) mean_phsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' 'distribution on phi')) mean_thsamples = OutputMultiPath(File(exists=True), desc=('Mean of ' @@ -534,7 +534,7 @@ class BEDPOSTX4OutputSpec(TraitedSpec): 'bedpostx output files for this subject') xfms_directory = Directory(exists=True, desc='path/name of directory with the ' + - 'tranformation matrices') + 'tranformation matrices') merged_thsamples = traits.List(File(exists=True), desc='a list of path/name of 4D volume ' + 'with samples from the distribution ' + @@ -552,7 +552,7 @@ class BEDPOSTX4OutputSpec(TraitedSpec): desc='a list of path/name of 3D volume with mean of distribution on phi') mean_fsamples = traits.List(File(exists=True), desc='a list of path/name of 3D volume with mean of distribution on f anisotropy') - dyads = traits.List(File(exists=True), desc='a list of path/name of mean of PDD distribution in vector form') + dyads = traits.List(File(exists=True), desc='a list of path/name of mean of PDD distribution in vector form') class BEDPOSTX4(FSLCommand): @@ -623,27 +623,27 @@ def _list_outputs(self): outputs[k] = [] for n in range(self.inputs.fibres): - outputs['merged_thsamples'].append(self._gen_fname('merged_th' + repr(n + 1) - + 'samples', suffix='', - cwd=outputs['bpx_out_directory'])) - outputs['merged_phsamples'].append(self._gen_fname('merged_ph' + repr(n + 1) - + 'samples', 
suffix='',
-                                                               cwd=outputs['bpx_out_directory']))
-            outputs['merged_fsamples'].append(self._gen_fname('merged_f' + repr(n + 1)
-                                                              + 'samples', suffix='',
-                                                              cwd=outputs['bpx_out_directory']))
-            outputs['mean_thsamples'].append(self._gen_fname('mean_th' + repr(n + 1)
-                                                             + 'samples', suffix='',
-                                                             cwd=outputs['bpx_out_directory']))
-            outputs['mean_phsamples'].append(self._gen_fname('mean_ph' + repr(n + 1)
-                                                             + 'samples', suffix='',
-                                                             cwd=outputs['bpx_out_directory']))
-            outputs['mean_fsamples'].append(self._gen_fname('mean_f' + repr(n + 1)
-                                                            + 'samples', suffix='',
-                                                            cwd=outputs['bpx_out_directory']))
-            outputs['dyads'].append(self._gen_fname('dyads' + repr(n + 1),
-                                                    suffix='',
-                                                    cwd=outputs['bpx_out_directory']))
+            outputs['merged_thsamples'].append(self._gen_fname(
+                'merged_th' + repr(n + 1) + 'samples',
+                suffix='', cwd=outputs['bpx_out_directory']))
+            outputs['merged_phsamples'].append(self._gen_fname(
+                'merged_ph' + repr(n + 1) + 'samples',
+                suffix='', cwd=outputs['bpx_out_directory']))
+            outputs['merged_fsamples'].append(self._gen_fname(
+                'merged_f' + repr(n + 1) + 'samples',
+                suffix='', cwd=outputs['bpx_out_directory']))
+            outputs['mean_thsamples'].append(self._gen_fname(
+                'mean_th' + repr(n + 1) + 'samples',
+                suffix='', cwd=outputs['bpx_out_directory']))
+            outputs['mean_phsamples'].append(self._gen_fname(
+                'mean_ph' + repr(n + 1) + 'samples',
+                suffix='', cwd=outputs['bpx_out_directory']))
+            outputs['mean_fsamples'].append(self._gen_fname(
+                'mean_f' + repr(n + 1) + 'samples',
+                suffix='', cwd=outputs['bpx_out_directory']))
+            outputs['dyads'].append(self._gen_fname(
+                'dyads' + repr(n + 1),
+                suffix='', cwd=outputs['bpx_out_directory']))
         return outputs
@@ -735,8 +735,8 @@ class ProbTrackXBaseInputSpec(FSLCommandInputSpec):

 class ProbTrackXInputSpec(ProbTrackXBaseInputSpec):
     mode = traits.Enum("simple", "two_mask_symm", "seedmask",
-                       desc='options: simple (single seed voxel), seedmask (mask of seed voxels), '
-                       + 'twomask_symm (two bet binary masks) ',
+                       desc='options: simple (single seed voxel), seedmask (mask of seed voxels), ' +
+                       'twomask_symm (two bet binary masks) ',
                        argstr='--mode=%s', genfile=True)
     mask2 = File(exists=True, desc='second bet binary mask (in diffusion space) in twomask_symm mode', argstr='--mask2=%s')
@@ -836,14 +836,15 @@ def _list_outputs(self):
         outputs['log'] = os.path.abspath(os.path.join(out_dir, 'probtrackx.log'))
         # utputs['way_total'] = os.path.abspath(os.path.join(out_dir, 'waytotal'))
-        if isdefined(self.inputs.opd == True):
+        # isdefined() must test the trait itself; wrapping the comparison
+        # hands isdefined() a plain bool, which is always "defined".
+        if isdefined(self.inputs.opd) and self.inputs.opd:
             if isinstance(self.inputs.seed, list) and isinstance(self.inputs.seed[0], list):
                 outputs['fdt_paths'] = []
                 for seed in self.inputs.seed:
                     outputs['fdt_paths'].append(
-                            os.path.abspath(
-                                self._gen_fname("fdt_paths_%s" % ("_".join([str(s) for s in seed])),
-                                                cwd=out_dir, suffix='')))
+                        os.path.abspath(
+                            self._gen_fname("fdt_paths_%s" % ("_".join([str(s) for s in seed])),
+                                            cwd=out_dir, suffix='')))
             else:
                 outputs['fdt_paths'] = os.path.abspath(self._gen_fname("fdt_paths", cwd=out_dir, suffix=''))
@@ -853,13 +853,13 @@ def _list_outputs(self):
             outputs['targets'] = []
             for target in self.inputs.target_masks:
                 outputs['targets'].append(os.path.abspath(
-                                                self._gen_fname('seeds_to_' + os.path.split(target)[1],
-                                                                cwd=out_dir,
-                                                                suffix='')))
+                    self._gen_fname('seeds_to_' + os.path.split(target)[1],
+                                    cwd=out_dir,
+                                    suffix='')))
         if isdefined(self.inputs.verbose) and self.inputs.verbose == 2:
             outputs['particle_files'] = [os.path.abspath(
-                    os.path.join(out_dir, 'particle%d' % i))
-                    for i in
range(self.inputs.n_samples)]
+                os.path.join(out_dir, 'particle%d' % i))
+                for i in range(self.inputs.n_samples)]
         return outputs

     def _gen_filename(self, name):
@@ -955,7 +955,7 @@ def _list_outputs(self):

         if isdefined(self.inputs.omatrix2):
             outputs['lookup_tractspace'] = \
-                    os.path.abspath(os.path.join(out_dir, 'lookup_tractspace_fdt_matrix2.nii.gz'))
+                os.path.abspath(os.path.join(out_dir, 'lookup_tractspace_fdt_matrix2.nii.gz'))
             outputs['matrix2_dot'] = os.path.abspath(os.path.join(out_dir, 'fdt_matrix2.dot'))

         if isdefined(self.inputs.omatrix3):
diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py
index ecbe766986..62e16e9319 100644
--- a/nipype/interfaces/fsl/model.py
+++ b/nipype/interfaces/fsl/model.py
@@ -1818,31 +1818,30 @@ class GLMInputSpec(FSLCommandInputSpec):
     in_file = File(exists=True, argstr='-i %s', mandatory=True, position=1,
                    desc='input file name (text matrix or 3D/4D image file)')
     out_file = File(name_template="%s_glm", argstr='-o %s', position=3,
-                    desc=('filename for GLM parameter estimates'
-                          + ' (GLM betas)'),
+                    desc=('filename for GLM parameter estimates' +
+                          ' (GLM betas)'),
                     name_source="in_file", keep_extension=True)
     design = File(exists=True, argstr='-d %s', mandatory=True, position=2,
-                  desc=('file name of the GLM design matrix (text time'
-                        + ' courses for temporal regression or an image'
-                        + ' file for spatial regression)'))
-    contrasts = File(exists=True, argstr='-c %s', desc=('matrix of t-statics'
-                     + ' contrasts'))
-    mask = File(exists=True, argstr='-m %s', desc=('mask image file name if'
-                + ' input is image'))
-    dof = traits.Int(argstr='--dof=%d', desc=('set degrees of freedom'
-                     + ' explicitly'))
-    des_norm = traits.Bool(argstr='--des_norm', desc=('switch on normalization'
-                           + ' of the design matrix'
-                           + ' columns to unit std'
-                           + ' deviation'))
+                  desc=('file name of the GLM design matrix (text time' +
+                        ' courses for temporal regression or an image' +
+                        ' file for spatial regression)'))
+    contrasts = File(exists=True, argstr='-c %s', desc=('matrix of t-statistics' +
+                     ' contrasts'))
+    mask = File(exists=True, argstr='-m %s', desc=('mask image file name if' +
+                ' input is image'))
+    dof = traits.Int(argstr='--dof=%d', desc=('set degrees of freedom' +
+                     ' explicitly'))
+    des_norm = traits.Bool(argstr='--des_norm',
+                           desc=('switch on normalization of the design' +
+                                 ' matrix columns to unit std deviation'))
     dat_norm = traits.Bool(argstr='--dat_norm', desc=('switch on normalization'
-                           + ' of the data time'
-                           + ' series to unit std'
-                           + ' deviation'))
-    var_norm = traits.Bool(argstr='--vn', desc=('perform MELODIC variance-'
-                           + 'normalisation on data'))
-    demean = traits.Bool(argstr='--demean', desc=('switch on demeaining of '
-                         + ' design and data'))
+                           ' of the data time' +
+                           ' series to unit std' +
+                           ' deviation'))
+    var_norm = traits.Bool(argstr='--vn', desc=('perform MELODIC variance-' +
+                           'normalisation on data'))
+    demean = traits.Bool(argstr='--demean', desc=('switch on demeaning of ' +
+                         'design and data'))
     out_cope = File(argstr='--out_cope=%s',
                     desc='output file name for COPE (either as txt or image')
     out_z_name = File(argstr='--out_z=%s',
@@ -1851,8 +1850,8 @@ class GLMInputSpec(FSLCommandInputSpec):
                       desc='output file name for t-stats (either as txt or image')
     out_p_name = File(argstr='--out_p=%s',
-                      desc=('output file name for p-values of Z-stats (either as'
-                            + ' text file or image)'))
+                      desc=('output file name for p-values of Z-stats (either as' +
+                            ' text file or image)'))
     out_f_name = File(argstr='--out_f=%s',
desc='output file name for F-value of full model fit') out_pf_name = File(argstr='--out_pf=%s', @@ -1863,13 +1862,13 @@ class GLMInputSpec(FSLCommandInputSpec): desc='output file name for variance of COPEs') out_sigsq_name = File(argstr='--out_sigsq=%s', - desc=('output file name for residual noise variance' - + ' sigma-square')) + desc=('output file name for residual noise variance' + + ' sigma-square')) out_data_name = File(argstr='--out_data=%s', desc='output file name for pre-processed data') out_vnscales_name = File(argstr='--out_vnscales=%s', - desc=('output file name for scaling factors for variance' - + ' normalisation')) + desc=('output file name for scaling factors for variance' + + ' normalisation')) class GLMOutputSpec(TraitedSpec): @@ -1961,4 +1960,3 @@ def _list_outputs(self): self.inputs.out_vnscales_name) return outputs - diff --git a/nipype/interfaces/fsl/tests/test_FILMGLS.py b/nipype/interfaces/fsl/tests/test_FILMGLS.py index 8e55381660..8fe231cf79 100644 --- a/nipype/interfaces/fsl/tests/test_FILMGLS.py +++ b/nipype/interfaces/fsl/tests/test_FILMGLS.py @@ -43,7 +43,7 @@ def test_filmgls(): threshold=dict(usedefault=True, argstr='--thr=%f',), tukey_window=dict(xor=['autocorr_estimate_only', 'fit_armodel', 'tukey_window', 'multitaper_product', 'use_pava', 'autocorr_noestimate'], argstr='--tukey=%d',), use_pava=dict(argstr='--pava',), - ) + ) instance = FILMGLS() if isinstance(instance.inputs, FILMGLSInputSpec): for key, metadata in list(input_map.items()): diff --git a/nipype/interfaces/fsl/tests/test_XFibres.py b/nipype/interfaces/fsl/tests/test_XFibres.py index ba315c0354..c38f642f86 100644 --- a/nipype/interfaces/fsl/tests/test_XFibres.py +++ b/nipype/interfaces/fsl/tests/test_XFibres.py @@ -1,3 +1,2 @@ from nipype.testing import assert_equal from nipype.interfaces.fsl.dti import XFibres - diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py index 0b7f27c165..43a3e4becf 100644 --- a/nipype/interfaces/fsl/tests/test_dti.py +++ b/nipype/interfaces/fsl/tests/test_dti.py @@ -98,10 +98,10 @@ def test_dtifit2(): dti.inputs.max_z = 50 yield assert_equal, dti.cmdline, \ - 'dtifit -k %s -o foo.dti.nii -m %s -r %s -b %s -Z 50 -z 10' %(filelist[0], - filelist[1], - filelist[0], - filelist[1]) + 'dtifit -k %s -o foo.dti.nii -m %s -r %s -b %s -Z 50 -z 10' % (filelist[0], + filelist[1], + filelist[0], + filelist[1]) clean_directory(outdir, cwd) @@ -172,40 +172,34 @@ def test_randomise2(): 'randomise -i infile3 -o outfile3' # test arguments for opt_map - opt_map = {'demean_data': ('-D', True), - 'one_sample_gmean': ('-1', True), - 'mask_image': ('-m inp_mask', 'inp_mask'), - 'design_matrix': ('-d design.mat', - 'design.mat'), - 't_contrast': ('-t input.con', - 'input.con'), - 'f_contrast': ('-f input.fts', - 'input.fts'), - 'xchange_block_labels': ('-e design.grp', - 'design.grp'), - 'print_unique_perm': ('-q', True), - 'print_info_parallelMode': ('-Q', True), - 'num_permutations': ('-n 10', 10), - 'vox_pvalus': ('-x', True), - 'fstats_only': ('--fonly', True), - 'thresh_free_cluster': ('-T', True), - 'thresh_free_cluster_2Dopt': ('--T2', True), - 'cluster_thresholding': ('-c 0.20', 0.20), - 'cluster_mass_thresholding': ('-C 0.40', 0.40), - 'fcluster_thresholding': ('-F 0.10', 0.10), - 'fcluster_mass_thresholding': ('-S 0.30', 0.30), - 'variance_smoothing': ('-v 0.20', 0.20), - 'diagnostics_off': ('--quiet', True), - 'output_raw': ('-R', True), - 'output_perm_vect': ('-P', True), - 'int_seed': ('--seed=20', 20), - 'TFCE_height_param': 
('--tfce_H=0.11', 0.11), - 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), - 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), - 'list_num_voxel_EVs_pos': ('--vxl=1,2,3,4', - '1,2,3,4'), - 'list_img_voxel_EVs': ('--vxf=6,7,8,9,3', - '6,7,8,9,3')} + opt_map = {'demean_data': ('-D', True), + 'one_sample_gmean': ('-1', True), + 'mask_image': ('-m inp_mask', 'inp_mask'), + 'design_matrix': ('-d design.mat', 'design.mat'), + 't_contrast': ('-t input.con', 'input.con'), + 'f_contrast': ('-f input.fts', 'input.fts'), + 'xchange_block_labels': ('-e design.grp', 'design.grp'), + 'print_unique_perm': ('-q', True), + 'print_info_parallelMode': ('-Q', True), + 'num_permutations': ('-n 10', 10), + 'vox_pvalus': ('-x', True), + 'fstats_only': ('--fonly', True), + 'thresh_free_cluster': ('-T', True), + 'thresh_free_cluster_2Dopt': ('--T2', True), + 'cluster_thresholding': ('-c 0.20', 0.20), + 'cluster_mass_thresholding': ('-C 0.40', 0.40), + 'fcluster_thresholding': ('-F 0.10', 0.10), + 'fcluster_mass_thresholding': ('-S 0.30', 0.30), + 'variance_smoothing': ('-v 0.20', 0.20), + 'diagnostics_off': ('--quiet', True), + 'output_raw': ('-R', True), + 'output_perm_vect': ('-P', True), + 'int_seed': ('--seed=20', 20), + 'TFCE_height_param': ('--tfce_H=0.11', 0.11), + 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), + 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), + 'list_num_voxel_EVs_pos': ('--vxl=1,2,3,4', '1,2,3,4'), + 'list_img_voxel_EVs': ('--vxf=6,7,8,9,3', '6,7,8,9,3')} for name, settings in list(opt_map.items()): rand4 = fsl.Randomise(input_4D='infile', output_rootname='root', @@ -255,42 +249,36 @@ def test_Randomise_parallel(): 'randomise_parallel -i infile3 -o outfile3' # test arguments for opt_map - opt_map = {'demean_data': ('-D', True), - 'one_sample_gmean': ('-1', True), - 'mask_image': ('-m inp_mask', 'inp_mask'), - 'design_matrix': ('-d design.mat', - 'design.mat'), - 't_contrast': ('-t input.con', - 'input.con'), - 'f_contrast': ('-f input.fts', - 'input.fts'), - 'xchange_block_labels': ('-e design.grp', - 'design.grp'), - 'print_unique_perm': ('-q', True), - 'print_info_parallelMode': ('-Q', True), - 'num_permutations': ('-n 10', 10), - 'vox_pvalus': ('-x', True), - 'fstats_only': ('--fonly', True), - 'thresh_free_cluster': ('-T', True), - 'thresh_free_cluster_2Dopt': ('--T2', True), - 'cluster_thresholding': ('-c 0.20', 0.20), - 'cluster_mass_thresholding': ('-C 0.40', 0.40), - 'fcluster_thresholding': ('-F 0.10', 0.10), - 'fcluster_mass_thresholding': ('-S 0.30', 0.30), - 'variance_smoothing': ('-v 0.20', 0.20), - 'diagnostics_off': ('--quiet', True), - 'output_raw': ('-R', True), - 'output_perm_vect': ('-P', True), - 'int_seed': ('--seed=20', 20), - 'TFCE_height_param': ('--tfce_H=0.11', 0.11), - 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), - 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), - 'list_num_voxel_EVs_pos': ('--vxl=' \ - + repr([1, 2, 3, 4]), - repr([1, 2, 3, 4])), - 'list_img_voxel_EVs': ('--vxf=' \ - + repr([6, 7, 8, 9, 3]), - repr([6, 7, 8, 9, 3]))} + opt_map = {'demean_data': ('-D', True), + 'one_sample_gmean': ('-1', True), + 'mask_image': ('-m inp_mask', 'inp_mask'), + 'design_matrix': ('-d design.mat', 'design.mat'), + 't_contrast': ('-t input.con', 'input.con'), + 'f_contrast': ('-f input.fts', 'input.fts'), + 'xchange_block_labels': ('-e design.grp', 'design.grp'), + 'print_unique_perm': ('-q', True), + 'print_info_parallelMode': ('-Q', True), + 'num_permutations': ('-n 10', 10), + 'vox_pvalus': ('-x', True), + 'fstats_only': ('--fonly', True), + 'thresh_free_cluster': ('-T', 
True), + 'thresh_free_cluster_2Dopt': ('--T2', True), + 'cluster_thresholding': ('-c 0.20', 0.20), + 'cluster_mass_thresholding': ('-C 0.40', 0.40), + 'fcluster_thresholding': ('-F 0.10', 0.10), + 'fcluster_mass_thresholding': ('-S 0.30', 0.30), + 'variance_smoothing': ('-v 0.20', 0.20), + 'diagnostics_off': ('--quiet', True), + 'output_raw': ('-R', True), + 'output_perm_vect': ('-P', True), + 'int_seed': ('--seed=20', 20), + 'TFCE_height_param': ('--tfce_H=0.11', 0.11), + 'TFCE_extent_param': ('--tfce_E=0.50', 0.50), + 'TFCE_connectivity': ('--tfce_C=0.30', 0.30), + 'list_num_voxel_EVs_pos': ('--vxl=' + repr([1, 2, 3, 4]), + repr([1, 2, 3, 4])), + 'list_img_voxel_EVs': ('--vxf=' + repr([6, 7, 8, 9, 3]), + repr([6, 7, 8, 9, 3]))} for name, settings in list(opt_map.items()): rand4 = fsl.Randomise_parallel(input_4D='infile', @@ -379,13 +367,13 @@ def test_Vec_reg(): yield assert_equal, results.interface.inputs.affineTmat, 'tmat3.mat' # test arguments for opt_map - opt_map = {'verbose': ('-v', True), - 'helpDoc': ('-h', True), - 'tensor': ('--tensor', True), - 'affineTmat': ('-t Tmat', 'Tmat'), - 'warpFile': ('-w wrpFile', 'wrpFile'), - 'interpolation': ('--interp=sinc', 'sinc'), - 'brainMask': ('-m mask', 'mask')} + opt_map = {'verbose': ('-v', True), + 'helpDoc': ('-h', True), + 'tensor': ('--tensor', True), + 'affineTmat': ('-t Tmat', 'Tmat'), + 'warpFile': ('-w wrpFile', 'wrpFile'), + 'interpolation': ('--interp=sinc', 'sinc'), + 'brainMask': ('-m mask', 'mask')} for name, settings in list(opt_map.items()): vrg4 = fsl.VecReg(infile='infile', outfile='outfile', @@ -440,7 +428,7 @@ def test_tbss_skeleton(): # First by implicit argument skeletor.inputs.skeleton_file = True yield assert_equal, skeletor.cmdline, \ - "tbss_skeleton -i a.nii -o %s" %os.path.join(newdir, "a_skeleton.nii") + "tbss_skeleton -i a.nii -o %s" % os.path.join(newdir, "a_skeleton.nii") # Now with a specific name skeletor.inputs.skeleton_file = "old_boney.nii" @@ -459,14 +447,14 @@ def test_tbss_skeleton(): # Now we get a command line yield assert_equal, bones.cmdline, \ - "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" %(Info.standard_image("LowerCingulum_1mm.nii.gz"), - os.path.join(newdir, "b_skeletonised.nii")) + "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" % (Info.standard_image("LowerCingulum_1mm.nii.gz"), + os.path.join(newdir, "b_skeletonised.nii")) # Can we specify a mask? 
bones.inputs.use_cingulum_mask = Undefined bones.inputs.search_mask_file = "a.nii" yield assert_equal, bones.cmdline, \ - "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" %os.path.join(newdir, "b_skeletonised.nii") + "tbss_skeleton -i a.nii -p 0.200 b.nii a.nii b.nii %s" % os.path.join(newdir, "b_skeletonised.nii") # Looks good; clean up clean_directory(newdir, olddir) @@ -488,19 +476,18 @@ def test_distancemap(): mapper.inputs.in_file = "a.nii" # It should - yield assert_equal, mapper.cmdline, "distancemap --out=%s --in=a.nii" %os.path.join(newdir, "a_dstmap.nii") + yield assert_equal, mapper.cmdline, "distancemap --out=%s --in=a.nii" % os.path.join(newdir, "a_dstmap.nii") # And we should be able to write out a maxima map mapper.inputs.local_max_file = True yield assert_equal, mapper.cmdline, \ - "distancemap --out=%s --in=a.nii --localmax=%s" %(os.path.join(newdir, "a_dstmap.nii"), - os.path.join(newdir, "a_lclmax.nii")) + "distancemap --out=%s --in=a.nii --localmax=%s" % (os.path.join(newdir, "a_dstmap.nii"), + os.path.join(newdir, "a_lclmax.nii")) # And call it whatever we want mapper.inputs.local_max_file = "max.nii" yield assert_equal, mapper.cmdline, \ - "distancemap --out=%s --in=a.nii --localmax=max.nii" %os.path.join(newdir, "a_dstmap.nii") + "distancemap --out=%s --in=a.nii --localmax=max.nii" % os.path.join(newdir, "a_dstmap.nii") # Not much else to do here clean_directory(newdir, olddir) - diff --git a/nipype/interfaces/fsl/tests/test_epi.py b/nipype/interfaces/fsl/tests/test_epi.py index ec209808c2..4f2b0ed2c3 100644 --- a/nipype/interfaces/fsl/tests/test_epi.py +++ b/nipype/interfaces/fsl/tests/test_epi.py @@ -10,7 +10,7 @@ import nibabel as nb from nipype.testing import (assert_equal, assert_not_equal, - assert_raises, skipif) + assert_raises, skipif) import nipype.interfaces.fsl.epi as fsl from nipype.interfaces.fsl import no_fsl @@ -52,15 +52,12 @@ def test_eddy_correct2(): eddy.inputs.in_file = filelist[0] eddy.inputs.out_file = 'foo_eddc.nii' eddy.inputs.ref_num = 100 - yield assert_equal, eddy.cmdline, 'eddy_correct %s foo_eddc.nii 100' %filelist[0] + yield assert_equal, eddy.cmdline, 'eddy_correct %s foo_eddc.nii 100' % filelist[0] # .run based parameter setting eddy2 = fsl.EddyCorrect(in_file=filelist[0], out_file='foo_ec.nii', ref_num=20) - yield assert_equal, eddy2.cmdline, 'eddy_correct %s foo_ec.nii 20' %filelist[0] + yield assert_equal, eddy2.cmdline, 'eddy_correct %s foo_ec.nii 20' % filelist[0] # test arguments for opt_map # eddy_correct class doesn't have opt_map{} clean_directory(outdir, cwd) - - - diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index 592b1f90f4..d4003e8d74 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -72,7 +72,7 @@ def test_maths_base(fsl_output_type=None): out_file = "a_maths%s" % out_ext # Now test the most basic command line - yield assert_equal, maths.cmdline, "fslmaths a.nii %s" %os.path.join(testdir, out_file) + yield assert_equal, maths.cmdline, "fslmaths a.nii %s" % os.path.join(testdir, out_file) # Now test that we can set the various data types dtypes = ["float", "char", "int", "short", "double", "input"] @@ -81,11 +81,11 @@ def test_maths_base(fsl_output_type=None): duo_cmdline = "fslmaths -dt %s a.nii " + os.path.join(testdir, out_file) + " -odt %s" for dtype in dtypes: foo = fsl.MathsCommand(in_file="a.nii", internal_datatype=dtype) - yield assert_equal, foo.cmdline, int_cmdline %dtype + yield assert_equal, 
foo.cmdline, int_cmdline % dtype bar = fsl.MathsCommand(in_file="a.nii", output_datatype=dtype) yield assert_equal, bar.cmdline, out_cmdline % dtype foobar = fsl.MathsCommand(in_file="a.nii", internal_datatype=dtype, output_datatype=dtype) - yield assert_equal, foobar.cmdline, duo_cmdline %(dtype, dtype) + yield assert_equal, foobar.cmdline, duo_cmdline % (dtype, dtype) # Test that we can ask for an outfile name maths.inputs.out_file = "b.nii" @@ -122,7 +122,7 @@ def test_changedt(fsl_output_type=None): cmdline = "fslmaths a.nii b.nii -odt %s" for dtype in dtypes: foo = fsl.MathsCommand(in_file="a.nii", out_file="b.nii", output_datatype=dtype) - yield assert_equal, foo.cmdline, cmdline %dtype + yield assert_equal, foo.cmdline, cmdline % dtype # Clean up our mess clean_directory(testdir, origdir) @@ -147,19 +147,19 @@ def test_threshold(fsl_output_type=None): cmdline = "fslmaths a.nii %s b.nii" for val in [0, 0., -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: thresh.inputs.thresh = val - yield assert_equal, thresh.cmdline, cmdline %"-thr %.10f" %val + yield assert_equal, thresh.cmdline, cmdline % "-thr %.10f" % val - val = "%.10f" %42 + val = "%.10f" % 42 thresh = fsl.Threshold(in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True) - yield assert_equal, thresh.cmdline, cmdline %("-thrp "+val) + yield assert_equal, thresh.cmdline, cmdline % ("-thrp " + val) thresh.inputs.use_nonzero_voxels = True - yield assert_equal, thresh.cmdline, cmdline %("-thrP "+val) + yield assert_equal, thresh.cmdline, cmdline % ("-thrP " + val) thresh = fsl.Threshold(in_file="a.nii", out_file="b.nii", thresh=42, direction="above") - yield assert_equal, thresh.cmdline, cmdline %("-uthr "+val) + yield assert_equal, thresh.cmdline, cmdline % ("-uthr " + val) thresh.inputs.use_robust_range = True - yield assert_equal, thresh.cmdline, cmdline %("-uthrp "+val) + yield assert_equal, thresh.cmdline, cmdline % ("-uthrp " + val) thresh.inputs.use_nonzero_voxels = True - yield assert_equal, thresh.cmdline, cmdline %("-uthrP "+val) + yield assert_equal, thresh.cmdline, cmdline % ("-uthrP " + val) # Clean up our mess clean_directory(testdir, origdir) @@ -184,11 +184,11 @@ def test_meanimage(fsl_output_type=None): cmdline = "fslmaths a.nii -%smean b.nii" for dim in ["X", "Y", "Z", "T"]: meaner.inputs.dimension = dim - yield assert_equal, meaner.cmdline, cmdline %dim + yield assert_equal, meaner.cmdline, cmdline % dim # Test the auto naming meaner = fsl.MeanImage(in_file="a.nii") - yield assert_equal, meaner.cmdline, "fslmaths a.nii -Tmean %s" %os.path.join(testdir, "a_mean%s" % out_ext) + yield assert_equal, meaner.cmdline, "fslmaths a.nii -Tmean %s" % os.path.join(testdir, "a_mean%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -213,11 +213,11 @@ def test_maximage(fsl_output_type=None): cmdline = "fslmaths a.nii -%smax b.nii" for dim in ["X", "Y", "Z", "T"]: maxer.inputs.dimension = dim - yield assert_equal, maxer.cmdline, cmdline %dim + yield assert_equal, maxer.cmdline, cmdline % dim # Test the auto naming maxer = fsl.MaxImage(in_file="a.nii") - yield assert_equal, maxer.cmdline, "fslmaths a.nii -Tmax %s" %os.path.join(testdir, "a_max%s" % out_ext) + yield assert_equal, maxer.cmdline, "fslmaths a.nii -Tmax %s" % os.path.join(testdir, "a_max%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -242,14 +242,14 @@ def test_smooth(fsl_output_type=None): cmdline = "fslmaths a.nii -s %.5f b.nii" for val in [0, 1., 1, 25, 0.5, 8 / 3.]: smoother = fsl.IsotropicSmooth(in_file="a.nii", 
out_file="b.nii", sigma=val) - yield assert_equal, smoother.cmdline, cmdline %val + yield assert_equal, smoother.cmdline, cmdline % val smoother = fsl.IsotropicSmooth(in_file="a.nii", out_file="b.nii", fwhm=val) val = float(val) / np.sqrt(8 * np.log(2)) - yield assert_equal, smoother.cmdline, cmdline %val + yield assert_equal, smoother.cmdline, cmdline % val # Test automatic naming smoother = fsl.IsotropicSmooth(in_file="a.nii", sigma=5) - yield assert_equal, smoother.cmdline, "fslmaths a.nii -s %.5f %s" %(5, os.path.join(testdir, "a_smooth%s" % out_ext)) + yield assert_equal, smoother.cmdline, "fslmaths a.nii -s %.5f %s" % (5, os.path.join(testdir, "a_smooth%s" % out_ext)) # Clean up our mess clean_directory(testdir, origdir) @@ -276,7 +276,7 @@ def test_mask(fsl_output_type=None): # Test auto name generation masker = fsl.ApplyMask(in_file="a.nii", mask_file="b.nii") - yield assert_equal, masker.cmdline, "fslmaths a.nii -mas b.nii "+os.path.join(testdir, "a_masked%s" % out_ext) + yield assert_equal, masker.cmdline, "fslmaths a.nii -mas b.nii " + os.path.join(testdir, "a_masked%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -301,14 +301,14 @@ def test_dilation(fsl_output_type=None): for op in ["mean", "modal", "max"]: cv = dict(mean="M", modal="D", max="F") diller.inputs.operation = op - yield assert_equal, diller.cmdline, "fslmaths a.nii -dil%s b.nii" %cv[op] + yield assert_equal, diller.cmdline, "fslmaths a.nii -dil%s b.nii" % cv[op] # Now test the different kernel options for k in ["3D", "2D", "box", "boxv", "gauss", "sphere"]: for size in [1, 1.5, 5]: diller.inputs.kernel_shape = k diller.inputs.kernel_size = size - yield assert_equal, diller.cmdline, "fslmaths a.nii -kernel %s %.4f -dilF b.nii" %(k, size) + yield assert_equal, diller.cmdline, "fslmaths a.nii -kernel %s %.4f -dilF b.nii" % (k, size) # Test that we can use a file kernel f = open("kernel.txt", "w").close() @@ -320,7 +320,7 @@ def test_dilation(fsl_output_type=None): # Test that we don't need to request an out name dil = fsl.DilateImage(in_file="a.nii", operation="max") - yield assert_equal, dil.cmdline, "fslmaths a.nii -dilF %s" %os.path.join(testdir, "a_dil%s" % out_ext) + yield assert_equal, dil.cmdline, "fslmaths a.nii -dilF %s" % os.path.join(testdir, "a_dil%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -347,7 +347,7 @@ def test_erosion(fsl_output_type=None): # Test that we don't need to request an out name erode = fsl.ErodeImage(in_file="a.nii") - yield assert_equal, erode.cmdline, "fslmaths a.nii -ero %s" %os.path.join(testdir, "a_ero%s" % out_ext) + yield assert_equal, erode.cmdline, "fslmaths a.nii -ero %s" % os.path.join(testdir, "a_ero%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -371,11 +371,11 @@ def test_spatial_filter(fsl_output_type=None): # Test the different operations for op in ["mean", "meanu", "median"]: filter.inputs.operation = op - yield assert_equal, filter.cmdline, "fslmaths a.nii -f%s b.nii" %op + yield assert_equal, filter.cmdline, "fslmaths a.nii -f%s b.nii" % op # Test that we don't need to ask for an out name filter = fsl.SpatialFilter(in_file="a.nii", operation="mean") - yield assert_equal, filter.cmdline, "fslmaths a.nii -fmean %s" %os.path.join(testdir, "a_filt%s" % out_ext) + yield assert_equal, filter.cmdline, "fslmaths a.nii -fmean %s" % os.path.join(testdir, "a_filt%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -400,12 +400,12 @@ def test_unarymaths(fsl_output_type=None): ops = ["exp", 
"log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", "index"] for op in ops: maths.inputs.operation = op - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii" %op + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii" % op # Test that we don't need to ask for an out file for op in ops: maths = fsl.UnaryMaths(in_file="a.nii", operation=op) - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %s" %(op, os.path.join(testdir, "a_%s%s" %(op, out_ext))) + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %s" % (op, os.path.join(testdir, "a_%s%s" % (op, out_ext))) # Clean up our mess clean_directory(testdir, origdir) @@ -434,15 +434,15 @@ def test_binarymaths(fsl_output_type=None): maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii", operation=op) if ent == "b.nii": maths.inputs.operand_file = ent - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii c.nii" %op + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii c.nii" % op else: maths.inputs.operand_value = ent - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %.8f c.nii" %(op, ent) + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s %.8f c.nii" % (op, ent) # Test that we don't need to ask for an out file for op in ops: maths = fsl.BinaryMaths(in_file="a.nii", operation=op, operand_file="b.nii") - yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii %s" %(op, os.path.join(testdir, "a_maths%s" % out_ext)) + yield assert_equal, maths.cmdline, "fslmaths a.nii -%s b.nii %s" % (op, os.path.join(testdir, "a_maths%s" % out_ext)) # Clean up our mess clean_directory(testdir, origdir) @@ -470,12 +470,12 @@ def test_multimaths(fsl_output_type=None): "-mas %s -add %s"] for ostr in opstrings: maths.inputs.op_string = ostr - yield assert_equal, maths.cmdline, "fslmaths a.nii %s c.nii" %ostr %("a.nii", "b.nii") + yield assert_equal, maths.cmdline, "fslmaths a.nii %s c.nii" % ostr % ("a.nii", "b.nii") # Test that we don't need to ask for an out file maths = fsl.MultiImageMaths(in_file="a.nii", op_string="-add %s -mul 5", operand_files=["b.nii"]) yield assert_equal, maths.cmdline, \ - "fslmaths a.nii -add b.nii -mul 5 %s" %os.path.join(testdir, "a_maths%s" % out_ext) + "fslmaths a.nii -add b.nii -mul 5 %s" % os.path.join(testdir, "a_maths%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) @@ -501,12 +501,12 @@ def test_tempfilt(fsl_output_type=None): for win in windows: filt.inputs.highpass_sigma = win[0] filt.inputs.lowpass_sigma = win[1] - yield assert_equal, filt.cmdline, "fslmaths a.nii -bptf %.6f %.6f b.nii" %win + yield assert_equal, filt.cmdline, "fslmaths a.nii -bptf %.6f %.6f b.nii" % win # Test that we don't need to ask for an out file filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma=64) yield assert_equal, filt.cmdline, \ - "fslmaths a.nii -bptf 64.000000 -1.000000 %s" %os.path.join(testdir, "a_filt%s" % out_ext) + "fslmaths a.nii -bptf 64.000000 -1.000000 %s" % os.path.join(testdir, "a_filt%s" % out_ext) # Clean up our mess clean_directory(testdir, origdir) diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index 8ae16e6c28..1065fad6a5 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -74,19 +74,19 @@ def func(): # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity opt_map = { - 'outline': ('-o', True), - 'mask': ('-m', True), - 'skull': ('-s', True), - 'no_output': 
('-n', True), - 'frac': ('-f 0.40', 0.4), - 'vertical_gradient': ('-g 0.75', 0.75), - 'radius': ('-r 20', 20), - 'center': ('-c 54 75 80', [54, 75, 80]), - 'threshold': ('-t', True), - 'mesh': ('-e', True), - 'surfaces': ('-A', True) - # 'verbose': ('-v', True), - # 'flags': ('--i-made-this-up', '--i-made-this-up'), + 'outline': ('-o', True), + 'mask': ('-m', True), + 'skull': ('-s', True), + 'no_output': ('-n', True), + 'frac': ('-f 0.40', 0.4), + 'vertical_gradient': ('-g 0.75', 0.75), + 'radius': ('-r 20', 20), + 'center': ('-c 54 75 80', [54, 75, 80]), + 'threshold': ('-t', True), + 'mesh': ('-e', True), + 'surfaces': ('-A', True) + # 'verbose': ('-v', True), + # 'flags': ('--i-made-this-up', '--i-made-this-up'), } # Currently we don't test -R, -S, -B, -Z, -F, -A or -A2 @@ -129,32 +129,31 @@ def test_fast(): # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity - opt_map = {'number_classes': ('-n 4', 4), - 'bias_iters': ('-I 5', 5), - 'bias_lowpass': ('-l 15', 15), - 'img_type': ('-t 2', 2), - 'init_seg_smooth': ('-f 0.035', 0.035), - 'segments': ('-g', True), - 'init_transform': ('-a %s' % (tmp_infile), '%s' % (tmp_infile)), - 'other_priors': ('-A %s %s %s' % (tmp_infile, tmp_infile, - tmp_infile), - (['%s' % (tmp_infile), - '%s' % (tmp_infile), - '%s' % (tmp_infile)])), - 'no_pve': ('--nopve', True), - 'output_biasfield': ('-b', True), + opt_map = {'number_classes': ('-n 4', 4), + 'bias_iters': ('-I 5', 5), + 'bias_lowpass': ('-l 15', 15), + 'img_type': ('-t 2', 2), + 'init_seg_smooth': ('-f 0.035', 0.035), + 'segments': ('-g', True), + 'init_transform': ('-a %s' % (tmp_infile), '%s' % (tmp_infile)), + 'other_priors': ('-A %s %s %s' % (tmp_infile, tmp_infile, + tmp_infile), + (['%s' % (tmp_infile), + '%s' % (tmp_infile), + '%s' % (tmp_infile)])), + 'no_pve': ('--nopve', True), + 'output_biasfield': ('-b', True), 'output_biascorrected': ('-B', True), - 'no_bias': ('-N', True), - 'out_basename': ('-o fasted', 'fasted'), - 'use_priors': ('-P', True), - 'segment_iters': ('-W 14', 14), - 'mixel_smooth': ('-R 0.25', 0.25), - 'iters_afterbias': ('-O 3', 3), - 'hyper': ('-H 0.15', 0.15), - 'verbose': ('-v', True), - 'manual_seg': ('-s %s' % (tmp_infile), - '%s' % (tmp_infile)), - 'probability_maps': ('-p', True), + 'no_bias': ('-N', True), + 'out_basename': ('-o fasted', 'fasted'), + 'use_priors': ('-P', True), + 'segment_iters': ('-W 14', 14), + 'mixel_smooth': ('-R 0.25', 0.25), + 'iters_afterbias': ('-O 3', 3), + 'hyper': ('-H 0.15', 0.15), + 'verbose': ('-v', True), + 'manual_seg': ('-s %s' % (tmp_infile), '%s' % (tmp_infile)), + 'probability_maps': ('-p', True), } # test each of our arguments @@ -299,21 +298,21 @@ def test_mcflirt(): yield assert_equal, frt.cmdline, realcmd opt_map = { - 'cost': ('-cost mutualinfo', 'mutualinfo'), - 'bins': ('-bins 256', 256), - 'dof': ('-dof 6', 6), - 'ref_vol': ('-refvol 2', 2), - 'scaling': ('-scaling 6.00', 6.00), - 'smooth': ('-smooth 1.00', 1.00), - 'rotation': ('-rotation 2', 2), - 'stages': ('-stages 3', 3), - 'init': ('-init %s' % (infile), infile), + 'cost': ('-cost mutualinfo', 'mutualinfo'), + 'bins': ('-bins 256', 256), + 'dof': ('-dof 6', 6), + 'ref_vol': ('-refvol 2', 2), + 'scaling': ('-scaling 6.00', 6.00), + 'smooth': ('-smooth 1.00', 1.00), + 'rotation': ('-rotation 2', 2), + 'stages': ('-stages 3', 3), + 'init': ('-init %s' % (infile), infile), 'use_gradient': ('-gdt', True), - 'use_contour': ('-edge', True), - 'mean_vol': ('-meanvol', True), - 'stats_imgs': ('-stats', True), - 
'save_mats': ('-mats', True), - 'save_plots': ('-plots', True), + 'use_contour': ('-edge', True), + 'mean_vol': ('-meanvol', True), + 'stats_imgs': ('-stats', True), + 'save_mats': ('-mats', True), + 'save_plots': ('-plots', True), } for name, settings in list(opt_map.items()): @@ -369,7 +368,7 @@ def test_fnirt(): elif item in ('in_fwhm'): cmd = 'fnirt --in=%s %s=%s --logout=%s '\ '--ref=%s --iout=%s' % (infile, flag, - strval, log, reffile, iout) + strval, log, reffile, iout) elif item.startswith('apply'): cmd = 'fnirt %s=%s '\ '--in=%s '\ @@ -396,17 +395,17 @@ def test_fnirt(): # test files opt_map = { - 'affine_file': ('--aff='), - 'inwarp_file': ('--inwarp='), + 'affine_file': ('--aff='), + 'inwarp_file': ('--inwarp='), 'in_intensitymap_file': ('--intin='), - 'config_file': ('--config='), - 'refmask_file': ('--refmask='), - 'inmask_file': ('--inmask='), - 'field_file': ('--fout='), - 'jacobian_file': ('--jout='), - 'modulatedref_file': ('--refout='), + 'config_file': ('--config='), + 'refmask_file': ('--refmask='), + 'inmask_file': ('--inmask='), + 'field_file': ('--fout='), + 'jacobian_file': ('--jout='), + 'modulatedref_file': ('--refout='), 'out_intensitymap_file': ('--intout='), - 'log_file': ('--logout=')} + 'log_file': ('--logout=')} for name, settings in list(opt_map.items()): fnirt = fsl.FNIRT(in_file=infile, @@ -457,9 +456,9 @@ def test_fnirt(): def test_applywarp(): tmpdir, infile, reffile = setup_flirt() opt_map = { - 'out_file': ('--out=bar.nii', 'bar.nii'), - 'premat': ('--premat=%s' % (reffile), reffile), - 'postmat': ('--postmat=%s' % (reffile), reffile), + 'out_file': ('--out=bar.nii', 'bar.nii'), + 'premat': ('--premat=%s' % (reffile), reffile), + 'postmat': ('--postmat=%s' % (reffile), reffile), } # in_file, ref_file, field_file mandatory diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 214f308e03..c98141dfd1 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -739,8 +739,8 @@ def _list_outputs(self): out_file = self.inputs.out_file if not isdefined(out_file): if isdefined(self.inputs.stat_image2) and ( - not isdefined(self.inputs.show_negative_stats) - or not self.inputs.show_negative_stats): + not isdefined(self.inputs.show_negative_stats) or not + self.inputs.show_negative_stats): stem = "%s_and_%s" % (split_filename(self.inputs.stat_image)[1], split_filename(self.inputs.stat_image2)[1]) else: @@ -1292,7 +1292,7 @@ def _list_outputs(self): if not isdefined(outputs['out_file']) and \ isdefined(self.inputs.in_file): outputs['out_file'] = self._gen_fname(self.inputs.in_file, - suffix='_sigloss') + suffix='_sigloss') return outputs def _gen_filename(self, name): @@ -1441,15 +1441,15 @@ class ComplexInputSpec(FSLCommandInputSpec): 'complex_split', 'complex_merge', ] complex_out_file = File(genfile=True, argstr="%s", position=-3, - xor=_ofs+_conversion[:2]) + xor=_ofs + _conversion[:2]) magnitude_out_file = File(genfile=True, argstr="%s", position=-4, - xor=_ofs[:1]+_ofs[3:]+_conversion[1:]) + xor=_ofs[:1] + _ofs[3:] + _conversion[1:]) phase_out_file = File(genfile=True, argstr="%s", position=-3, - xor=_ofs[:1]+_ofs[3:]+_conversion[1:]) + xor=_ofs[:1] + _ofs[3:] + _conversion[1:]) real_out_file = File(genfile=True, argstr="%s", position=-4, - xor=_ofs[:3]+_conversion[:1]+_conversion[2:]) + xor=_ofs[:3] + _conversion[:1] + _conversion[2:]) imaginary_out_file = File(genfile=True, argstr="%s", position=-3, - xor=_ofs[:3]+_conversion[:1]+_conversion[2:]) + xor=_ofs[:3] + _conversion[:1] + _conversion[2:]) 
start_vol = traits.Int(position=-2, argstr='%d') end_vol = traits.Int(position=-1, argstr='%d') @@ -1502,12 +1502,12 @@ class Complex(FSLCommand): output_spec = ComplexOuputSpec def _parse_inputs(self, skip=None): - if skip == None: + if skip is None: skip = [] if self.inputs.real_cartesian: skip += self.inputs._ofs[:3] elif self.inputs.real_polar: - skip += self.inputs._ofs[:1]+self.inputs._ofs[3:] + skip += self.inputs._ofs[:1] + self.inputs._ofs[3:] else: skip += self.inputs._ofs[1:] return super(Complex, self)._parse_inputs(skip) @@ -1686,7 +1686,7 @@ class ConvertWarpInputSpec(FSLCommandInputSpec): 'of subjects.')) midmat = File(exists=True, argstr="--midmat=%s", - desc="Name of file containing mid-warp-affine transform") + desc="Name of file containing mid-warp-affine transform") warp2 = File(exists=True, argstr='--warp2=%s', desc=('Name of file containing secondary warp-fields/coefficients (after warp1/midmat but before postmat). This could e.g. be a ' @@ -1870,7 +1870,7 @@ def _vtk_to_coords(self, in_file, out_file=None): except ImportError: raise ImportError('This interface requires tvtk to run.') - reader = tvtk.PolyDataReader(file_name=in_file+'.vtk') + reader = tvtk.PolyDataReader(file_name=in_file + '.vtk') reader.update() points = reader.output.points @@ -1933,7 +1933,7 @@ def _run_interface(self, runtime): runtime = super(WarpPoints, self)._run_interface(runtime) newpoints = np.fromstring('\n'.join(runtime.stdout.split('\n')[1:]), sep=' ') - if not tmpfile is None: + if tmpfile is not None: try: os.remove(tmpfile.name) except: diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 03123c39ae..ed5c0b5f9f 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -456,7 +456,7 @@ def localtos3(self, paths): # convert local path to s3 path bd_index = path.find(self.inputs.base_directory) if bd_index != -1: # base_directory is in path, maintain directory structure - s3path = path[bd_index+len(self.inputs.base_directory):] # cut out base directory + s3path = path[bd_index + len(self.inputs.base_directory):] # cut out base directory if s3path[0] == os.path.sep: s3path = s3path[1:] else: # base_directory isn't in path, simply place all files in bucket_path folder @@ -546,7 +546,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: - if not key in self.inputs.template_args: + if key not in self.inputs.template_args: if infields: self.inputs.template_args[key] = [infields] else: @@ -789,7 +789,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): if not isdefined(self.inputs.template_args): self.inputs.template_args = {} for key in outfields: - if not key in self.inputs.template_args: + if key not in self.inputs.template_args: if infields: self.inputs.template_args[key] = [infields] else: @@ -1100,7 +1100,7 @@ def _match_path(self, target_path): return # Check if we can match the path match = self.match_regex.search(target_path) - if not match is None: + if match is not None: match_dict = match.groupdict() if self.result is None: self.result = {'out_paths': []} @@ -1156,8 +1156,7 @@ def _run_interface(self, runtime): full_path = os.path.join(curr_dir, infile) self._match_path(full_path) if (self.inputs.unpack_single and - len(self.result['out_paths']) == 1 - ): + len(self.result['out_paths']) == 1): for key, vals in self.result.items(): self.result[key] = vals[0] else: @@ -1578,7 +1577,7 @@ class XNATSinkInputSpec(DynamicTraitedSpec, 
BaseInterfaceInputSpec): desc=('Option to share the subjects from the original project' 'instead of creating new ones when possible - the created ' 'experiments are then shared back to the original project' - ), + ), usedefault=True) def __setattr__(self, key, value): diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index e3e9081433..fe9d4457a6 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -32,7 +32,7 @@ class MatlabInputSpec(CommandLineInputSpec): """ Basic expected inputs to Matlab interface """ script = traits.Str(argstr='-r \"%s;exit\"', desc='m-code to run', - mandatory=True, position=-1) + mandatory=True, position=-1) uses_mcr = traits.Bool(desc='use MCR interface', xor=['nodesktop', 'nosplash', 'single_comp_thread'], @@ -51,7 +51,7 @@ class MatlabInputSpec(CommandLineInputSpec): nohash=True) # non-commandline options mfile = traits.Bool(True, desc='Run m-code using m-file', - usedefault=True) + usedefault=True) script_file = File('pyscript.m', usedefault=True, desc='Name of file to write m-code to') paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath') @@ -180,7 +180,7 @@ def _gen_matlab_command(self, argstr, script_lines): # clean up the code of comments and replace newlines with commas script_lines = ','.join([line for line in script_lines.split("\n") if not line.strip().startswith("%")]) - script_lines = '\n'.join(prescript)+script_lines+'\n'.join(postscript) + script_lines = '\n'.join(prescript) + script_lines + '\n'.join(postscript) if mfile: with open(os.path.join(cwd, self.inputs.script_file), 'wt') as mfile: mfile.write(script_lines) diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index 36a25a2606..f5a891465e 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -38,7 +38,7 @@ class MeshFixInputSpec(CommandLineInputSpec): uniform_remeshing_steps = traits.Int(argstr='-u %d', requires=['uniform_remeshing_vertices'], desc="Number of steps for uniform remeshing of the whole mesh") - uniform_remeshing_vertices = traits.Int(argstr='--vertices %d', requires=['uniform_remeshing_steps'], desc="Constrains the number of vertices." \ + uniform_remeshing_vertices = traits.Int(argstr='--vertices %d', requires=['uniform_remeshing_steps'], desc="Constrains the number of vertices." "Must be used with uniform_remeshing_steps") laplacian_smoothing_steps = traits.Int(argstr='--smooth %d', desc="The number of laplacian smoothing steps to apply") @@ -48,23 +48,23 @@ class MeshFixInputSpec(CommandLineInputSpec): # Cutting, decoupling, dilation cut_outer = traits.Int(argstr='--cut-outer %d', desc="Remove triangles of 1st that are outside of the 2nd shell.") cut_inner = traits.Int(argstr='--cut-inner %d', desc="Remove triangles of 1st that are inside of the 2nd shell. Dilate 2nd by N; Fill holes and keep only 1st afterwards.") - decouple_inin = traits.Int(argstr='--decouple-inin %d', desc="Treat 1st file as inner, 2nd file as outer component." \ + decouple_inin = traits.Int(argstr='--decouple-inin %d', desc="Treat 1st file as inner, 2nd file as outer component." "Resolve overlaps by moving inners triangles inwards. Constrain the min distance between the components > d.") - decouple_outin = traits.Int(argstr='--decouple-outin %d', desc="Treat 1st file as outer, 2nd file as inner component." \ + decouple_outin = traits.Int(argstr='--decouple-outin %d', desc="Treat 1st file as outer, 2nd file as inner component." "Resolve overlaps by moving outers triangles inwards. 
Constrain the min distance between the components > d.") - decouple_outout = traits.Int(argstr='--decouple-outout %d', desc="Treat 1st file as outer, 2nd file as inner component." \ + decouple_outout = traits.Int(argstr='--decouple-outout %d', desc="Treat 1st file as outer, 2nd file as inner component." "Resolve overlaps by moving outers triangles outwards. Constrain the min distance between the components > d.") finetuning_inwards = traits.Bool(argstr='--fineTuneIn ', requires=['finetuning_distance', 'finetuning_substeps']) finetuning_outwards = traits.Bool(argstr='--fineTuneIn ', requires=['finetuning_distance', 'finetuning_substeps'], xor=['finetuning_inwards'], desc='Similar to finetuning_inwards, but ensures minimal distance in the other direction') - finetuning_distance = traits.Float(argstr='%f', requires=['finetuning_substeps'], desc="Used to fine-tune the minimal distance between surfaces." \ + finetuning_distance = traits.Float(argstr='%f', requires=['finetuning_substeps'], desc="Used to fine-tune the minimal distance between surfaces." "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)") - finetuning_substeps = traits.Int(argstr='%d', requires=['finetuning_distance'], desc="Used to fine-tune the minimal distance between surfaces." \ + finetuning_substeps = traits.Int(argstr='%d', requires=['finetuning_distance'], desc="Used to fine-tune the minimal distance between surfaces." "A minimal distance d is ensured, and reached in n substeps. When using the surfaces for subsequent volume meshing by gmsh, this step prevent too flat tetrahedra2)") dilation = traits.Int(argstr='--dilate %d', desc="Dilate the surface by d. d < 0 means shrinking.") - set_intersections_to_one = traits.Bool(argstr='--intersect', desc="If the mesh contains intersections, return value = 1." \ + set_intersections_to_one = traits.Bool(argstr='--intersect', desc="If the mesh contains intersections, return value = 1." 
"If saved in gmsh format, intersections will be highlighted.") in_file1 = File(exists=True, argstr="%s", position=1, mandatory=True) diff --git a/nipype/interfaces/mrtrix/__init__.py b/nipype/interfaces/mrtrix/__init__.py index a71b85c6a1..4b5a4270d5 100644 --- a/nipype/interfaces/mrtrix/__init__.py +++ b/nipype/interfaces/mrtrix/__init__.py @@ -11,4 +11,4 @@ GenerateWhiteMatterMask, DWI2Tensor, Tensor2ApparentDiffusion, Tensor2FractionalAnisotropy, Tensor2Vector, MedianFilter3D, Erode, Threshold) -from .convert import MRTrix2TrackVis \ No newline at end of file +from .convert import MRTrix2TrackVis diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py index b7369524ae..711ea2107b 100644 --- a/nipype/interfaces/mrtrix/convert.py +++ b/nipype/interfaces/mrtrix/convert.py @@ -13,7 +13,8 @@ # -*- coding: utf-8 -*- import os.path as op -import nibabel as nb, nibabel.trackvis as trk +import nibabel as nb +import nibabel.trackvis as trk import numpy as np from nibabel.trackvis import HeaderError from nibabel.volumeutils import native_code @@ -83,7 +84,7 @@ def read_mrtrix_streamlines(in_file, header, as_generator=True): endianness = native_code f4dt = np.dtype(endianness + 'f4') pt_cols = 3 - bytesize = pt_cols*4 + bytesize = pt_cols * 4 def points_per_track(offset): n_streams = 0 @@ -95,12 +96,12 @@ def points_per_track(offset): pts = np.ndarray(shape=(num_triplets, pt_cols), dtype='f4', buffer=all_str) nonfinite_list = np.where(np.isfinite(pts[:, 2]) == False) nonfinite_list = list(nonfinite_list[0])[0:-1] # Converts numpy array to list, removes the last value - nonfinite_list_bytes = [offset+x*bytesize for x in nonfinite_list] + nonfinite_list_bytes = [offset + x * bytesize for x in nonfinite_list] for idx, value in enumerate(nonfinite_list): if idx == 0: track_points.append(nonfinite_list[idx]) else: - track_points.append(nonfinite_list[idx]-nonfinite_list[idx-1]-1) + track_points.append(nonfinite_list[idx] - nonfinite_list[idx - 1] - 1) return track_points, nonfinite_list def track_gen(track_points): @@ -117,9 +118,9 @@ def track_gen(track_points): if not n_streams == stream_count: raise HeaderError( 'Expecting %s points, found only %s' % ( - stream_count, n_streams)) + stream_count, n_streams)) iflogger.error('Expecting %s points, found only %s' % ( - stream_count, n_streams)) + stream_count, n_streams)) break pts = np.ndarray( shape=(n_pts, pt_cols), @@ -139,7 +140,7 @@ def track_gen(track_points): iflogger.info('100% : {n} tracks read'.format(n=n_streams)) raise StopIteration if n_streams % int(stream_count / 100) == 0: - percent = int(float(n_streams)/float(stream_count)*100) + percent = int(float(n_streams) / float(stream_count) * 100) iflogger.info('{p}% : {n} tracks read'.format(p=percent, n=n_streams)) track_points, nonfinite_list = points_per_track(offset) fileobj.seek(offset) @@ -217,7 +218,7 @@ def _run_interface(self, runtime): iflogger.info(aff) axcode = aff2axcodes(reg_affine) - trk_header['voxel_order'] = axcode[0]+axcode[1]+axcode[2] + trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2] final_streamlines = move_streamlines(transformed_streamlines, aff) trk_tracks = ((ii, None, None) for ii in final_streamlines) @@ -228,7 +229,7 @@ def _run_interface(self, runtime): else: iflogger.info('Applying transformation from scanner coordinates to {img}'.format(img=self.inputs.image_file)) axcode = aff2axcodes(affine) - trk_header['voxel_order'] = axcode[0]+axcode[1]+axcode[2] + trk_header['voxel_order'] = axcode[0] + axcode[1] + axcode[2] 
trk_header['vox_to_ras'] = affine transformed_streamlines = transform_to_affine(streamlines, trk_header, affine) trk_tracks = ((ii, None, None) for ii in transformed_streamlines) diff --git a/nipype/interfaces/mrtrix/tensors.py b/nipype/interfaces/mrtrix/tensors.py index 5577f0d564..3ef2ecc901 100644 --- a/nipype/interfaces/mrtrix/tensors.py +++ b/nipype/interfaces/mrtrix/tensors.py @@ -104,7 +104,7 @@ class ConstrainedSphericalDeconvolutionInputSpec(CommandLineInputSpec): encoding_file = File(exists=True, argstr='-grad %s', position=1, desc='Gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient, and b gives the b-value in units (1000 s/mm^2). See FSL2MRTrix') filter_file = File(exists=True, argstr='-filter %s', position=-2, - desc='a text file containing the filtering coefficients for each even harmonic order.' \ + desc='a text file containing the filtering coefficients for each even harmonic order.' 'the linear frequency filtering parameters used for the initial linear spherical deconvolution step (default = [ 1 1 1 0 0 ]).') lambda_value = traits.Float(argstr='-lambda %s', desc='the regularisation parameter lambda that controls the strength of the constraint (default = 1.0).') @@ -268,7 +268,7 @@ class FSL2MRTrixInputSpec(TraitedSpec): class FSL2MRTrixOutputSpec(TraitedSpec): - encoding_file = File(desc='The gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient' \ + encoding_file = File(desc='The gradient encoding, supplied as a 4xN text file with each line is in the format [ X Y Z b ], where [ X Y Z ] describe the direction of the applied gradient' 'and b gives the b-value in units (1000 s/mm^2).') @@ -353,7 +353,7 @@ class FindShPeaksInputSpec(CommandLineInputSpec): peaks_image = File(exists=True, argstr='-peaks %s', desc='the program will try to find the peaks that most closely match those in the image provided') num_peaks = traits.Int(argstr='-num %s', desc='the number of peaks to extract (default is 3)') peak_directions = traits.List(traits.Float, argstr='-direction %s', sep=' ', minlen=2, maxlen=2, - desc='phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option ' \ + desc='phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option ' ' phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)') peak_threshold = traits.Float(argstr='-threshold %s', desc='only peak amplitudes greater than the threshold will be considered') display_info = traits.Bool(argstr='-info', desc='Display information messages.') @@ -392,7 +392,7 @@ class Directions2AmplitudeInputSpec(CommandLineInputSpec): peaks_image = File(exists=True, argstr='-peaks %s', desc='the program will try to find the peaks that most closely match those in the image provided') num_peaks = traits.Int(argstr='-num %s', desc='the number of peaks to extract (default is 3)') peak_directions = traits.List(traits.Float, argstr='-direction %s', sep=' ', minlen=2, maxlen=2, - desc='phi theta. the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option ' \ + desc='phi theta. 
the direction of a peak to estimate. The algorithm will attempt to find the same number of peaks as have been specified using this option ' ' phi: the azimuthal angle of the direction (in degrees). theta: the elevation angle of the direction (in degrees, from the vertical z-axis)') display_info = traits.Bool(argstr='-info', desc='Display information messages.') quiet_display = traits.Bool(argstr='-quiet', desc='do not display information messages or progress status.') @@ -421,4 +421,3 @@ class Directions2Amplitude(CommandLine): _cmd = 'dir2amp' input_spec = Directions2AmplitudeInputSpec output_spec = Directions2AmplitudeOutputSpec - diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index cce2b38c6e..c4d49118a8 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -8,7 +8,8 @@ >>> os.chdir(datadir) """ -import os, os.path as op +import os +import os.path as op from ..base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File from ..traits_extension import isdefined @@ -35,7 +36,7 @@ class FilterTracksInputSpec(CommandLineInputSpec): name_source=['in_file'], hash_files=False, name_template='%s_filt') no_mask_interpolation = traits.Bool(argstr='-nomaskinterp', desc="Turns off trilinear interpolation of mask images.") - invert = traits.Bool(argstr='-invert', desc="invert the matching process, so that tracks that would" \ + invert = traits.Bool(argstr='-invert', desc="invert the matching process, so that tracks that would" "otherwise have been included are now excluded and vice-versa.") quiet = traits.Bool(argstr='-quiet', position=1, desc="Do not display information messages or progress status.") @@ -127,8 +128,8 @@ def _gen_outfilename(self): class StreamlineTrackInputSpec(CommandLineInputSpec): - in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, desc='the image containing the source data.' \ - 'The type of data required depends on the type of tracking as set in the preceeding argument. For DT methods, ' \ + in_file = File(exists=True, argstr='%s', mandatory=True, position=-2, desc='the image containing the source data.' + 'The type of data required depends on the type of tracking as set in the preceeding argument. For DT methods, ' 'the base DWI are needed. For SD methods, the SH harmonic coefficients of the FOD are needed.') seed_xor = ['seed_file', 'seed_spec'] @@ -163,11 +164,11 @@ class StreamlineTrackInputSpec(CommandLineInputSpec): desc="Set the step size of the algorithm in mm (default is 0.2).") minimum_radius_of_curvature = traits.Float(argstr='-curvature %s', units='mm', desc="Set the minimum radius of curvature (default is 2 mm for DT_STREAM, 0 for SD_STREAM, 1 mm for SD_PROB and DT_PROB)") - desired_number_of_tracks = traits.Int(argstr='-number %d', desc='Sets the desired number of tracks.' \ - 'The program will continue to generate tracks until this number of tracks have been selected and written to the output file' \ + desired_number_of_tracks = traits.Int(argstr='-number %d', desc='Sets the desired number of tracks.' + 'The program will continue to generate tracks until this number of tracks have been selected and written to the output file' '(default is 100 for *_STREAM methods, 1000 for *_PROB methods).') - maximum_number_of_tracks = traits.Int(argstr='-maxnum %d', desc='Sets the maximum number of tracks to generate.' 
\ - "The program will not generate more tracks than this number, even if the desired number of tracks hasn't yet been reached" \ + maximum_number_of_tracks = traits.Int(argstr='-maxnum %d', desc='Sets the maximum number of tracks to generate.' + "The program will not generate more tracks than this number, even if the desired number of tracks hasn't yet been reached" '(default is 100 x number).') minimum_tract_length = traits.Float(argstr='-minlength %s', units='mm', diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py index 54fc6c26ca..7fba0b26c7 100644 --- a/nipype/interfaces/nipy/model.py +++ b/nipype/interfaces/nipy/model.py @@ -39,8 +39,8 @@ class FitGLMInputSpec(BaseInterfaceInputSpec): "With Derivative' or 'FIR'"), usedefault=True) drift_model = traits.Enum("Cosine", "Polynomial", "Blank", desc=("string that specifies the desired drift " - "model, to be chosen among 'Polynomial', " - "'Cosine', 'Blank'"), usedefault=True) + "model, to be chosen among 'Polynomial', " + "'Cosine', 'Blank'"), usedefault=True) TR = traits.Float(mandatory=True) model = traits.Enum("ar1", "spherical", desc=("autoregressive mode is available only for the " @@ -122,7 +122,7 @@ def _run_interface(self, runtime): for i in range(len(reg_names)): reg_vals[:, i] = np.array(session_info[0]['regress'][i]['val']).reshape(1, -1) - frametimes = np.linspace(0, (nscans-1)*self.inputs.TR, nscans) + frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans) conditions = [] onsets = [] @@ -130,9 +130,9 @@ def _run_interface(self, runtime): for i, cond in enumerate(session_info[0]['cond']): onsets += cond['onset'] - conditions += [cond['name']]*len(cond['onset']) + conditions += [cond['name']] * len(cond['onset']) if len(cond['duration']) == 1: - duration += cond['duration']*len(cond['onset']) + duration += cond['duration'] * len(cond['onset']) else: duration += cond['duration'] @@ -146,10 +146,10 @@ def _run_interface(self, runtime): add_reg_names=reg_names ) if self.inputs.normalize_design_matrix: - for i in range(len(self._reg_names)-1): + for i in range(len(self._reg_names) - 1): design_matrix[:, i] = ((design_matrix[:, i] - - design_matrix[:, i].mean()) / - design_matrix[:, i].std()) + design_matrix[:, i].mean()) / + design_matrix[:, i].std()) if self.inputs.plot_design_matrix: import pylab @@ -318,4 +318,3 @@ def _list_outputs(self): outputs["p_maps"] = self._p_maps outputs["z_maps"] = self._z_maps return outputs - diff --git a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index a270a5c0ab..a5bfe177bf 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -63,8 +63,8 @@ def test_coherence_analysis(): T.metadata['roi'] = roi_names C = nta.CoherenceAnalyzer(T, method=dict(this_method='welch', - NFFT=CA.inputs.NFFT, - n_overlap=CA.inputs.n_overlap)) + NFFT=CA.inputs.NFFT, + n_overlap=CA.inputs.n_overlap)) freq_idx = np.where((C.frequencies > CA.inputs.frequency_range[0]) * (C.frequencies < CA.inputs.frequency_range[1]))[0] @@ -73,5 +73,3 @@ def test_coherence_analysis(): coh = np.mean(C.coherence[:, :, freq_idx], -1) # Averaging on the last dimension yield assert_equal, o.outputs.coherence_array, coh - - diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py index 73ce553ce8..46b85af86a 100644 --- a/nipype/interfaces/slicer/generate_classes.py +++ b/nipype/interfaces/slicer/generate_classes.py @@ -227,7 +227,7 @@ def 
generate_class(module, launcher, strip_module_name_prefix=True, redirect_x=F param.nodeName.replace('-vector', '')]] else: values = [typesDict[param.nodeName.replace('-vector', '')]] - if mipav_hacks == True: + if mipav_hacks is True: traitsParams["sep"] = ";" else: traitsParams["sep"] = ',' diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index c942161c8e..2a3472a648 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -375,8 +375,8 @@ def _make_matlab_command(self, _): try: tidx = cname.index(fcont[0]) except: - Exception("Contrast Estimate: could not get index of" \ - " T contrast. probably not defined prior " \ + Exception("Contrast Estimate: could not get index of" + " T contrast. probably not defined prior " "to the F contrasts") script += "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;\n" % (i + 1, cl0 + 1, tidx + 1) script += "jobs{1}.stats{1}.con.consess = consess;\n" diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index caec4af720..10810fdc91 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -329,7 +329,7 @@ def _format_arg(self, opt, spec, val): return np.array(filename_to_list(val), dtype=object) if opt == 'source' and self.inputs.jobtype == "write": if isdefined(self.inputs.apply_to_files): - return scans_for_fnames(val+self.inputs.apply_to_files) + return scans_for_fnames(val + self.inputs.apply_to_files) else: return scans_for_fnames(val) return super(Coregister, self)._format_arg(opt, spec, val) @@ -811,7 +811,7 @@ def _list_outputs(self): outfield = '%s_%s_image' % (image, tissue) outputs[outfield] = fname_presuffix(f, prefix='%sc%d' % (prefix, - tidx+1)) + tidx + 1)) if isdefined(self.inputs.save_bias_corrected) and \ self.inputs.save_bias_corrected: outputs['bias_corrected_image'] = fname_presuffix(f, prefix='m') @@ -963,16 +963,16 @@ def _list_outputs(self): if isdefined(self.inputs.tissues): for i, tissue in enumerate(self.inputs.tissues): if tissue[2][0]: - outputs['native_class_images'][i].append(os.path.join(pth, "c%d%s.nii" % (i+1, base))) + outputs['native_class_images'][i].append(os.path.join(pth, "c%d%s.nii" % (i + 1, base))) if tissue[2][1]: - outputs['dartel_input_images'][i].append(os.path.join(pth, "rc%d%s.nii" % (i+1, base))) + outputs['dartel_input_images'][i].append(os.path.join(pth, "rc%d%s.nii" % (i + 1, base))) if tissue[3][0]: - outputs['normalized_class_images'][i].append(os.path.join(pth, "wc%d%s.nii" % (i+1, base))) + outputs['normalized_class_images'][i].append(os.path.join(pth, "wc%d%s.nii" % (i + 1, base))) if tissue[3][1]: - outputs['modulated_class_images'][i].append(os.path.join(pth, "mwc%d%s.nii" % (i+1, base))) + outputs['modulated_class_images'][i].append(os.path.join(pth, "mwc%d%s.nii" % (i + 1, base))) else: for i in range(n_classes): - outputs['native_class_images'][i].append(os.path.join(pth, "c%d%s.nii" % (i+1, base))) + outputs['native_class_images'][i].append(os.path.join(pth, "c%d%s.nii" % (i + 1, base))) outputs['transformation_mat'].append(os.path.join(pth, "%s_seg8.mat" % base)) if isdefined(self.inputs.write_deformation_fields): @@ -1146,7 +1146,7 @@ def _list_outputs(self): outputs = self._outputs().get() outputs['template_files'] = [] for i in range(6): - outputs['template_files'].append(os.path.realpath('%s_%d.nii' % (self.inputs.template_prefix, i+1))) + outputs['template_files'].append(os.path.realpath('%s_%d.nii' % (self.inputs.template_prefix, i + 1))) 
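
The i + 1 spacing fixes in the SPM hunks above all feed SPM's 1-based file naming (c1<base>.nii through c6<base>.nii for native tissue classes, <prefix>_1.nii through <prefix>_6.nii for DARTEL templates). A minimal standalone sketch of the same naming scheme, with hypothetical arguments:

    import os

    def native_class_images(base, n_classes=6, pth='.'):
        # SPM numbers tissue classes from 1, hence the i + 1 offset.
        return [os.path.join(pth, 'c%d%s.nii' % (i + 1, base))
                for i in range(n_classes)]

    print(native_class_images('subject01'))  # c1subject01.nii .. c6subject01.nii
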
outputs['final_template_file'] = os.path.realpath('%s_6.nii' % self.inputs.template_prefix) outputs['dartel_flow_fields'] = [] for filename in self.inputs.image_files[0]: @@ -1234,7 +1234,7 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() pth, base, ext = split_filename(self.inputs.template_file) - outputs['normalization_parameter_file'] = os.path.realpath(base+'_2mni.mat') + outputs['normalization_parameter_file'] = os.path.realpath(base + '_2mni.mat') outputs['normalized_files'] = [] prefix = "w" if isdefined(self.inputs.modulate) and self.inputs.modulate: @@ -1374,7 +1374,7 @@ class VBMSegmentInputSpec(SPMCommandInputSpec): exists=True, field='estwrite.tpm', desc='tissue probability map') gaussians_per_class = traits.Tuple( - (2, 2, 2, 3, 4, 2), *([traits.Int()]*6), + (2, 2, 2, 3, 4, 2), *([traits.Int()] * 6), usedefault=True, desc='number of gaussians for each tissue class') bias_regularization = traits.Enum( @@ -1562,25 +1562,25 @@ def _list_outputs(self): if getattr(self.inputs, '%s_native' % tis): outputs['native_class_images'][i].append( - os.path.join(pth, "p%d%s.nii" % (i+1, base))) + os.path.join(pth, "p%d%s.nii" % (i + 1, base))) if getattr(self.inputs, '%s_dartel' % tis) == 1: outputs['dartel_input_images'][i].append( - os.path.join(pth, "rp%d%s.nii" % (i+1, base))) + os.path.join(pth, "rp%d%s.nii" % (i + 1, base))) elif getattr(self.inputs, '%s_dartel' % tis) == 2: outputs['dartel_input_images'][i].append( - os.path.join(pth, "rp%d%s_affine.nii" % (i+1, base))) + os.path.join(pth, "rp%d%s_affine.nii" % (i + 1, base))) # normalized space if getattr(self.inputs, '%s_normalized' % tis): outputs['normalized_class_images'][i].append( - os.path.join(pth, "w%sp%d%s.nii" % (dartel_px, i+1, base))) + os.path.join(pth, "w%sp%d%s.nii" % (dartel_px, i + 1, base))) if getattr(self.inputs, '%s_modulated_normalized' % tis) == 1: outputs['modulated_class_images'][i].append(os.path.join( - pth, "mw%sp%d%s.nii" % (dartel_px, i+1, base))) + pth, "mw%sp%d%s.nii" % (dartel_px, i + 1, base))) elif getattr(self.inputs, '%s_modulated_normalized' % tis) == 2: outputs['normalized_class_images'][i].append(os.path.join( - pth, "m0w%sp%d%s.nii" % (dartel_px, i+1, base))) + pth, "m0w%sp%d%s.nii" % (dartel_px, i + 1, base))) if self.inputs.pve_label_native: outputs['pve_label_native_images'].append( diff --git a/nipype/interfaces/spm/tests/test_base.py b/nipype/interfaces/spm/tests/test_base.py index 4a7ce2c852..73e8ad9c12 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -93,7 +93,7 @@ class TestClass(spm.SPMCommand): yield assert_equal, dc._use_mcr, True yield assert_equal, dc._matlab_cmd, 'spmcmd' # restore environment - os.environ.clear(); + os.environ.clear() os.environ.update(saved_env) diff --git a/nipype/interfaces/spm/tests/test_utils.py b/nipype/interfaces/spm/tests/test_utils.py index cd800949fb..38581b927f 100644 --- a/nipype/interfaces/spm/tests/test_utils.py +++ b/nipype/interfaces/spm/tests/test_utils.py @@ -21,7 +21,7 @@ def test_coreg(): assert_equal(isdefined(coreg.inputs.mat), False) pth, mov, _ = split_filename(moving) _, tgt, _ = split_filename(target) - mat = os.path.join(pth, '%s_to_%s.mat' %(mov, tgt)) + mat = os.path.join(pth, '%s_to_%s.mat' % (mov, tgt)) invmat = fname_presuffix(mat, prefix='inverse_') scrpt = coreg._make_matlab_command(None) assert_equal(coreg.inputs.mat, mat) diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py index 
9b7aa84dd3..29682171cc 100644 --- a/nipype/interfaces/spm/utils.py +++ b/nipype/interfaces/spm/utils.py @@ -25,11 +25,11 @@ class Analyze2nii(SPMCommand): output_spec = Analyze2niiOutputSpec def _make_matlab_command(self, _): - script = "V = spm_vol('%s');\n" %self.inputs.analyze_file + script = "V = spm_vol('%s');\n" % self.inputs.analyze_file _, name, _ = split_filename(self.inputs.analyze_file) self.output_name = os.path.join(os.getcwd(), name + ".nii") script += "[Y, XYZ] = spm_read_vols(V);\n" - script += "V.fname = '%s';\n" %self.output_name + script += "V.fname = '%s';\n" % self.output_name script += "spm_write_vol(V, Y);\n" return script @@ -42,9 +42,9 @@ def _list_outputs(self): class CalcCoregAffineInputSpec(SPMCommandInputSpec): target = File(exists=True, mandatory=True, - desc='target for generating affine transform') + desc='target for generating affine transform') moving = File(exists=True, mandatory=True, copyfile=False, - desc='volume transform can be applied to register with target') + desc='volume transform can be applied to register with target') mat = File(desc='Filename used to store affine matrix') invmat = File(desc='Filename used to store inverse affine matrix') @@ -89,7 +89,7 @@ def _make_mat_file(self): """ makes name for matfile if doesn exist""" pth, mv, _ = split_filename(self.inputs.moving) _, tgt, _ = split_filename(self.inputs.target) - mat = os.path.join(pth, '%s_to_%s.mat' %(mv, tgt)) + mat = os.path.join(pth, '%s_to_%s.mat' % (mv, tgt)) return mat def _make_matlab_command(self, _): @@ -108,10 +108,10 @@ def _make_matlab_command(self, _): save('%s' , 'M' ); M = inv(M); save('%s','M') - """ %(self.inputs.target, - self.inputs.moving, - self.inputs.mat, - self.inputs.invmat) + """ % (self.inputs.target, + self.inputs.moving, + self.inputs.mat, + self.inputs.invmat) return script def _list_outputs(self): @@ -123,9 +123,9 @@ def _list_outputs(self): class ApplyTransformInputSpec(SPMCommandInputSpec): in_file = File(exists=True, mandatory=True, copyfile=True, - desc='file to apply transform to, (only updates header)') + desc='file to apply transform to, (only updates header)') mat = File(exists=True, mandatory=True, - desc='file holding transform to apply') + desc='file holding transform to apply') out_file = File(desc="output file name for transformed data", genfile=True) @@ -166,9 +166,9 @@ def _make_matlab_command(self, _): V.fname = fullfile(outfile); spm_write_vol(V,X); - """ %(self.inputs.in_file, - self.inputs.out_file, - self.inputs.mat) + """ % (self.inputs.in_file, + self.inputs.out_file, + self.inputs.mat) # img_space = spm_get_space(infile); # spm_get_space(infile, transform.M * img_space); return script @@ -188,12 +188,12 @@ def _gen_outfilename(self): class ResliceInputSpec(SPMCommandInputSpec): in_file = File(exists=True, mandatory=True, - desc='file to apply transform to, (only updates header)') + desc='file to apply transform to, (only updates header)') space_defining = File(exists=True, mandatory=True, - desc='Volume defining space to slice in_file into') + desc='Volume defining space to slice in_file into') interp = traits.Range(low=0, high=7, usedefault=True, - desc='degree of b-spline used for interpolation'\ + desc='degree of b-spline used for interpolation' '0 is nearest neighbor (default)') out_file = File(desc='Optional file to save resliced volume') @@ -222,9 +222,9 @@ def _make_matlab_command(self, _): infiles = strvcat(\'%s\', \'%s\'); invols = spm_vol(infiles); spm_reslice(invols, flags); - """ %(self.inputs.interp, - 
self.inputs.space_defining, - self.inputs.in_file) + """ % (self.inputs.interp, + self.inputs.space_defining, + self.inputs.in_file) return script def _list_outputs(self): @@ -465,11 +465,11 @@ def _list_outputs(self): ext = self.inputs.format if self.inputs.output_dir_struct == "flat": - outputs['out_files'] = glob(os.path.join(od, '*.%s' %ext)) + outputs['out_files'] = glob(os.path.join(od, '*.%s' % ext)) elif self.inputs.output_dir_struct == 'series': - outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*.%s' %ext))) + outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*.%s' % ext))) elif self.inputs.output_dir_struct in ['patid', 'date_time', 'patname']: - outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*', '*.%s' %ext))) + outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*', '*.%s' % ext))) elif self.inputs.output_dir_struct == 'patid_date': - outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*', '*', '*.%s' %ext))) + outputs['out_files'] = glob(os.path.join(od, os.path.join('*', '*', '*', '*.%s' % ext))) return outputs diff --git a/nipype/interfaces/tests/test_base.py b/nipype/interfaces/tests/test_base.py index cbfd95f092..d186f64b6b 100644 --- a/nipype/interfaces/tests/test_base.py +++ b/nipype/interfaces/tests/test_base.py @@ -140,7 +140,7 @@ class MyInterface(nib.BaseInterface): myif = MyInterface() yield assert_raises, TypeError, setattr(myif.inputs, 'kung', 10.0) myif.inputs.foo = 1 - yield assert_equal, myif.inputs.foo, 1 + yield assert_equal, myif.inputs.foo, 1 set_bar = lambda: setattr(myif.inputs, 'bar', 1) yield assert_raises, IOError, set_bar yield assert_equal, myif.inputs.foo, 1 @@ -386,7 +386,7 @@ class spec4(nib.TraitedSpec): infields = spec4(moo=nme, doo=[tmp_infile]) hashval2 = infields.get_hashval(hash_method='content') - yield assert_not_equal, hashval1[1], hashval2[1] + yield assert_not_equal, hashval1[1], hashval2[1] os.chdir(pwd) teardown_file(tmpd) diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py index a46b6c14b4..8b5df0a7ce 100644 --- a/nipype/interfaces/tests/test_io.py +++ b/nipype/interfaces/tests/test_io.py @@ -201,7 +201,7 @@ def test_datasink_substitutions(): ds.run() yield assert_equal, \ sorted([os.path.basename(x) for - x in glob.glob(os.path.join(outdir, '*'))]), \ + x in glob.glob(os.path.join(outdir, '*'))]), \ ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns shutil.rmtree(indir) shutil.rmtree(outdir) @@ -250,7 +250,7 @@ def test_s3datasink_substitutions(): ds.run() yield assert_equal, \ sorted([os.path.basename(x) for - x in glob.glob(os.path.join(outdir, '*'))]), \ + x in glob.glob(os.path.join(outdir, '*'))]), \ ['!-yz-b.n', 'ABABAB.n'] # so we got re used 2nd and both patterns bkt = conn.get_bucket(ds.inputs.bucket) @@ -420,4 +420,3 @@ def test_jsonsink(): os.chdir(curdir) shutil.rmtree(outdir) - diff --git a/nipype/interfaces/traits_extension.py b/nipype/interfaces/traits_extension.py index 084eb5ea7f..49af1db164 100644 --- a/nipype/interfaces/traits_extension.py +++ b/nipype/interfaces/traits_extension.py @@ -35,7 +35,7 @@ class BaseFile (traits.BaseStr): info_text = 'a file name' def __init__(self, value='', filter=None, auto_set=False, - entries=0, exists=False, **metadata): + entries=0, exists=False, **metadata): """ Creates a File trait. 
Parameters @@ -86,7 +86,7 @@ class File (BaseFile): """ def __init__(self, value='', filter=None, auto_set=False, - entries=0, exists=False, **metadata): + entries=0, exists=False, **metadata): """ Creates a File trait. Parameters @@ -112,7 +112,7 @@ def __init__(self, value='', filter=None, auto_set=False, fast_validate = (11, str) super(File, self).__init__(value, filter, auto_set, entries, exists, - **metadata) + **metadata) # ------------------------------------------------------------------------------- # 'BaseDirectory' and 'Directory' traits: @@ -127,7 +127,7 @@ class BaseDirectory (traits.BaseStr): info_text = 'a directory name' def __init__(self, value='', auto_set=False, entries=0, - exists=False, **metadata): + exists=False, **metadata): """ Creates a BaseDirectory trait. Parameters @@ -175,7 +175,7 @@ class Directory (BaseDirectory): """ def __init__(self, value='', auto_set=False, entries=0, - exists=False, **metadata): + exists=False, **metadata): """ Creates a Directory trait. Parameters @@ -199,7 +199,7 @@ def __init__(self, value='', auto_set=False, entries=0, self.fast_validate = (11, str) super(Directory, self).__init__(value, auto_set, entries, exists, - **metadata) + **metadata) """ @@ -239,17 +239,14 @@ def has_metadata(trait, metadata, value=None, recursive=True): Checks if a given trait has a metadata (and optionally if it is set to particular value) ''' count = 0 - if hasattr(trait, "_metadata") and metadata in list(trait._metadata.keys()) and (trait._metadata[metadata] == value or value == None): + if hasattr(trait, "_metadata") and metadata in list(trait._metadata.keys()) and (trait._metadata[metadata] == value or value is None): count += 1 if recursive: if hasattr(trait, 'inner_traits'): for inner_trait in trait.inner_traits(): count += has_metadata(inner_trait.trait_type, metadata, recursive) - if hasattr(trait, 'handlers') and trait.handlers != None: + if hasattr(trait, 'handlers') and trait.handlers is not None: for handler in trait.handlers: count += has_metadata(handler, metadata, recursive) return count > 0 - - - diff --git a/nipype/interfaces/utility.py b/nipype/interfaces/utility.py index 0cf6aa6e64..37883d4e5c 100644 --- a/nipype/interfaces/utility.py +++ b/nipype/interfaces/utility.py @@ -405,8 +405,8 @@ def __init__(self, input_names, output_names, function=None, imports=None, try: self.inputs.function_str = getsource(function) except IOError: - raise Exception('Interface Function does not accept ' \ - 'function objects defined interactively ' \ + raise Exception('Interface Function does not accept ' + 'function objects defined interactively ' 'in a python session') elif isinstance(function, string_types): self.inputs.function_str = function diff --git a/nipype/interfaces/vista/__init__.py b/nipype/interfaces/vista/__init__.py index 755a98ae4a..e1777c408d 100644 --- a/nipype/interfaces/vista/__init__.py +++ b/nipype/interfaces/vista/__init__.py @@ -1,3 +1,3 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -from .vista import (Vnifti2Image, VtoMat) \ No newline at end of file +from .vista import (Vnifti2Image, VtoMat) diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index 9f6286ade3..fdb054a5ad 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -11,7 +11,8 @@ from nipype.interfaces.base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File from nipype.utils.filemanip import split_filename -import os, 
os.path as op +import os +import os.path as op from nipype.interfaces.traits_extension import isdefined @@ -72,4 +73,3 @@ class VtoMat(CommandLine): _cmd = 'vtomat' input_spec = VtoMatInputSpec output_spec = VtoMatOutputSpec - diff --git a/nipype/pipeline/engine.py b/nipype/pipeline/engine.py index 151441c7b2..1c73918bf8 100644 --- a/nipype/pipeline/engine.py +++ b/nipype/pipeline/engine.py @@ -1057,15 +1057,16 @@ def _get_dot(self, prefix=None, hierarchy=None, colored=False, nodename = fullname.replace('.', '_') dotlist.append('subgraph cluster_%s {' % nodename) if colored: - dotlist.append(prefix + prefix + 'edge [color="%s"];' % (colorset[level+1])) + dotlist.append(prefix + prefix + 'edge [color="%s"];' % (colorset[level + 1])) dotlist.append(prefix + prefix + 'style=filled;') - dotlist.append(prefix + prefix + 'fillcolor="%s";' % (colorset[level+2])) + dotlist.append(prefix + prefix + 'fillcolor="%s";' % (colorset[level + 2])) dotlist.append(node._get_dot(prefix=prefix + prefix, hierarchy=hierarchy + [self.name], colored=colored, - simple_form=simple_form, level=level+3)) + simple_form=simple_form, level=level + 3)) dotlist.append('}') - if level == 6: level = 2 + if level == 6: + level = 2 else: for subnode in self._graph.successors_iter(node): if node._hierarchy != subnode._hierarchy: @@ -1337,19 +1338,19 @@ def run(self, updatehash=False): logger.debug(('updatehash, overwrite, always_run, hash_exists', updatehash, self.overwrite, self._interface.always_run, hash_exists)) - if (not updatehash and (((self.overwrite is None - and self._interface.always_run) - or self.overwrite) or - not hash_exists)): + if (not updatehash and (((self.overwrite is None and + self._interface.always_run) or + self.overwrite) or not + hash_exists)): logger.debug("Node hash: %s" % hashvalue) # by rerunning we mean only nodes that did finish to run previously json_pat = op.join(outdir, '_0x*.json') json_unfinished_pat = op.join(outdir, '_0x*_unfinished.json') - need_rerun = (op.exists(outdir) - and not isinstance(self, MapNode) - and len(glob(json_pat)) != 0 - and len(glob(json_unfinished_pat)) == 0) + need_rerun = (op.exists(outdir) and not + isinstance(self, MapNode) and + len(glob(json_pat)) != 0 and + len(glob(json_unfinished_pat)) == 0) if need_rerun: logger.debug("Rerunning node") logger.debug(("updatehash = %s, " @@ -1379,9 +1380,8 @@ def run(self, updatehash=False): logging.logdebug_dict_differences(prev_inputs, hashed_inputs) cannot_rerun = (str2bool( - self.config['execution']['stop_on_first_rerun']) - and not (self.overwrite is None - and self._interface.always_run)) + self.config['execution']['stop_on_first_rerun']) and not + (self.overwrite is None and self._interface.always_run)) if cannot_rerun: raise Exception(("Cannot rerun when 'stop_on_first_rerun' " "is set to True")) @@ -1390,10 +1390,10 @@ def run(self, updatehash=False): hashvalue) if op.exists(hashfile): os.remove(hashfile) - rm_outdir = (op.exists(outdir) - and not (op.exists(hashfile_unfinished) - and self._interface.can_resume) - and not isinstance(self, MapNode)) + rm_outdir = (op.exists(outdir) and not + (op.exists(hashfile_unfinished) and + self._interface.can_resume) and not + isinstance(self, MapNode)) if rm_outdir: logger.debug("Removing old %s and its contents" % outdir) try: @@ -1762,8 +1762,8 @@ def write_report(self, report_type=None, cwd=None): fp = open(report_file, 'at') fp.writelines(write_rst_header('Execution Inputs', level=1)) fp.writelines(write_rst_dict(self.inputs.get())) - exit_now = (not hasattr(self.result, 
'outputs') - or self.result.outputs is None) + exit_now = (not hasattr(self.result, 'outputs') or + self.result.outputs is None) if exit_now: return fp.writelines(write_rst_header('Execution Outputs', level=1)) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index e4aea237c8..7d9ae77c2b 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -233,7 +233,7 @@ def run(self, graph, config, updatehash=False): # setup polling - TODO: change to threaded model notrun = [] while np.any(self.proc_done == False) | \ - np.any(self.proc_pending == True): + np.any(self.proc_pending == True): toappend = [] # trigger callbacks for any pending results while self.pending_tasks: @@ -371,12 +371,9 @@ def _send_procs_to_workers(self, updatehash=False, graph=None): hash_exists, _, _, _ = self.procs[ jobid].hash_exists() logger.debug('Hash exists %s' % str(hash_exists)) - if (hash_exists and - (self.procs[jobid].overwrite == False or - (self.procs[jobid].overwrite == None and - not self.procs[jobid]._interface.always_run) - ) - ): + if (hash_exists and (self.procs[jobid].overwrite is False or + (self.procs[jobid].overwrite is None and not + self.procs[jobid]._interface.always_run))): continue_with_submission = False self._task_finished_cb(jobid) self._remove_node_dirs() @@ -656,4 +653,3 @@ def _get_result(self, taskid): result_out['result'] = result_data return result_out - diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index a41fff6988..4f89a13ce6 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -114,9 +114,9 @@ def _submit_graph(self, pyfiles, dependencies, nodes): "wrapper_args"]) # add required slots to the template template = '%s\n%s\n%s\nqueue\n' % ( - '%(initial_specs)s', - template, - '%(override_specs)s') + '%(initial_specs)s', + template, + '%(override_specs)s') batch_dir, name = os.path.split(pyscript) name = '.'.join(name.split('.')[:-1]) specs = dict( @@ -126,13 +126,13 @@ def _submit_graph(self, pyfiles, dependencies, nodes): nodescript=pyscript, basename=os.path.join(batch_dir, name), override_specs=override_specs - ) - if not wrapper_cmd is None: + ) + if wrapper_cmd is not None: specs['executable'] = wrapper_cmd specs['nodescript'] = \ '%s %s %s' % (wrapper_args % specs, # give access to variables - sys.executable, - pyscript) + sys.executable, + pyscript) submitspec = template % specs # write submit spec for this job submitfile = os.path.join(batch_dir, diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 3afa3f80fa..9d0a52adaa 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -34,4 +34,3 @@ def run(self, graph, config, updatehash=False): logger.info("Executing debug plugin") for node in nx.topological_sort(graph): self._callable(node, graph) - diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index 2599f3f76c..96f47fd6ef 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -64,7 +64,7 @@ def run(self, graph, config, updatehash=False): __import__(name) self.iparallel = sys.modules[name] except ImportError: - raise ImportError("Ipython kernel not found. Parallel execution " \ + raise ImportError("Ipython kernel not found. 
Parallel execution " "will be unavailable") try: self.taskclient = self.iparallel.Client() @@ -112,6 +112,6 @@ def _report_crash(self, node, result=None): def _clear_task(self, taskid): if IPyversion >= '0.11': - logger.debug("Clearing id: %d" %taskid) + logger.debug("Clearing id: %d" % taskid) self.taskclient.purge_results(self.taskmap[taskid]) del self.taskmap[taskid] diff --git a/nipype/pipeline/plugins/ipythonx.py b/nipype/pipeline/plugins/ipythonx.py index 47887e3a93..7eca73e0ea 100644 --- a/nipype/pipeline/plugins/ipythonx.py +++ b/nipype/pipeline/plugins/ipythonx.py @@ -37,7 +37,7 @@ def run(self, graph, config, updatehash=False): __import__(name) self.ipyclient = sys.modules[name] except ImportError: - raise ImportError("Ipython kernel not found. Parallel execution " \ + raise ImportError("Ipython kernel not found. Parallel execution " "will be unavailable") try: self.taskclient = self.ipyclient.TaskClient() @@ -65,7 +65,7 @@ def _submit_job(self, node, updatehash=False): """ task = self.ipyclient.StringTask(cmdstr, push=dict(task=node, - updatehash=updatehash), + updatehash=updatehash), pull=['result', 'traceback']) return self.taskclient.run(task, block=False) @@ -80,5 +80,5 @@ def _report_crash(self, node, result=None): def _clear_task(self, taskid): if IPyversion >= '0.10.1': - logger.debug("Clearing id: %d" %taskid) + logger.debug("Clearing id: %d" % taskid) self.taskclient.clear(taskid) diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index fd6648c4ac..216d037757 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -55,4 +55,3 @@ def run(self, graph, config, updatehash=False): if self._status_callback: self._status_callback(node, 'exception') report_nodes_not_run(notrun) - diff --git a/nipype/pipeline/plugins/multiproc.py b/nipype/pipeline/plugins/multiproc.py index 062c8e5d04..861e2cc507 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -72,7 +72,7 @@ def __init__(self, plugin_args=None): def _get_result(self, taskid): if taskid not in self._taskresult: - raise RuntimeError('Multiproc task %d not found' %taskid) + raise RuntimeError('Multiproc task %d not found' % taskid) if not self._taskresult[taskid].ready(): return None return self._taskresult[taskid].get() diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index 60c8c4600b..efc77ce3e8 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -16,10 +16,10 @@ def node_completed_status(checknode): :return: boolean value True indicates that the node does not need to be run. """ """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = (checknode.overwrite == False or - (checknode.overwrite == None and - not checknode._interface.always_run) - ) + node_state_does_not_require_overwrite = (checknode.overwrite is False or + (checknode.overwrite is None and not + checknode._interface.always_run) + ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index 08abe4b394..3c0bc62aaf 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -16,10 +16,10 @@ def node_completed_status(checknode): :return: boolean value True indicates that the node does not need to be run. 
""" """ TODO: place this in the base.py file and refactor """ - node_state_does_not_require_overwrite = (checknode.overwrite == False or - (checknode.overwrite == None and - not checknode._interface.always_run) - ) + node_state_does_not_require_overwrite = (checknode.overwrite is False or + (checknode.overwrite is None and not + checknode._interface.always_run) + ) hash_exists = False try: hash_exists, _, _, _ = checknode.hash_exists() diff --git a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py index 5682965de1..89336c2026 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_multiproc_nondaemon.py @@ -144,4 +144,3 @@ def test_run_multiproc_nondaemon_true(): # with nondaemon_flag = True, the execution should succeed result = run_multiproc_nondaemon_with_flag(True) yield assert_equal, result, 180 # n_procs (2) * numberOfThreads (2) * 45 == 180 - diff --git a/nipype/pipeline/tests/test_engine.py b/nipype/pipeline/tests/test_engine.py index 08c25aced6..30b2981b4c 100644 --- a/nipype/pipeline/tests/test_engine.py +++ b/nipype/pipeline/tests/test_engine.py @@ -178,8 +178,8 @@ def test8(): pipe._execgraph = pe.generate_expanded_graph(deepcopy(pipe._flatgraph)) yield assert_equal, len(pipe._execgraph.nodes()), 8 yield assert_equal, len(pipe._execgraph.edges()), 8 - edgenum = sorted([(len(pipe._execgraph.in_edges(node)) + \ - len(pipe._execgraph.out_edges(node))) \ + edgenum = sorted([(len(pipe._execgraph.in_edges(node)) + + len(pipe._execgraph.out_edges(node))) for node in pipe._execgraph.nodes()]) yield assert_true, edgenum[0] > 0 @@ -219,7 +219,7 @@ def test_iterable_expansion(): wf1.connect(node1, 'output1', node2, 'input2') wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' %i)]) + wf3.add_nodes([wf1.clone(name='test%d' % i)]) wf3._flatgraph = wf3._create_flat_graph() yield assert_equal, len(pe.generate_expanded_graph(wf3._flatgraph).nodes()), 12 @@ -234,7 +234,7 @@ def test_synchronize_expansion(): wf1.connect(node1, 'output1', node2, 'input2') wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' %i)]) + wf3.add_nodes([wf1.clone(name='test%d' % i)]) wf3._flatgraph = wf3._create_flat_graph() # Each expanded graph clone has: # 3 node1 expansion nodes and @@ -258,7 +258,7 @@ def test_synchronize_tuples_expansion(): wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' %i)]) + wf3.add_nodes([wf1.clone(name='test%d' % i)]) wf3._flatgraph = wf3._create_flat_graph() # Identical to test_synchronize_expansion @@ -286,7 +286,7 @@ def test_itersource_expansion(): wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' %i)]) + wf3.add_nodes([wf1.clone(name='test%d' % i)]) wf3._flatgraph = wf3._create_flat_graph() @@ -318,7 +318,7 @@ def test_itersource_synchronize1_expansion(): wf1.connect(node3, 'output1', node4, 'input1') wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' %i)]) + wf3.add_nodes([wf1.clone(name='test%d' % i)]) wf3._flatgraph = wf3._create_flat_graph() # each expanded graph clone has: @@ -351,7 +351,7 @@ def test_itersource_synchronize2_expansion(): wf1.connect(node3, 'output1', node4, 'input1') wf3 = pe.Workflow(name='group') for i in [0, 1, 2]: - wf3.add_nodes([wf1.clone(name='test%d' %i)]) + wf3.add_nodes([wf1.clone(name='test%d' % i)]) wf3._flatgraph = 
wf3._create_flat_graph() # each expanded graph clone has: @@ -555,7 +555,7 @@ def func1(): return 1 def func2(a): - return a+1 + return a + 1 n1 = pe.Node(Function(input_names=[], output_names=['a'], function=func1), @@ -565,7 +565,7 @@ def func2(a): function=func2), name='n2') w1 = pe.Workflow(name='test') - modify = lambda x: x+1 + modify = lambda x: x + 1 n1.inputs.a = 1 w1.connect(n1, ('a', modify), n2, 'a') w1.base_dir = wd @@ -616,7 +616,7 @@ def func1(): return 1 def func2(a): - return a+1 + return a + 1 n1 = pe.Node(Function(input_names=[], output_names=['a'], function=func1), @@ -626,7 +626,7 @@ def func2(a): function=func2), name='n2') w1 = pe.Workflow(name='test') - modify = lambda x: x+1 + modify = lambda x: x + 1 n1.inputs.a = 1 w1.connect(n1, ('a', modify), n2, 'a') w1.base_dir = wd diff --git a/nipype/pipeline/utils.py b/nipype/pipeline/utils.py index 25931f1310..19569d3332 100644 --- a/nipype/pipeline/utils.py +++ b/nipype/pipeline/utils.py @@ -150,7 +150,7 @@ def get_print_name(node, simple_form=True): if simple_form: parts = name.split('.') if len(parts) > 2: - return ' ('.join(parts[1:])+')' + return ' ('.join(parts[1:]) + ')' elif len(parts) == 2: return parts[1] return name @@ -281,7 +281,7 @@ def count_iterables(iterables, synchronize=False): if synchronize: op = max else: - op = lambda x, y: x*y + op = lambda x, y: x * y return reduce(op, [len(func()) for _, func in iterables.items()]) @@ -620,9 +620,9 @@ def generate_expanded_graph(graph_in): # the join successor nodes of the current iterable node jnodes = [node for node in graph_in.nodes_iter() - if hasattr(node, 'joinsource') - and inode.name == node.joinsource - and nx.has_path(graph_in, inode, node)] + if hasattr(node, 'joinsource') and + inode.name == node.joinsource and + nx.has_path(graph_in, inode, node)] # excise the join in-edges. save the excised edges in a # {jnode: {source name: (destination name, edge data)}} @@ -649,8 +649,8 @@ def generate_expanded_graph(graph_in): # find the unique iterable source node in the graph try: iter_src = next((node for node in graph_in.nodes_iter() - if node.name == src_name - and nx.has_path(graph_in, node, inode))) + if node.name == src_name and + nx.has_path(graph_in, node, inode))) except StopIteration: raise ValueError("The node %s itersource %s was not found" " among the iterable predecessor nodes" @@ -894,11 +894,11 @@ def _transpose_iterables(fields, values): for key, tuples in list(values.items()): for kvals in tuples: for idx, val in enumerate(kvals): - if val != None: + if val is not None: transposed[fields[idx]][key].append(val) return list(transposed.items()) else: - return list(zip(fields, [[v for v in list(transpose) if v != None] + return list(zip(fields, [[v for v in list(transpose) if v is not None] for transpose in zip(*values)])) diff --git a/nipype/utils/__init__.py b/nipype/utils/__init__.py index 93b967edb8..926f228876 100644 --- a/nipype/utils/__init__.py +++ b/nipype/utils/__init__.py @@ -2,4 +2,3 @@ from .onetime import OneTimeProperty, setattr_on_read from .tmpdirs import TemporaryDirectory, InTemporaryDirectory - diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 4cae18d2a7..bd7ab032ef 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -87,11 +87,11 @@ def __init__(self, *args, **kwargs): if os.path.exists(old_config_file): if os.path.exists(new_config_file): msg = ("Detected presence of both old (%s, used by versions " - "< 0.5.2) and new (%s) config files. This version will " - "proceed with the new one. 
We advise to merge settings " - "and remove old config file if you are not planning to " - "use previous releases of nipype.") % (old_config_file, - new_config_file) + "< 0.5.2) and new (%s) config files. This version will " + "proceed with the new one. We advise to merge settings " + "and remove old config file if you are not planning to " + "use previous releases of nipype.") % (old_config_file, + new_config_file) warn(msg) else: warn("Moving old config file from: %s to %s" % (old_config_file, @@ -175,4 +175,3 @@ def update_matplotlib(self): def enable_provenance(self): self._config.set('execution', 'write_provenance', 'true') self._config.set('execution', 'hash_method', 'content') - diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py index 4875ee6b95..a445262a15 100644 --- a/nipype/utils/docparse.py +++ b/nipype/utils/docparse.py @@ -42,7 +42,7 @@ def grab_doc(cmd, trap_error=True): stdout, stderr = proc.communicate() if trap_error and proc.returncode: - msg = 'Attempting to run %s. Returned Error: %s' %(cmd, stderr) + msg = 'Attempting to run %s. Returned Error: %s' % (cmd, stderr) raise IOError(msg) if stderr: @@ -216,9 +216,8 @@ def build_doc(doc, opts): if attr is not None: # newline = line.replace(flag, attr) # Replace the flag with our attribute name - linelist[0] = '%s :' % str(attr) + linelist[0] = '%s :\n ' % str(attr) # Add some line formatting - linelist.insert(1, '\n ') newline = ' '.join(linelist) newdoc.append(newline) else: @@ -256,7 +255,7 @@ def get_doc(cmd, opt_map, help_flag=None, trap_error=True): terminal_output='allatonce').run() cmd_path = res.runtime.stdout.strip() if cmd_path == '': - raise Exception('Command %s not found' %cmd.split(' ')[0]) + raise Exception('Command %s not found' % cmd.split(' ')[0]) if help_flag: cmd = ' '.join((cmd, help_flag)) doc = grab_doc(cmd, trap_error) @@ -287,9 +286,9 @@ def _parse_doc(doc, style=['--']): style = [style] for line in doclist: linelist = line.split() - flag = [item for i, item in enumerate(linelist) if i < 2 and \ - any([item.startswith(s) for s in style]) and \ - len(item) > 1] + flag = [item for i, item in enumerate(linelist) if i < 2 and + any([item.startswith(s) for s in style]) and + len(item) > 1] if flag: if len(flag) == 1: style_idx = [flag[0].startswith(s) for s in style].index(True) @@ -303,7 +302,7 @@ def _parse_doc(doc, style=['--']): break flag = flag[style_idx.index(min(style_idx))] style_idx = min(style_idx) - optmap[flag.split(style[style_idx])[1]] = '%s %%s' %flag + optmap[flag.split(style[style_idx])[1]] = '%s %%s' % flag return optmap @@ -332,7 +331,7 @@ def get_params_from_doc(cmd, style='--', help_flag=None, trap_error=True): terminal_output='allatonce').run() cmd_path = res.runtime.stdout.strip() if cmd_path == '': - raise Exception('Command %s not found' %cmd.split(' ')[0]) + raise Exception('Command %s not found' % cmd.split(' ')[0]) if help_flag: cmd = ' '.join((cmd, help_flag)) doc = grab_doc(cmd, trap_error) diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index ba364b153b..de6f6760f5 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -226,7 +226,7 @@ def copyfile(originalfile, newfile, copy=False, create_new=False, s = re.search('_c[0-9]{4,4}$', fname) i = 0 if s: - i = int(s.group()[2:])+1 + i = int(s.group()[2:]) + 1 fname = fname[:-6] + "_c%04d" % i else: fname += "_c%04d" % i @@ -452,11 +452,11 @@ def write_rst_list(items, prefix=''): out = [] for item in items: out.append(prefix + ' ' + str(item)) - return '\n'.join(out)+'\n\n' + 
return '\n'.join(out) + '\n\n' def write_rst_dict(info, prefix=''): out = [] for key, value in sorted(info.items()): out.append(prefix + '* ' + key + ' : ' + str(value)) - return '\n'.join(out)+'\n\n' + return '\n'.join(out) + '\n\n' diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index de19f5843c..0c98900695 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -91,7 +91,7 @@ def create_function_from_source(function_source, imports=None): exec(function_source, ns) except Exception as msg: - msg = str(msg) + '\nError executing function:\n %s\n' %function_source + msg = str(msg) + '\nError executing function:\n %s\n' % function_source msg += '\n'.join(["Functions in connection strings have to be standalone.", "They cannot be declared either interactively or inside", "another function or inline in the connect string. Any", @@ -242,6 +242,3 @@ def unflatten(in_list, prev_structure): for item in prev_structure: out.append(unflatten(in_list, item)) return out - - - diff --git a/nipype/utils/nipype2boutiques.py b/nipype/utils/nipype2boutiques.py index bb7c515b34..49fc1d755d 100644 --- a/nipype/utils/nipype2boutiques.py +++ b/nipype/utils/nipype2boutiques.py @@ -72,8 +72,8 @@ def generate_boutiques_descriptor(module, interface_name, ignored_template_input # Tool description tool_desc = {} tool_desc['name'] = interface_name - tool_desc['command-line'] = "nipype_cmd "+str(module)+" "+interface_name+" " - tool_desc['description'] = interface_name+", as implemented in Nipype (module: "+str(module)+", interface: "+interface_name+")." + tool_desc['command-line'] = "nipype_cmd " + str(module) + " " + interface_name + " " + tool_desc['description'] = interface_name + ", as implemented in Nipype (module: " + str(module) + ", interface: " + interface_name + ")." 
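A side note on the two description lines above: a possible further cleanup, not applied in this changeset, would build these strings with %-formatting, matching the % style used elsewhere in this patch. A minimal sketch reusing the surrounding variable names:

    # equivalent to the concatenations above, assuming the same locals;
    # %s applies str() implicitly, so the explicit str(module) is not needed
    tool_desc['command-line'] = "nipype_cmd %s %s " % (module, interface_name)
    tool_desc['description'] = ("%s, as implemented in Nipype (module: %s, "
                                "interface: %s)." % (interface_name, module,
                                                     interface_name))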
tool_desc['inputs'] = [] tool_desc['outputs'] = [] tool_desc['tool-version'] = interface.version @@ -87,9 +87,9 @@ def generate_boutiques_descriptor(module, interface_name, ignored_template_input for name, spec in sorted(interface.inputs.traits(transient=None).items()): input = get_boutiques_input(inputs, interface, name, spec, ignored_template_inputs, verbose, ignore_template_numbers) tool_desc['inputs'].append(input) - tool_desc['command-line'] += input['command-line-key']+" " + tool_desc['command-line'] += input['command-line-key'] + " " if verbose: - print("-> Adding input "+input['name']) + print("-> Adding input " + input['name']) # Generates tool outputs for name, spec in sorted(outputs.traits(transient=None).items()): @@ -97,9 +97,9 @@ def generate_boutiques_descriptor(module, interface_name, ignored_template_input if output['path-template'] != "": tool_desc['outputs'].append(output) if verbose: - print("-> Adding output "+output['name']) + print("-> Adding output " + output['name']) elif verbose: - print("xx Skipping output "+output['name']+" with no path template.") + print("xx Skipping output " + output['name'] + " with no path template.") if tool_desc['outputs'] == []: raise Exception("Tool has no output.") @@ -135,10 +135,10 @@ def get_boutiques_input(inputs, interface, input_name, spec, ignored_template_in input['name'] = input_name.replace('_', ' ').capitalize() input['type'] = get_type_from_spec_info(spec_info) input['list'] = is_list(spec_info) - input['command-line-key'] = "["+input_name.upper()+"]" # assumes that input names are unique - input['command-line-flag'] = ("--%s" %input_name+" ").strip() + input['command-line-key'] = "[" + input_name.upper() + "]" # assumes that input names are unique + input['command-line-flag'] = ("--%s" % input_name + " ").strip() input['tempvalue'] = None - input['description'] = spec_info.capitalize()+". "+spec.desc.capitalize() + input['description'] = spec_info.capitalize() + ". " + spec.desc.capitalize() if not input['description'].endswith('.'): input['description'] += '.' if not (hasattr(spec, "mandatory") and spec.mandatory): @@ -155,7 +155,7 @@ def get_boutiques_input(inputs, interface, input_name, spec, ignored_template_in setattr(interface.inputs, input_name, tempvalue) input['tempvalue'] = tempvalue if verbose: - print("oo Path-template creation using "+input['id']+"="+str(tempvalue)) + print("oo Path-template creation using " + input['id'] + "=" + str(tempvalue)) # Now that temp values have been generated, set Boolean types to # Number (there is no Boolean type in Boutiques) @@ -236,10 +236,10 @@ def get_unique_value(type, id): assuming id is unique. ''' return { - "File": os.path.abspath(create_tempfile()), - "Boolean": True, - "Number": abs(hash(id)), # abs in case input param must be positive... - "String": id + "File": os.path.abspath(create_tempfile()), + "Boolean": True, + "Number": abs(hash(id)), # abs in case input param must be positive... 
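An aside on the "Number" entry just above: in Python 3, str hashes are salted per interpreter process (see PYTHONHASHSEED), so abs(hash(id)) is unique and stable only within a single run. If cross-run determinism ever matters, a digest-based helper, shown here only as an assumption and not part of this patch, behaves predictably:

    import hashlib

    def stable_number(text):
        # deterministic non-negative integer derived from text,
        # unlike the process-salted built-in hash()
        return int(hashlib.md5(text.encode()).hexdigest(), 16)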
+ "String": id }[type] diff --git a/nipype/utils/nipype_cmd.py b/nipype/utils/nipype_cmd.py index 93ddcd16be..9bdfd7df91 100644 --- a/nipype/utils/nipype_cmd.py +++ b/nipype/utils/nipype_cmd.py @@ -14,7 +14,7 @@ def listClasses(module=None): print("Available Interfaces:") for k, v in sorted(list(pkg.__dict__.items())): if inspect.isclass(v) and issubclass(v, Interface): - print("\t%s" %k) + print("\t%s" % k) def add_options(parser=None, module=None, function=None): @@ -25,7 +25,7 @@ def add_options(parser=None, module=None, function=None): inputs = interface.input_spec() for name, spec in sorted(interface.inputs.traits(transient=None).items()): - desc = "\n".join(interface._get_trait_desc(inputs, name, spec))[len(name)+2:] + desc = "\n".join(interface._get_trait_desc(inputs, name, spec))[len(name) + 2:] args = {} if spec.is_trait_type(traits.Bool): @@ -38,7 +38,7 @@ def add_options(parser=None, module=None, function=None): else: if spec.is_trait_type(InputMultiPath): args["nargs"] = "*" - parser.add_argument("--%s" %name, dest=name, + parser.add_argument("--%s" % name, dest=name, help=desc, **args) return parser, interface @@ -65,7 +65,7 @@ def run_instance(interface, options): setattr(interface.inputs, input_name, value) except ValueError as e: - print("Error when setting the value of %s: '%s'" %(input_name, str(e))) + print("Error when setting the value of %s: '%s'" % (input_name, str(e))) print(interface.inputs) res = interface.run() @@ -84,7 +84,7 @@ def main(argv): parsed = parser.parse_args(args=argv[1:3]) _, prog = os.path.split(argv[0]) - interface_parser = argparse.ArgumentParser(description="Run %s" %parsed.interface, prog=" ".join([prog] + argv[1:3])) + interface_parser = argparse.ArgumentParser(description="Run %s" % parsed.interface, prog=" ".join([prog] + argv[1:3])) interface_parser, interface = add_options(interface_parser, parsed.module, parsed.interface) args = interface_parser.parse_args(args=argv[3:]) run_instance(interface, args) diff --git a/nipype/utils/tests/__init__.py b/nipype/utils/tests/__init__.py index d43673c6ac..00d7c65d5a 100644 --- a/nipype/utils/tests/__init__.py +++ b/nipype/utils/tests/__init__.py @@ -18,4 +18,3 @@ you contribute. Often, writing tests for your code before the code is written helps to frame your thoughts about what the code should look like. 
""" - diff --git a/nipype/utils/tests/test_cmd.py b/nipype/utils/tests/test_cmd.py index 91e565e6c3..a4bb0ae060 100644 --- a/nipype/utils/tests/test_cmd.py +++ b/nipype/utils/tests/test_cmd.py @@ -3,7 +3,8 @@ from future import standard_library standard_library.install_aliases() -import unittest, sys +import unittest +import sys from contextlib import contextmanager from nipype.external.six import PY2, PY3, StringIO diff --git a/nipype/utils/tests/test_docparse.py b/nipype/utils/tests/test_docparse.py index 9e3871d301..84f3d99097 100644 --- a/nipype/utils/tests/test_docparse.py +++ b/nipype/utils/tests/test_docparse.py @@ -23,9 +23,9 @@ class Foo(object): fmtd_doc = """Parameters ---------- -outline : +outline : something about an outline -fun : +fun : intensity of fun factor Others Parameters @@ -50,9 +50,9 @@ def test_build_doc(): The name of the input file outfile : str The name of the output file -outline : +outline : something about an outline -fun : +fun : intensity of fun factor Others Parameters diff --git a/nipype/utils/tmpdirs.py b/nipype/utils/tmpdirs.py index 274c08caed..aa7c5da67c 100644 --- a/nipype/utils/tmpdirs.py +++ b/nipype/utils/tmpdirs.py @@ -44,5 +44,3 @@ def __enter__(self): def __exit__(self, exc, value, tb): os.chdir(self._pwd) return super(InTemporaryDirectory, self).__exit__(exc, value, tb) - - diff --git a/nipype/workflows/dmri/camino/__init__.py b/nipype/workflows/dmri/camino/__init__.py index ef72229cde..fef8c6aae1 100644 --- a/nipype/workflows/dmri/camino/__init__.py +++ b/nipype/workflows/dmri/camino/__init__.py @@ -2,4 +2,3 @@ from .diffusion import create_camino_dti_pipeline from .connectivity_mapping import create_connectivity_pipeline from .group_connectivity import create_group_connectivity_pipeline - diff --git a/nipype/workflows/dmri/camino/connectivity_mapping.py b/nipype/workflows/dmri/camino/connectivity_mapping.py index 7cbb1ba996..a0838b4468 100644 --- a/nipype/workflows/dmri/camino/connectivity_mapping.py +++ b/nipype/workflows/dmri/camino/connectivity_mapping.py @@ -346,7 +346,7 @@ def create_connectivity_pipeline(name="connectivity"): mapping.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) mapping.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) mapping.connect([(inputnode_within, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + (('dwi', get_data_dims), 'data_dims')])]) mapping.connect([(fa, fa2nii, [('fa', 'data_file')])]) mapping.connect([(inputnode_within, fa2nii, [(('dwi', get_affine), 'affine')])]) mapping.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])]) @@ -354,7 +354,7 @@ def create_connectivity_pipeline(name="connectivity"): mapping.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) mapping.connect([(trace, analyzeheader_trace, [("trace", "in_file")])]) mapping.connect([(inputnode_within, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + (('dwi', get_data_dims), 'data_dims')])]) mapping.connect([(trace, trace2nii, [('trace', 'data_file')])]) mapping.connect([(inputnode_within, trace2nii, [(('dwi', get_affine), 'affine')])]) mapping.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])]) @@ -371,7 +371,7 @@ def create_connectivity_pipeline(name="connectivity"): (camino2trackvis, trk2camino, [['trackvis', 'in_file']]) ]) mapping.connect([(inputnode_within, camino2trackvis, [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + (('dwi', 
get_data_dims), 'data_dims')])]) """ Here the CMTK connectivity mapping nodes are connected. @@ -451,16 +451,16 @@ def create_connectivity_pipeline(name="connectivity"): inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir", "resolution_network_file"]), name="inputnode") outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa", - "struct", - "trace", - "tracts", - "connectome", - "cmatrix", - "networks", - "rois", - "mean_fiber_length", - "fiber_length_std", - "tensors"]), + "struct", + "trace", + "tracts", + "connectome", + "cmatrix", + "networks", + "rois", + "mean_fiber_length", + "fiber_length_std", + "tensors"]), name="outputnode") connectivity = pe.Workflow(name="connectivity") @@ -485,6 +485,6 @@ def create_connectivity_pipeline(name="connectivity"): ("mri_convert_Brain.out_file", "struct"), ("trace2nii.nifti_file", "trace"), ("dtifit.tensor_fitted", "tensors")]) - ]) + ]) return connectivity diff --git a/nipype/workflows/dmri/camino/diffusion.py b/nipype/workflows/dmri/camino/diffusion.py index 3e176491f0..1c5c174e2d 100644 --- a/nipype/workflows/dmri/camino/diffusion.py +++ b/nipype/workflows/dmri/camino/diffusion.py @@ -153,7 +153,7 @@ def create_camino_dti_pipeline(name="dtiproc"): tractography.connect([(inputnode1, image2voxel, [("dwi", "in_file")]), (inputnode1, fsl2scheme, [("bvecs", "bvec_file"), ("bvals", "bval_file")]) - ]) + ]) """ Tensor fitting @@ -161,7 +161,7 @@ def create_camino_dti_pipeline(name="dtiproc"): tractography.connect([(image2voxel, dtifit, [['voxel_order', 'in_file']]), (fsl2scheme, dtifit, [['scheme', 'scheme_file']]) - ]) + ]) """ Workflow for applying DT streamline tractogpahy @@ -198,7 +198,7 @@ def create_camino_dti_pipeline(name="dtiproc"): tractography.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) tractography.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) tractography.connect([(inputnode1, analyzeheader_fa, [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + (('dwi', get_data_dims), 'data_dims')])]) tractography.connect([(fa, fa2nii, [('fa', 'data_file')])]) tractography.connect([(inputnode1, fa2nii, [(('dwi', get_affine), 'affine')])]) tractography.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])]) @@ -206,7 +206,7 @@ def create_camino_dti_pipeline(name="dtiproc"): tractography.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) tractography.connect([(trace, analyzeheader_trace, [("trace", "in_file")])]) tractography.connect([(inputnode1, analyzeheader_trace, [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + (('dwi', get_data_dims), 'data_dims')])]) tractography.connect([(trace, trace2nii, [('trace', 'data_file')])]) tractography.connect([(inputnode1, trace2nii, [(('dwi', get_affine), 'affine')])]) tractography.connect([(analyzeheader_trace, trace2nii, [('header', 'header_file')])]) @@ -216,18 +216,18 @@ def create_camino_dti_pipeline(name="dtiproc"): tractography.connect([(trackpico, cam2trk_pico, [('tracked', 'in_file')])]) tractography.connect([(trackdt, cam2trk_dt, [('tracked', 'in_file')])]) tractography.connect([(inputnode1, cam2trk_pico, [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + (('dwi', get_data_dims), 'data_dims')])]) tractography.connect([(inputnode1, cam2trk_dt, [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) + (('dwi', get_data_dims), 'data_dims')])]) inputnode = 
pe.Node(interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), name="inputnode") outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa", - "trace", - "tracts_pico", - "tracts_dt", - "tensors"]), + "trace", + "tracts_pico", + "tracts_dt", + "tensors"]), name="outputnode") workflow = pe.Workflow(name=name) @@ -242,6 +242,6 @@ def create_camino_dti_pipeline(name="dtiproc"): ("fa2nii.nifti_file", "fa"), ("trace2nii.nifti_file", "trace"), ("dtifit.tensor_fitted", "tensors")]) - ]) + ]) return workflow diff --git a/nipype/workflows/dmri/camino/group_connectivity.py b/nipype/workflows/dmri/camino/group_connectivity.py index f2a03c1140..1f323bda3d 100644 --- a/nipype/workflows/dmri/camino/group_connectivity.py +++ b/nipype/workflows/dmri/camino/group_connectivity.py @@ -75,7 +75,7 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ datasink.inputs.base_directory = output_dir datasink.inputs.container = group_id - l1pipeline = pe.Workflow(name="l1pipeline_"+group_id) + l1pipeline = pe.Workflow(name="l1pipeline_" + group_id) l1pipeline.base_dir = output_dir l1pipeline.base_output_dir = group_id l1pipeline.connect([(subj_infosource, datasource, [('subject_id', 'subject_id')])]) @@ -83,7 +83,7 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ l1pipeline.connect([(datasource, conmapper, [("dwi", "inputnode.dwi"), ("bvals", "inputnode.bvals"), ("bvecs", "inputnode.bvecs"), - ])]) + ])]) l1pipeline.connect([(conmapper, datasink, [("outputnode.connectome", "@l1output.cff"), ("outputnode.fa", "@l1output.fa"), ("outputnode.tracts", "@l1output.tracts"), diff --git a/nipype/workflows/dmri/connectivity/group_connectivity.py b/nipype/workflows/dmri/connectivity/group_connectivity.py index 2f8438bfe8..e4651febd9 100644 --- a/nipype/workflows/dmri/connectivity/group_connectivity.py +++ b/nipype/workflows/dmri/connectivity/group_connectivity.py @@ -129,9 +129,9 @@ def create_merge_networks_by_group_workflow(group_list, group_id, data_dir, subj [(group_infosource, l2infosource, [('group_id', 'group_id')])]) l2pipeline.connect([ - (l2infosource, l2source, [('group_id', 'group_id')]), - (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]), - ]) + (l2infosource, l2source, [('group_id', 'group_id')]), + (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]), + ]) l2pipeline.connect( [(l2inputnode, MergeCNetworks, [('CFFfiles', 'in_files')])]) @@ -210,14 +210,14 @@ def create_merge_network_results_by_group_workflow(group_list, group_id, data_di [(group_infosource, l2infosource, [('group_id', 'group_id')])]) l2pipeline.connect([ - (l2infosource, l2source, [('group_id', 'group_id')]), - (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]), - (l2source, l2inputnode, [( - 'CSVmatrices', 'CSVmatrices')]), - (l2source, l2inputnode, [('CSVnodal', 'CSVnodal')]), - (l2source, l2inputnode, [('CSVglobal', 'CSVglobal')]), - (l2source, l2inputnode, [('CSVfibers', 'CSVfibers')]), - ]) + (l2infosource, l2source, [('group_id', 'group_id')]), + (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]), + (l2source, l2inputnode, [( + 'CSVmatrices', 'CSVmatrices')]), + (l2source, l2inputnode, [('CSVnodal', 'CSVnodal')]), + (l2source, l2inputnode, [('CSVglobal', 'CSVglobal')]), + (l2source, l2inputnode, [('CSVfibers', 'CSVfibers')]), + ]) l2pipeline.connect( [(l2inputnode, MergeCNetworks, [('CFFfiles', 'in_files')])]) @@ -337,9 +337,9 @@ def create_merge_group_networks_workflow(group_list, data_dir, subjects_dir, out l3pipeline = pe.Workflow(name="l3output") 
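For readers skimming the many connect() re-indentations in this changeset: each entry in the list passed to Workflow.connect is a (source, destination, [(source_port, dest_port), ...]) tuple, so the re-alignment moves continuation lines only and never changes the wiring. A minimal self-contained example (node names are illustrative):

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as niu

    src = pe.Node(niu.IdentityInterface(fields=['x']), name='src')
    dst = pe.Node(niu.IdentityInterface(fields=['x']), name='dst')
    wf = pe.Workflow(name='example')
    wf.connect([
        (src, dst, [('x', 'x')]),  # route src.x into dst.x
    ])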
l3pipeline.base_dir = output_dir l3pipeline.connect([ - (l3infosource, l3source, [('group_id', 'group_id')]), - (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]), - ]) + (l3infosource, l3source, [('group_id', 'group_id')]), + (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]), + ]) l3pipeline.connect( [(l3inputnode, MergeCNetworks_grp, [('Group_CFFs', 'in_files')])]) @@ -400,12 +400,12 @@ def create_merge_group_network_results_workflow(group_list, data_dir, subjects_d l3pipeline = pe.Workflow(name="l3output") l3pipeline.base_dir = output_dir l3pipeline.connect([ - (l3infosource, l3source, [('group_id', 'group_id')]), - (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]), - (l3source, l3inputnode, [('CSVnodemetrics', 'Group_CSVnodemetrics')]), - (l3source, l3inputnode, [('CSVglobalmetrics', 'Group_CSVglobalmetrics')]), - (l3source, l3inputnode, [('CSVmatrices', 'Group_CSVmatrices')]), - ]) + (l3infosource, l3source, [('group_id', 'group_id')]), + (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]), + (l3source, l3inputnode, [('CSVnodemetrics', 'Group_CSVnodemetrics')]), + (l3source, l3inputnode, [('CSVglobalmetrics', 'Group_CSVglobalmetrics')]), + (l3source, l3inputnode, [('CSVmatrices', 'Group_CSVmatrices')]), + ]) l3pipeline.connect([(l3inputnode, MergeCNetworks_grp, [('Group_CFFs', 'in_files')])]) l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file', '@l3output')])]) @@ -494,17 +494,17 @@ def create_average_networks_by_group_workflow(group_list, data_dir, subjects_dir l4pipeline = pe.Workflow(name="l4output") l4pipeline.base_dir = output_dir l4pipeline.connect([ - (l4infosource, l4source_grp1, [('group_id1', 'group_id')]), - (l4infosource, l4source_grp2, [('group_id2', 'group_id')]), - (l4source_grp1, l4inputnode, [('CMatrices', 'CMatrices_grp1')]), - (l4source_grp2, l4inputnode, [('CMatrices', 'CMatrices_grp2')]), - (l4source_grp1, l4inputnode, [('networks', 'networks_grp1')]), - (l4source_grp2, l4inputnode, [('networks', 'networks_grp2')]), - (l4source_grp1, l4inputnode, [('fibmean', 'fibmean_grp1')]), - (l4source_grp2, l4inputnode, [('fibmean', 'fibmean_grp2')]), - (l4source_grp1, l4inputnode, [('fibdev', 'fibdev_grp1')]), - (l4source_grp2, l4inputnode, [('fibdev', 'fibdev_grp2')]), - ]) + (l4infosource, l4source_grp1, [('group_id1', 'group_id')]), + (l4infosource, l4source_grp2, [('group_id2', 'group_id')]), + (l4source_grp1, l4inputnode, [('CMatrices', 'CMatrices_grp1')]), + (l4source_grp2, l4inputnode, [('CMatrices', 'CMatrices_grp2')]), + (l4source_grp1, l4inputnode, [('networks', 'networks_grp1')]), + (l4source_grp2, l4inputnode, [('networks', 'networks_grp2')]), + (l4source_grp1, l4inputnode, [('fibmean', 'fibmean_grp1')]), + (l4source_grp2, l4inputnode, [('fibmean', 'fibmean_grp2')]), + (l4source_grp1, l4inputnode, [('fibdev', 'fibdev_grp1')]), + (l4source_grp2, l4inputnode, [('fibdev', 'fibdev_grp2')]), + ]) l4pipeline.connect([(l4inputnode, average_networks_grp1, [('networks_grp1', 'in_files')])]) l4pipeline.connect([(l4infosource, average_networks_grp1, [('group_id1', 'group_id')])]) @@ -524,4 +524,3 @@ def create_average_networks_by_group_workflow(group_list, data_dir, subjects_dir l4pipeline.connect([(merge_gexf_averages, l4datasink, [('out', '@l4output.gexf')])]) return l4pipeline - diff --git a/nipype/workflows/dmri/connectivity/nx.py b/nipype/workflows/dmri/connectivity/nx.py index 804f77ec84..4fe2f89141 100644 --- a/nipype/workflows/dmri/connectivity/nx.py +++ b/nipype/workflows/dmri/connectivity/nx.py @@ -83,7 +83,7 @@ def 
create_networkx_pipeline(name="networkx", extra_column_heading="subject"): pipeline.connect([(ntwkMetrics, mergeNetworks, [("gpickled_network_files", "in2")])]) outputnode = pe.Node(interface=util.IdentityInterface(fields=["network_files", - "csv_files", "matlab_files", "node_csv", "global_csv"]), + "csv_files", "matlab_files", "node_csv", "global_csv"]), name="outputnode") pipeline.connect([(MergeCSVFiles_node, outputnode, [("csv_file", "node_csv")])]) diff --git a/nipype/workflows/dmri/dipy/denoise.py b/nipype/workflows/dmri/dipy/denoise.py index 6519584783..7b7afc47ee 100644 --- a/nipype/workflows/dmri/dipy/denoise.py +++ b/nipype/workflows/dmri/dipy/denoise.py @@ -30,18 +30,18 @@ def nlmeans_pipeline(name='Denoise', name='outputnode') nmask = pe.Node(niu.Function(input_names=['in_file', 'in_mask'], - output_names=['out_file'], function=bg_mask), + output_names=['out_file'], function=bg_mask), name='NoiseMsk') nlmeans = pe.Node(dipy.Denoise(**params), name='NLMeans') wf = pe.Workflow(name=name) wf.connect([ - (inputnode, nmask, [('in_file', 'in_file'), - ('in_mask', 'in_mask')]), - (inputnode, nlmeans, [('in_file', 'in_file'), - ('in_mask', 'in_mask')]), - (nmask, nlmeans, [('out_file', 'noise_mask')]), - (nlmeans, outputnode, [('out_file', 'out_file')]) + (inputnode, nmask, [('in_file', 'in_file'), + ('in_mask', 'in_mask')]), + (inputnode, nlmeans, [('in_file', 'in_file'), + ('in_mask', 'in_mask')]), + (nmask, nlmeans, [('out_file', 'noise_mask')]), + (nlmeans, outputnode, [('out_file', 'out_file')]) ]) return wf diff --git a/nipype/workflows/dmri/fsl/artifacts.py b/nipype/workflows/dmri/fsl/artifacts.py index 03936c5695..dc7387f0c0 100644 --- a/nipype/workflows/dmri/fsl/artifacts.py +++ b/nipype/workflows/dmri/fsl/artifacts.py @@ -73,36 +73,34 @@ def all_fmb_pipeline(name='hmc_sdc_ecc', fugue_params=dict(smooth3d=2.0)): wf = pe.Workflow(name=name) wf.connect([ - (inputnode, hmc, [('in_file', 'inputnode.in_file'), - ('in_bvec', 'inputnode.in_bvec'), - ('in_bval', 'inputnode.in_bval')]), - (inputnode, list_b0, [('in_bval', 'in_bval')]), - (inputnode, avg_b0_0, [('in_file', 'in_file')]), - (list_b0, avg_b0_0, [('out_idx', 'index')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), - (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]), - (hmc, sdc, [ - ('outputnode.out_file', 'inputnode.in_file')]), - (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), - (inputnode, sdc, [('bmap_pha', 'inputnode.bmap_pha'), - ('bmap_mag', 'inputnode.bmap_mag'), - ('epi_param', 'inputnode.settings')]), - (list_b0, sdc, [('out_idx', 'inputnode.in_ref')]), - (hmc, ecc, [ - ('outputnode.out_xfms', 'inputnode.in_xfms')]), - (inputnode, ecc, [('in_file', 'inputnode.in_file'), - ('in_bval', 'inputnode.in_bval')]), - (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]), - (ecc, avg_b0_1, [('outputnode.out_file', 'in_file')]), - (list_b0, avg_b0_1, [('out_idx', 'index')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), - (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]), - (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]), - (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]), - (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]), - (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]), - (unwarp, outputnode, [('outputnode.out_file', 'out_file')]), - (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) + (inputnode, hmc, [('in_file', 'inputnode.in_file'), + ('in_bvec', 'inputnode.in_bvec'), + ('in_bval', 'inputnode.in_bval')]), + (inputnode, list_b0, [('in_bval', 
'in_bval')]), + (inputnode, avg_b0_0, [('in_file', 'in_file')]), + (list_b0, avg_b0_0, [('out_idx', 'index')]), + (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), + (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]), + (hmc, sdc, [('outputnode.out_file', 'inputnode.in_file')]), + (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), + (inputnode, sdc, [('bmap_pha', 'inputnode.bmap_pha'), + ('bmap_mag', 'inputnode.bmap_mag'), + ('epi_param', 'inputnode.settings')]), + (list_b0, sdc, [('out_idx', 'inputnode.in_ref')]), + (hmc, ecc, [('outputnode.out_xfms', 'inputnode.in_xfms')]), + (inputnode, ecc, [('in_file', 'inputnode.in_file'), + ('in_bval', 'inputnode.in_bval')]), + (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]), + (ecc, avg_b0_1, [('outputnode.out_file', 'in_file')]), + (list_b0, avg_b0_1, [('out_idx', 'index')]), + (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), + (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]), + (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]), + (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]), + (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]), + (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]), + (unwarp, outputnode, [('outputnode.out_file', 'out_file')]), + (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) ]) return wf @@ -163,33 +161,31 @@ def all_peb_pipeline(name='hmc_sdc_ecc', wf = pe.Workflow(name=name) wf.connect([ - (inputnode, hmc, [('in_file', 'inputnode.in_file'), - ('in_bvec', 'inputnode.in_bvec'), - ('in_bval', 'inputnode.in_bval')]), - (inputnode, avg_b0_0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), - (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]), - (hmc, sdc, [ - ('outputnode.out_file', 'inputnode.in_file')]), - (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), - (inputnode, sdc, [('in_bval', 'inputnode.in_bval'), - ('alt_file', 'inputnode.alt_file')]), - (inputnode, ecc, [('in_file', 'inputnode.in_file'), - ('in_bval', 'inputnode.in_bval')]), - (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]), - (hmc, ecc, [ - ('outputnode.out_xfms', 'inputnode.in_xfms')]), - (ecc, avg_b0_1, [('outputnode.out_file', 'in_dwi')]), - (inputnode, avg_b0_1, [('in_bval', 'in_bval')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), - (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]), - (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]), - (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]), - (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]), - (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]), - (unwarp, outputnode, [('outputnode.out_file', 'out_file')]), - (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) + (inputnode, hmc, [('in_file', 'inputnode.in_file'), + ('in_bvec', 'inputnode.in_bvec'), + ('in_bval', 'inputnode.in_bval')]), + (inputnode, avg_b0_0, [('in_file', 'in_dwi'), + ('in_bval', 'in_bval')]), + (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), + (bet_dwi0, hmc, [('mask_file', 'inputnode.in_mask')]), + (hmc, sdc, [('outputnode.out_file', 'inputnode.in_file')]), + (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), + (inputnode, sdc, [('in_bval', 'inputnode.in_bval'), + ('alt_file', 'inputnode.alt_file')]), + (inputnode, ecc, [('in_file', 'inputnode.in_file'), + ('in_bval', 'inputnode.in_bval')]), + (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]), + (hmc, ecc, [('outputnode.out_xfms', 'inputnode.in_xfms')]), + (ecc, avg_b0_1, [('outputnode.out_file', 
'in_dwi')]), + (inputnode, avg_b0_1, [('in_bval', 'in_bval')]), + (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), + (inputnode, unwarp, [('in_file', 'inputnode.in_dwi')]), + (hmc, unwarp, [('outputnode.out_xfms', 'inputnode.in_hmc')]), + (ecc, unwarp, [('outputnode.out_xfms', 'inputnode.in_ecc')]), + (sdc, unwarp, [('outputnode.out_warp', 'inputnode.in_sdc')]), + (hmc, outputnode, [('outputnode.out_bvec', 'out_bvec')]), + (unwarp, outputnode, [('outputnode.out_file', 'out_file')]), + (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) ]) return wf @@ -261,30 +257,29 @@ def _gen_index(in_file): wf = pe.Workflow(name=name) wf.connect([ - (inputnode, avg_b0_0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), - (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), - (inputnode, sdc, [('in_file', 'inputnode.in_file'), - ('alt_file', 'inputnode.alt_file'), - ('in_bval', 'inputnode.in_bval')]), - (sdc, ecc, [('topup.out_enc_file', 'in_acqp'), - ('topup.out_fieldcoef', - 'in_topup_fieldcoef'), - ('topup.out_movpar', 'in_topup_movpar')]), - (bet_dwi0, ecc, [('mask_file', 'in_mask')]), - (inputnode, ecc, [('in_file', 'in_file'), - (('in_file', _gen_index), 'in_index'), - ('in_bval', 'in_bval'), - ('in_bvec', 'in_bvec')]), - (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]), - (ecc, rot_bvec, [('out_parameter', 'eddy_params')]), - (ecc, avg_b0_1, [('out_corrected', 'in_dwi')]), - (inputnode, avg_b0_1, [('in_bval', 'in_bval')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), - (ecc, outputnode, [('out_corrected', 'out_file')]), - (rot_bvec, outputnode, [('out_file', 'out_bvec')]), - (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) + (inputnode, avg_b0_0, [('in_file', 'in_dwi'), + ('in_bval', 'in_bval')]), + (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), + (bet_dwi0, sdc, [('mask_file', 'inputnode.in_mask')]), + (inputnode, sdc, [('in_file', 'inputnode.in_file'), + ('alt_file', 'inputnode.alt_file'), + ('in_bval', 'inputnode.in_bval')]), + (sdc, ecc, [('topup.out_enc_file', 'in_acqp'), + ('topup.out_fieldcoef', 'in_topup_fieldcoef'), + ('topup.out_movpar', 'in_topup_movpar')]), + (bet_dwi0, ecc, [('mask_file', 'in_mask')]), + (inputnode, ecc, [('in_file', 'in_file'), + (('in_file', _gen_index), 'in_index'), + ('in_bval', 'in_bval'), + ('in_bvec', 'in_bvec')]), + (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]), + (ecc, rot_bvec, [('out_parameter', 'eddy_params')]), + (ecc, avg_b0_1, [('out_corrected', 'in_dwi')]), + (inputnode, avg_b0_1, [('in_bval', 'in_bval')]), + (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), + (ecc, outputnode, [('out_corrected', 'out_file')]), + (rot_bvec, outputnode, [('out_file', 'out_bvec')]), + (bet_dwi1, outputnode, [('mask_file', 'out_mask')]) ]) return wf @@ -379,20 +374,20 @@ def hmc_pipeline(name='motion_correct'): wf = pe.Workflow(name=name) wf.connect([ - (inputnode, split, [('in_file', 'in_file'), - ('in_bval', 'in_bval'), - ('ref_num', 'ref_num')]), - (inputnode, flirt, [('in_mask', 'inputnode.ref_mask')]), - (split, flirt, [('out_ref', 'inputnode.reference'), - ('out_mov', 'inputnode.in_file'), - ('out_bval', 'inputnode.in_bval')]), - (flirt, insmat, [('outputnode.out_xfms', 'inlist')]), - (split, insmat, [('volid', 'volid')]), - (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]), - (insmat, rot_bvec, [('out', 'in_matrix')]), - (rot_bvec, outputnode, [('out_file', 'out_bvec')]), - (flirt, outputnode, [('outputnode.out_file', 'out_file')]), - (insmat, outputnode, [('out', 'out_xfms')]) + (inputnode, split, 
[('in_file', 'in_file'), + ('in_bval', 'in_bval'), + ('ref_num', 'ref_num')]), + (inputnode, flirt, [('in_mask', 'inputnode.ref_mask')]), + (split, flirt, [('out_ref', 'inputnode.reference'), + ('out_mov', 'inputnode.in_file'), + ('out_bval', 'inputnode.in_bval')]), + (flirt, insmat, [('outputnode.out_xfms', 'inlist')]), + (split, insmat, [('volid', 'volid')]), + (inputnode, rot_bvec, [('in_bvec', 'in_bvec')]), + (insmat, rot_bvec, [('out', 'in_matrix')]), + (rot_bvec, outputnode, [('out_file', 'out_bvec')]), + (flirt, outputnode, [('outputnode.out_file', 'out_file')]), + (insmat, outputnode, [('out', 'out_xfms')]) ]) return wf @@ -491,27 +486,27 @@ def ecc_pipeline(name='eddy_correct'): wf = pe.Workflow(name=name) wf.connect([ - (inputnode, avg_b0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (inputnode, pick_dws, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (inputnode, merge, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (inputnode, flirt, [('in_mask', 'inputnode.ref_mask'), - ('in_xfms', 'inputnode.in_xfms'), - ('in_bval', 'inputnode.in_bval')]), - (inputnode, get_mat, [('in_bval', 'in_bval')]), - (avg_b0, flirt, [('out_file', 'inputnode.reference')]), - (pick_dws, flirt, [('out_file', 'inputnode.in_file')]), - (flirt, get_mat, [('outputnode.out_xfms', 'in_xfms')]), - (flirt, mult, [(('outputnode.out_xfms', _xfm_jacobian), - 'operand_value')]), - (flirt, split, [('outputnode.out_file', 'in_file')]), - (split, mult, [('out_files', 'in_file')]), - (mult, thres, [('out_file', 'in_file')]), - (thres, merge, [('out_file', 'in_corrected')]), - (get_mat, outputnode, [('out_files', 'out_xfms')]), - (merge, outputnode, [('out_file', 'out_file')]) + (inputnode, avg_b0, [('in_file', 'in_dwi'), + ('in_bval', 'in_bval')]), + (inputnode, pick_dws, [('in_file', 'in_dwi'), + ('in_bval', 'in_bval')]), + (inputnode, merge, [('in_file', 'in_dwi'), + ('in_bval', 'in_bval')]), + (inputnode, flirt, [('in_mask', 'inputnode.ref_mask'), + ('in_xfms', 'inputnode.in_xfms'), + ('in_bval', 'inputnode.in_bval')]), + (inputnode, get_mat, [('in_bval', 'in_bval')]), + (avg_b0, flirt, [('out_file', 'inputnode.reference')]), + (pick_dws, flirt, [('out_file', 'inputnode.in_file')]), + (flirt, get_mat, [('outputnode.out_xfms', 'in_xfms')]), + (flirt, mult, [(('outputnode.out_xfms', _xfm_jacobian), + 'operand_value')]), + (flirt, split, [('outputnode.out_file', 'in_file')]), + (split, mult, [('out_files', 'in_file')]), + (mult, thres, [('out_file', 'in_file')]), + (thres, merge, [('out_file', 'in_corrected')]), + (get_mat, outputnode, [('out_files', 'out_xfms')]), + (merge, outputnode, [('out_file', 'out_file')]) ]) return wf @@ -644,58 +639,58 @@ def sdc_fmb(name='fmb_correction', interp='Linear', wf = pe.Workflow(name=name) wf.connect([ - (inputnode, r_params, [('settings', 'in_file')]), - (r_params, eff_echo, [('echospacing', 'echospacing'), - ('acc_factor', 'acc_factor')]), - (inputnode, pha2rads, [('bmap_pha', 'in_file')]), - (inputnode, firstmag, [('bmap_mag', 'in_file')]), - (inputnode, baseline, [('in_file', 'in_file'), - ('in_ref', 'index')]), - (firstmag, n4, [('roi_file', 'input_image')]), - (n4, bet, [('output_image', 'in_file')]), - (bet, dilate, [('mask_file', 'in_file')]), - (pha2rads, prelude, [('out_file', 'phase_file')]), - (n4, prelude, [('output_image', 'magnitude_file')]), - (dilate, prelude, [('out_file', 'mask_file')]), - (r_params, rad2rsec, [('delta_te', 'delta_te')]), - (prelude, rad2rsec, [('unwrapped_phase_file', 'in_file')]), - - (baseline, fmm2b0, [('out_file', 
'fixed_image')]), - (n4, fmm2b0, [('output_image', 'moving_image')]), - (inputnode, fmm2b0, [('in_mask', 'fixed_image_mask')]), - (dilate, fmm2b0, [('out_file', 'moving_image_mask')]), - - (baseline, applyxfm, [('out_file', 'reference_image')]), - (rad2rsec, applyxfm, [('out_file', 'input_image')]), - (fmm2b0, applyxfm, [ + (inputnode, r_params, [('settings', 'in_file')]), + (r_params, eff_echo, [('echospacing', 'echospacing'), + ('acc_factor', 'acc_factor')]), + (inputnode, pha2rads, [('bmap_pha', 'in_file')]), + (inputnode, firstmag, [('bmap_mag', 'in_file')]), + (inputnode, baseline, [('in_file', 'in_file'), + ('in_ref', 'index')]), + (firstmag, n4, [('roi_file', 'input_image')]), + (n4, bet, [('output_image', 'in_file')]), + (bet, dilate, [('mask_file', 'in_file')]), + (pha2rads, prelude, [('out_file', 'phase_file')]), + (n4, prelude, [('output_image', 'magnitude_file')]), + (dilate, prelude, [('out_file', 'mask_file')]), + (r_params, rad2rsec, [('delta_te', 'delta_te')]), + (prelude, rad2rsec, [('unwrapped_phase_file', 'in_file')]), + + (baseline, fmm2b0, [('out_file', 'fixed_image')]), + (n4, fmm2b0, [('output_image', 'moving_image')]), + (inputnode, fmm2b0, [('in_mask', 'fixed_image_mask')]), + (dilate, fmm2b0, [('out_file', 'moving_image_mask')]), + + (baseline, applyxfm, [('out_file', 'reference_image')]), + (rad2rsec, applyxfm, [('out_file', 'input_image')]), + (fmm2b0, applyxfm, [ ('forward_transforms', 'transforms'), ('forward_invert_flags', 'invert_transform_flags')]), - (applyxfm, pre_fugue, [('output_image', 'fmap_in_file')]), - (inputnode, pre_fugue, [('in_mask', 'mask_file')]), - (pre_fugue, demean, [('fmap_out_file', 'in_file')]), - (inputnode, demean, [('in_mask', 'in_mask')]), - (demean, cleanup, [('out_file', 'inputnode.in_file')]), - (inputnode, cleanup, [('in_mask', 'inputnode.in_mask')]), - (cleanup, addvol, [('outputnode.out_file', 'in_file')]), - (inputnode, vsm, [('in_mask', 'mask_file')]), - (addvol, vsm, [('out_file', 'fmap_in_file')]), - (r_params, vsm, [('delta_te', 'asym_se_time')]), - (eff_echo, vsm, [('eff_echo', 'dwell_time')]), - (inputnode, split, [('in_file', 'in_file')]), - (split, unwarp, [('out_files', 'in_file')]), - (vsm, unwarp, [('shift_out_file', 'shift_in_file')]), - (r_params, unwarp, [ + (applyxfm, pre_fugue, [('output_image', 'fmap_in_file')]), + (inputnode, pre_fugue, [('in_mask', 'mask_file')]), + (pre_fugue, demean, [('fmap_out_file', 'in_file')]), + (inputnode, demean, [('in_mask', 'in_mask')]), + (demean, cleanup, [('out_file', 'inputnode.in_file')]), + (inputnode, cleanup, [('in_mask', 'inputnode.in_mask')]), + (cleanup, addvol, [('outputnode.out_file', 'in_file')]), + (inputnode, vsm, [('in_mask', 'mask_file')]), + (addvol, vsm, [('out_file', 'fmap_in_file')]), + (r_params, vsm, [('delta_te', 'asym_se_time')]), + (eff_echo, vsm, [('eff_echo', 'dwell_time')]), + (inputnode, split, [('in_file', 'in_file')]), + (split, unwarp, [('out_files', 'in_file')]), + (vsm, unwarp, [('shift_out_file', 'shift_in_file')]), + (r_params, unwarp, [ (('enc_dir', _fix_enc_dir), 'unwarp_direction')]), - (unwarp, thres, [('unwarped_file', 'in_file')]), - (thres, merge, [('out_file', 'in_files')]), - (r_params, vsm2dfm, [ + (unwarp, thres, [('unwarped_file', 'in_file')]), + (thres, merge, [('out_file', 'in_files')]), + (r_params, vsm2dfm, [ (('enc_dir', _fix_enc_dir), 'inputnode.enc_dir')]), - (merge, vsm2dfm, [('merged_file', 'inputnode.in_ref')]), - (vsm, vsm2dfm, [('shift_out_file', 'inputnode.in_vsm')]), - (merge, outputnode, [('merged_file', 
'out_file')]), - (vsm, outputnode, [('shift_out_file', 'out_vsm')]), - (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) + (merge, vsm2dfm, [('merged_file', 'inputnode.in_ref')]), + (vsm, vsm2dfm, [('shift_out_file', 'inputnode.in_vsm')]), + (merge, outputnode, [('merged_file', 'out_file')]), + (vsm, outputnode, [('shift_out_file', 'out_vsm')]), + (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) ]) return wf @@ -782,25 +777,25 @@ def sdc_peb(name='peb_correction', wf = pe.Workflow(name=name) wf.connect([ - (inputnode, b0_ref, [('in_file', 'in_file'), - (('ref_num', _checkrnum), 't_min')]), - (inputnode, b0_alt, [('alt_file', 'in_file'), - (('ref_num', _checkrnum), 't_min')]), - (b0_ref, b0_comb, [('roi_file', 'in1')]), - (b0_alt, b0_comb, [('roi_file', 'in2')]), - (b0_comb, b0_merge, [('out', 'in_files')]), - (b0_merge, topup, [('merged_file', 'in_file')]), - (topup, unwarp, [('out_fieldcoef', 'in_topup_fieldcoef'), - ('out_movpar', 'in_topup_movpar'), - ('out_enc_file', 'encoding_file')]), - (inputnode, unwarp, [('in_file', 'in_files')]), - (unwarp, outputnode, [('out_corrected', 'out_file')]), + (inputnode, b0_ref, [('in_file', 'in_file'), + (('ref_num', _checkrnum), 't_min')]), + (inputnode, b0_alt, [('alt_file', 'in_file'), + (('ref_num', _checkrnum), 't_min')]), + (b0_ref, b0_comb, [('roi_file', 'in1')]), + (b0_alt, b0_comb, [('roi_file', 'in2')]), + (b0_comb, b0_merge, [('out', 'in_files')]), + (b0_merge, topup, [('merged_file', 'in_file')]), + (topup, unwarp, [('out_fieldcoef', 'in_topup_fieldcoef'), + ('out_movpar', 'in_topup_movpar'), + ('out_enc_file', 'encoding_file')]), + (inputnode, unwarp, [('in_file', 'in_files')]), + (unwarp, outputnode, [('out_corrected', 'out_file')]), # (b0_ref, scaling, [('roi_file', 'in_file')]), # (scaling, vsm2dfm, [('factor', 'inputnode.scaling')]), - (b0_ref, vsm2dfm, [('roi_file', 'inputnode.in_ref')]), - (topup, vsm2dfm, [('out_field', 'inputnode.in_vsm')]), - (topup, outputnode, [('out_field', 'out_vsm')]), - (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) + (b0_ref, vsm2dfm, [('roi_file', 'inputnode.in_ref')]), + (topup, vsm2dfm, [('out_field', 'inputnode.in_vsm')]), + (topup, outputnode, [('out_field', 'out_vsm')]), + (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) ]) return wf @@ -850,16 +845,16 @@ def remove_bias(name='bias_correct'): wf = pe.Workflow(name=name) wf.connect([ - (inputnode, avg_b0, [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (avg_b0, n4, [('out_file', 'input_image')]), - (inputnode, n4, [('in_mask', 'mask_image')]), - (inputnode, split, [('in_file', 'in_file')]), - (n4, mult, [('bias_image', 'operand_files')]), - (split, mult, [('out_files', 'in_file')]), - (mult, thres, [('out_file', 'in_file')]), - (thres, merge, [('out_file', 'in_files')]), - (merge, outputnode, [('merged_file', 'out_file')]) + (inputnode, avg_b0, [('in_file', 'in_dwi'), + ('in_bval', 'in_bval')]), + (avg_b0, n4, [('out_file', 'input_image')]), + (inputnode, n4, [('in_mask', 'mask_image')]), + (inputnode, split, [('in_file', 'in_file')]), + (n4, mult, [('bias_image', 'operand_files')]), + (split, mult, [('out_files', 'in_file')]), + (mult, thres, [('out_file', 'in_file')]), + (thres, merge, [('out_file', 'in_files')]), + (merge, outputnode, [('merged_file', 'out_file')]) ]) return wf diff --git a/nipype/workflows/dmri/fsl/dti.py b/nipype/workflows/dmri/fsl/dti.py index b3427645b8..c12238786d 100644 --- a/nipype/workflows/dmri/fsl/dti.py +++ b/nipype/workflows/dmri/fsl/dti.py @@ -74,14 +74,14 @@ def 
create_bedpostx_pipeline(name='bedpostx', params={'n_fibres': 2, 'fudge': 1, wf = pe.Workflow(name=name) wf.connect([ - (inputnode, slice_dwi, [('dwi', 'in_file')]), - (inputnode, slice_msk, [('mask', 'in_file')]), - (slice_dwi, mask_dwi, [('out_files', 'in_file')]), - (slice_msk, mask_dwi, [('out_files', 'in_file2')]), - (slice_dwi, xfibres, [('out_files', 'dwi')]), - (mask_dwi, xfibres, [('out_file', 'mask')]), - (inputnode, xfibres, [('bvecs', 'bvecs'), - ('bvals', 'bvals')]), + (inputnode, slice_dwi, [('dwi', 'in_file')]), + (inputnode, slice_msk, [('mask', 'in_file')]), + (slice_dwi, mask_dwi, [('out_files', 'in_file')]), + (slice_msk, mask_dwi, [('out_files', 'in_file2')]), + (slice_dwi, xfibres, [('out_files', 'dwi')]), + (mask_dwi, xfibres, [('out_file', 'mask')]), + (inputnode, xfibres, [('bvecs', 'bvecs'), + ('bvals', 'bvals')]), (inputnode, make_dyads, [('mask', 'mask')]) ]) @@ -119,10 +119,10 @@ def merge_and_mean(name='mm'): wf = pe.Workflow(name=name) wf.connect([ - (inputnode, merge, [(('in_files', transpose), 'in_files')]), - (merge, mean, [('merged_file', 'in_file')]), + (inputnode, merge, [(('in_files', transpose), 'in_files')]), + (merge, mean, [('merged_file', 'in_file')]), (merge, outputnode, [('merged_file', 'merged')]), - (mean, outputnode, [('out_file', 'mean')]) + (mean, outputnode, [('out_file', 'mean')]) ]) return wf @@ -190,18 +190,18 @@ def bedpostx_parallel(name='bedpostx_parallel', wf = pe.Workflow(name=name) wf.connect([ - (inputnode, slice_dwi, [('dwi', 'in_file'), - ('mask', 'in_mask')]), - (slice_dwi, xfibres, [('out_files', 'dwi'), - ('out_masks', 'mask')]), - (inputnode, xfibres, [('bvecs', 'bvecs'), - ('bvals', 'bvals')]), - (inputnode, mrg_dyads, [('mask', 'in_reference')]), - (xfibres, mrg_dyads, [(('dyads', transpose), 'in_files')]), - (slice_dwi, mrg_dyads, [('out_index', 'in_index')]), - (inputnode, mrg_fsamp, [('mask', 'in_reference')]), - (xfibres, mrg_fsamp, [(('mean_fsamples', transpose), 'in_files')]), - (slice_dwi, mrg_fsamp, [('out_index', 'in_index')]), + (inputnode, slice_dwi, [('dwi', 'in_file'), + ('mask', 'in_mask')]), + (slice_dwi, xfibres, [('out_files', 'dwi'), + ('out_masks', 'mask')]), + (inputnode, xfibres, [('bvecs', 'bvecs'), + ('bvals', 'bvals')]), + (inputnode, mrg_dyads, [('mask', 'in_reference')]), + (xfibres, mrg_dyads, [(('dyads', transpose), 'in_files')]), + (slice_dwi, mrg_dyads, [('out_index', 'in_index')]), + (inputnode, mrg_fsamp, [('mask', 'in_reference')]), + (xfibres, mrg_fsamp, [(('mean_fsamples', transpose), 'in_files')]), + (slice_dwi, mrg_fsamp, [('out_index', 'in_index')]), (mrg_dyads, outputnode, [('merged_file', 'dyads')]), (mrg_fsamp, outputnode, [('merged_file', 'fsamples')]) ]) @@ -247,11 +247,11 @@ def merge_and_mean_parallel(name='mm'): wf = pe.Workflow(name=name) wf.connect([ - (inputnode, merge, [(('in_files', transpose), 'in_files'), - ('in_reference', 'in_reference'), - ('in_index', 'in_index')]), - (merge, mean, [('merged_file', 'in_file')]), + (inputnode, merge, [(('in_files', transpose), 'in_files'), + ('in_reference', 'in_reference'), + ('in_index', 'in_index')]), + (merge, mean, [('merged_file', 'in_file')]), (merge, outputnode, [('merged_file', 'merged')]), - (mean, outputnode, [('out_file', 'mean')]) + (mean, outputnode, [('out_file', 'mean')]) ]) return wf diff --git a/nipype/workflows/dmri/fsl/epi.py b/nipype/workflows/dmri/fsl/epi.py index 2fa7b012a0..bfeb2afeb9 100644 --- a/nipype/workflows/dmri/fsl/epi.py +++ b/nipype/workflows/dmri/fsl/epi.py @@ -89,7 +89,7 @@ def 
create_dmri_preprocessing(name='dMRI_preprocessing', use_fieldmap=True, fiel if use_fieldmap: # we have a fieldmap, so lets use it (yay!) susceptibility = create_epidewarp_pipeline( - fieldmap_registration=fieldmap_registration) + fieldmap_registration=fieldmap_registration) pipeline.connect([ (inputnode, motion, [('in_file', 'inputnode.in_file'), @@ -250,14 +250,14 @@ def create_eddy_correct_pipeline(name='eddy_correct'): name='outputnode') pipeline.connect([ - (inputnode, split, [('in_file', 'in_file')]), - (split, pick_ref, [('out_files', 'inlist')]), - (inputnode, pick_ref, [('ref_num', 'index')]), - (split, coregistration, [('out_files', 'in_file')]), - (pick_ref, coregistration, [('out', 'reference')]), - (coregistration, merge, [('out_file', 'in_files')]), - (merge, outputnode, [('merged_file', 'eddy_corrected')]) - ]) + (inputnode, split, [('in_file', 'in_file')]), + (split, pick_ref, [('out_files', 'inlist')]), + (inputnode, pick_ref, [('ref_num', 'index')]), + (split, coregistration, [('out_files', 'in_file')]), + (pick_ref, coregistration, [('out', 'reference')]), + (coregistration, merge, [('out_file', 'in_files')]), + (merge, outputnode, [('merged_file', 'eddy_corrected')]) + ]) return pipeline @@ -366,27 +366,27 @@ def fieldmap_correction(name='fieldmap_correction', nocheck=False): name='outputnode') pipeline.connect([ - (inputnode, select_mag, [('fieldmap_mag', 'in_file')]), - (inputnode, fslprep, [('fieldmap_pha', 'in_phase'), ('te_diff', 'delta_TE')]), - (inputnode, mask_mag, [('in_mask', 'mask_file')]), - (select_mag, mask_mag, [('roi_file', 'in_file')]), - (mask_mag, fslprep, [('out_file', 'in_magnitude')]), - (fslprep, vsm, [('out_fieldmap', 'phasemap_in_file')]), - (inputnode, vsm, [('fieldmap_mag', 'in_file'), - ('encoding_direction', 'unwarp_direction'), - (('te_diff', _ms2sec), 'asym_se_time'), - ('vsm_sigma', 'smooth2d'), - (('epi_echospacing', _ms2sec), 'dwell_time')]), - (mask_mag, vsm, [('out_file', 'mask_file')]), - (inputnode, dwi_split, [('in_file', 'in_file')]), - (dwi_split, dwi_applyxfm, [('out_files', 'in_file')]), - (mask_mag, dwi_applyxfm, [('out_file', 'mask_file')]), - (vsm, dwi_applyxfm, [('shift_out_file', 'shift_in_file')]), - (inputnode, dwi_applyxfm, [('encoding_direction', 'unwarp_direction')]), - (dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]), - (dwi_merge, outputnode, [('merged_file', 'epi_corrected')]), - (vsm, outputnode, [('shift_out_file', 'out_vsm')]) - ]) + (inputnode, select_mag, [('fieldmap_mag', 'in_file')]), + (inputnode, fslprep, [('fieldmap_pha', 'in_phase'), ('te_diff', 'delta_TE')]), + (inputnode, mask_mag, [('in_mask', 'mask_file')]), + (select_mag, mask_mag, [('roi_file', 'in_file')]), + (mask_mag, fslprep, [('out_file', 'in_magnitude')]), + (fslprep, vsm, [('out_fieldmap', 'phasemap_in_file')]), + (inputnode, vsm, [('fieldmap_mag', 'in_file'), + ('encoding_direction', 'unwarp_direction'), + (('te_diff', _ms2sec), 'asym_se_time'), + ('vsm_sigma', 'smooth2d'), + (('epi_echospacing', _ms2sec), 'dwell_time')]), + (mask_mag, vsm, [('out_file', 'mask_file')]), + (inputnode, dwi_split, [('in_file', 'in_file')]), + (dwi_split, dwi_applyxfm, [('out_files', 'in_file')]), + (mask_mag, dwi_applyxfm, [('out_file', 'mask_file')]), + (vsm, dwi_applyxfm, [('shift_out_file', 'shift_in_file')]), + (inputnode, dwi_applyxfm, [('encoding_direction', 'unwarp_direction')]), + (dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]), + (dwi_merge, outputnode, [('merged_file', 'epi_corrected')]), + (vsm, outputnode, [('shift_out_file', 
'out_vsm')]) + ]) return pipeline @@ -444,12 +444,12 @@ def topup_correction(name='topup_correction'): ) outputnode = pe.Node(niu.IdentityInterface( - fields=['out_fieldcoef', - 'out_movpar', - 'out_enc_file', - 'epi_corrected' - ]), name='outputnode' - ) + fields=['out_fieldcoef', + 'out_movpar', + 'out_enc_file', + 'epi_corrected' + ]), name='outputnode' + ) b0_dir = pe.Node(fsl.ExtractROI(t_size=1), name='b0_1') b0_rev = pe.Node(fsl.ExtractROI(t_size=1), name='b0_2') @@ -461,21 +461,21 @@ def topup_correction(name='topup_correction'): applytopup = pe.Node(fsl.ApplyTOPUP(in_index=[1, 2]), name='applytopup') pipeline.connect([ - (inputnode, b0_dir, [('in_file_dir', 'in_file'), ('ref_num', 't_min')]), - (inputnode, b0_rev, [('in_file_rev', 'in_file'), ('ref_num', 't_min')]), - (inputnode, combin2, [('in_file_dir', 'in1'), ('in_file_rev', 'in2')]), - (b0_dir, combin, [('roi_file', 'in1')]), - (b0_rev, combin, [('roi_file', 'in2')]), - (combin, merged, [('out', 'in_files')]), - (merged, topup, [('merged_file', 'in_file')]), - (inputnode, topup, [('encoding_direction', 'encoding_direction'), ('readout_times', 'readout_times')]), - (topup, applytopup, [('out_fieldcoef', 'in_topup_fieldcoef'), ('out_movpar', 'in_topup_movpar'), - ('out_enc_file', 'encoding_file')]), - (combin2, applytopup, [('out', 'in_files')]), - (topup, outputnode, [('out_fieldcoef', 'out_fieldcoef'), ('out_movpar', 'out_movpar'), - ('out_enc_file', 'out_enc_file')]), - (applytopup, outputnode, [('out_corrected', 'epi_corrected')]) - ]) + (inputnode, b0_dir, [('in_file_dir', 'in_file'), ('ref_num', 't_min')]), + (inputnode, b0_rev, [('in_file_rev', 'in_file'), ('ref_num', 't_min')]), + (inputnode, combin2, [('in_file_dir', 'in1'), ('in_file_rev', 'in2')]), + (b0_dir, combin, [('roi_file', 'in1')]), + (b0_rev, combin, [('roi_file', 'in2')]), + (combin, merged, [('out', 'in_files')]), + (merged, topup, [('merged_file', 'in_file')]), + (inputnode, topup, [('encoding_direction', 'encoding_direction'), ('readout_times', 'readout_times')]), + (topup, applytopup, [('out_fieldcoef', 'in_topup_fieldcoef'), ('out_movpar', 'in_topup_movpar'), + ('out_enc_file', 'encoding_file')]), + (combin2, applytopup, [('out', 'in_files')]), + (topup, outputnode, [('out_fieldcoef', 'out_fieldcoef'), ('out_movpar', 'out_movpar'), + ('out_enc_file', 'out_enc_file')]), + (applytopup, outputnode, [('out_corrected', 'epi_corrected')]) + ]) return pipeline @@ -596,26 +596,26 @@ def create_epidewarp_pipeline(name='epidewarp', fieldmap_registration=False): name='outputnode') pipeline.connect([ - (inputnode, dwell_time, [('epi_echospacing', 'dwell_time'), ('pi_accel_factor', 'pi_factor'), ('epi_rev_encoding', 'is_reverse_encoding')]), - (inputnode, select_mag, [('fieldmap_mag', 'in_file')]), - (inputnode, norm_pha, [('fieldmap_pha', 'in_file')]), - (select_mag, mask_mag, [('roi_file', 'in_file')]), - (mask_mag, mask_mag_dil, [('mask_file', 'in_file')]), - (select_mag, prelude, [('roi_file', 'magnitude_file')]), - (norm_pha, prelude, [('out_file', 'phase_file')]), - (mask_mag_dil, prelude, [('out_file', 'mask_file')]), - (prelude, fill_phase, [('unwrapped_phase_file', 'in_file')]), - (inputnode, vsm, [('fieldmap_mag', 'in_file')]), - (fill_phase, vsm, [('out_file', 'phasemap_in_file')]), - (inputnode, vsm, [(('te_diff', _ms2sec), 'asym_se_time'), ('vsm_sigma', 'smooth2d')]), - (dwell_time, vsm, [(('dwell_time', _ms2sec), 'dwell_time')]), - (mask_mag_dil, vsm, [('out_file', 'mask_file')]), - (mask_mag_dil, vsm_mean, [('out_file', 'mask_file')]), - (vsm, 
vsm_mean, [('unwarped_file', 'in_unwarped'), ('shift_out_file', 'in_file')]), - (inputnode, dwi_split, [('in_file', 'in_file')]), - (dwi_split, dwi_applyxfm, [('out_files', 'in_file')]), - (dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]), - (dwi_merge, outputnode, [('merged_file', 'epi_corrected')]) + (inputnode, dwell_time, [('epi_echospacing', 'dwell_time'), ('pi_accel_factor', 'pi_factor'), ('epi_rev_encoding', 'is_reverse_encoding')]), + (inputnode, select_mag, [('fieldmap_mag', 'in_file')]), + (inputnode, norm_pha, [('fieldmap_pha', 'in_file')]), + (select_mag, mask_mag, [('roi_file', 'in_file')]), + (mask_mag, mask_mag_dil, [('mask_file', 'in_file')]), + (select_mag, prelude, [('roi_file', 'magnitude_file')]), + (norm_pha, prelude, [('out_file', 'phase_file')]), + (mask_mag_dil, prelude, [('out_file', 'mask_file')]), + (prelude, fill_phase, [('unwrapped_phase_file', 'in_file')]), + (inputnode, vsm, [('fieldmap_mag', 'in_file')]), + (fill_phase, vsm, [('out_file', 'phasemap_in_file')]), + (inputnode, vsm, [(('te_diff', _ms2sec), 'asym_se_time'), ('vsm_sigma', 'smooth2d')]), + (dwell_time, vsm, [(('dwell_time', _ms2sec), 'dwell_time')]), + (mask_mag_dil, vsm, [('out_file', 'mask_file')]), + (mask_mag_dil, vsm_mean, [('out_file', 'mask_file')]), + (vsm, vsm_mean, [('unwarped_file', 'in_unwarped'), ('shift_out_file', 'in_file')]), + (inputnode, dwi_split, [('in_file', 'in_file')]), + (dwi_split, dwi_applyxfm, [('out_files', 'in_file')]), + (dwi_applyxfm, dwi_merge, [('unwarped_file', 'in_files')]), + (dwi_merge, outputnode, [('merged_file', 'epi_corrected')]) ]) if fieldmap_registration: @@ -631,7 +631,7 @@ def create_epidewarp_pipeline(name='epidewarp', fieldmap_registration=False): # vsmmag_name, mask_name ), log ) # Forward Map vsm_fwd = pe.Node(fsl.FUGUE( forward_warping=True), name='vsm_fwd') - vsm_reg = pe.Node(fsl.FLIRT(bins=256, cost='corratio', dof=6, interp='spline', searchr_x=[ + vsm_reg = pe.Node(fsl.FLIRT(bins=256, cost='corratio', dof=6, interp='spline', searchr_x=[ -10, 10], searchr_y=[-10, 10], searchr_z=[-10, 10]), name='vsm_registration') # 'flirt -in %s -ref %s -out %s -init %s -applyxfm' % ( vsmmag_name, ref_epi, vsmmag_name, magfw_mat_out ) vsm_applyxfm = pe.Node(fsl.ApplyXfm( @@ -641,26 +641,26 @@ def create_epidewarp_pipeline(name='epidewarp', fieldmap_registration=False): interp='nearestneighbour'), name='msk_apply_xfm') pipeline.connect([ - (inputnode, select_epi, [('in_file', 'in_file'), ('ref_num', 't_min')]), - (select_epi, vsm_reg, [('roi_file', 'reference')]), - (vsm, vsm_fwd, [('shift_out_file', 'shift_in_file')]), - (mask_mag_dil, vsm_fwd, [('out_file', 'mask_file')]), - (inputnode, vsm_fwd, [('fieldmap_mag', 'in_file')]), - (vsm_fwd, vsm_reg, [('warped_file', 'in_file')]), - (vsm_reg, msk_applyxfm, [('out_matrix_file', 'in_matrix_file')]), - (select_epi, msk_applyxfm, [('roi_file', 'reference')]), - (mask_mag_dil, msk_applyxfm, [('out_file', 'in_file')]), - (vsm_reg, vsm_applyxfm, [('out_matrix_file', 'in_matrix_file')]), - (select_epi, vsm_applyxfm, [('roi_file', 'reference')]), - (vsm_mean, vsm_applyxfm, [('out_file', 'in_file')]), - (msk_applyxfm, dwi_applyxfm, [('out_file', 'mask_file')]), - (vsm_applyxfm, dwi_applyxfm, [('out_file', 'shift_in_file')]) - ]) + (inputnode, select_epi, [('in_file', 'in_file'), ('ref_num', 't_min')]), + (select_epi, vsm_reg, [('roi_file', 'reference')]), + (vsm, vsm_fwd, [('shift_out_file', 'shift_in_file')]), + (mask_mag_dil, vsm_fwd, [('out_file', 'mask_file')]), + (inputnode, vsm_fwd, [('fieldmap_mag', 
'in_file')]), + (vsm_fwd, vsm_reg, [('warped_file', 'in_file')]), + (vsm_reg, msk_applyxfm, [('out_matrix_file', 'in_matrix_file')]), + (select_epi, msk_applyxfm, [('roi_file', 'reference')]), + (mask_mag_dil, msk_applyxfm, [('out_file', 'in_file')]), + (vsm_reg, vsm_applyxfm, [('out_matrix_file', 'in_matrix_file')]), + (select_epi, vsm_applyxfm, [('roi_file', 'reference')]), + (vsm_mean, vsm_applyxfm, [('out_file', 'in_file')]), + (msk_applyxfm, dwi_applyxfm, [('out_file', 'mask_file')]), + (vsm_applyxfm, dwi_applyxfm, [('out_file', 'shift_in_file')]) + ]) else: pipeline.connect([ - (mask_mag_dil, dwi_applyxfm, [('out_file', 'mask_file')]), - (vsm_mean, dwi_applyxfm, [('out_file', 'shift_in_file')]) - ]) + (mask_mag_dil, dwi_applyxfm, [('out_file', 'mask_file')]), + (vsm_mean, dwi_applyxfm, [('out_file', 'shift_in_file')]) + ]) return pipeline @@ -723,7 +723,7 @@ def _prepare_phasediff(in_file): img = nib.load(in_file) max_diff = np.max(img.get_data().reshape(-1)) min_diff = np.min(img.get_data().reshape(-1)) - A = (2.0 * np.pi) / (max_diff-min_diff) + A = (2.0 * np.pi) / (max_diff - min_diff) B = np.pi - (A * max_diff) diff_norm = img.get_data() * A + B @@ -788,7 +788,7 @@ def _vsm_remove_mean(in_file, mask_file, in_unwarped): def _ms2sec(val): - return val*1e-3; + return val * 1e-3 def _split_dwi(in_file): diff --git a/nipype/workflows/dmri/fsl/tbss.py b/nipype/workflows/dmri/fsl/tbss.py index 792263e370..504334c2d3 100644 --- a/nipype/workflows/dmri/fsl/tbss.py +++ b/nipype/workflows/dmri/fsl/tbss.py @@ -77,7 +77,7 @@ def create_tbss_1_preproc(name='tbss_1_preproc'): (getmask1, getmask2, [("out_file", "in_file"), ("out_file", "operand_files")]), (prepfa, slicer, [('out_file', 'in_file')]), - ]) + ]) # Define the outputnode outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa_list", @@ -85,10 +85,10 @@ def create_tbss_1_preproc(name='tbss_1_preproc'): "slices"]), name="outputnode") tbss1.connect([ - (prepfa, outputnode, [("out_file", "fa_list")]), - (getmask2, outputnode, [("out_file", "mask_list")]), - (slicer, outputnode, [('out_file', 'slices')]) - ]) + (prepfa, outputnode, [("out_file", "fa_list")]), + (getmask2, outputnode, [("out_file", "mask_list")]), + (slicer, outputnode, [('out_file', 'slices')]) + ]) return tbss1 @@ -152,7 +152,7 @@ def create_tbss_2_reg(name="tbss_2_reg"): ("mask_list", "inmask_file"), ("target", "ref_file")]), (flirt, fnirt, [("out_matrix_file", "affine_file")]), - ]) + ]) # Define the outputnode outputnode = pe.Node(interface=util.IdentityInterface(fields=['field_list']), @@ -160,7 +160,7 @@ def create_tbss_2_reg(name="tbss_2_reg"): tbss2.connect([ (fnirt, outputnode, [('fieldcoeff_file', 'field_list')]) - ]) + ]) return tbss2 @@ -228,7 +228,7 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): (mergefa, groupmask, [("merged_file", "in_file")]), (mergefa, maskgroup, [("merged_file", "in_file")]), (groupmask, maskgroup, [("out_file", "in_file2")]), - ]) + ]) # Create outputnode outputnode = pe.Node(interface=util.IdentityInterface(fields=['groupmask', @@ -247,13 +247,13 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): makeskeleton = pe.Node(fsl.TractSkeleton(skeleton_file=True), name="makeskeleton") tbss3.connect([ - (maskgroup, meanfa, [("out_file", "in_file")]), - (meanfa, makeskeleton, [("out_file", "in_file")]), - (groupmask, outputnode, [('out_file', 'groupmask')]), - (makeskeleton, outputnode, [('skeleton_file', 'skeleton_file')]), - (meanfa, outputnode, [('out_file', 'meanfa_file')]), 
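A note on the outputnode wiring being re-indented below: exposing results through an IdentityInterface node is the standard nipype pattern for giving a workflow a stable public interface. A minimal sketch, with illustrative field names:

    import nipype.pipeline.engine as pe
    import nipype.interfaces.utility as util

    outputnode = pe.Node(util.IdentityInterface(fields=['meanfa_file',
                                                        'mergefa_file']),
                         name='outputnode')
    # upstream nodes are connected onto these fields, and an enclosing
    # workflow then reads 'outputnode.meanfa_file' and so on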
-                       (maskgroup, outputnode, [('out_file', 'mergefa_file')])
-                       ])
+            (maskgroup, meanfa, [("out_file", "in_file")]),
+            (meanfa, makeskeleton, [("out_file", "in_file")]),
+            (groupmask, outputnode, [('out_file', 'groupmask')]),
+            (makeskeleton, outputnode, [('skeleton_file', 'skeleton_file')]),
+            (meanfa, outputnode, [('out_file', 'meanfa_file')]),
+            (maskgroup, outputnode, [('out_file', 'mergefa_file')])
+        ])
     else:
         # $FSLDIR/bin/fslmaths $FSLDIR/data/standard/FMRIB58_FA_1mm -mas mean_FA_mask mean_FA
         maskstd = pe.Node(fsl.ImageMaths(op_string="-mas",
@@ -271,18 +271,18 @@ def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True):
                                name="maskgroup2")

         tbss3.connect([
-                       (groupmask, maskstd, [("out_file", "in_file2")]),
-                       (maskstd, binmaskstd, [("out_file", "in_file")]),
-                       (maskgroup, maskgroup2, [("out_file", "in_file")]),
-                       (binmaskstd, maskgroup2, [("out_file", "in_file2")])
+            (groupmask, maskstd, [("out_file", "in_file2")]),
+            (maskstd, binmaskstd, [("out_file", "in_file")]),
+            (maskgroup, maskgroup2, [("out_file", "in_file")]),
+            (binmaskstd, maskgroup2, [("out_file", "in_file2")])
         ])

         outputnode.inputs.skeleton_file = fsl.Info.standard_image("FMRIB58_FA-skeleton_1mm.nii.gz")
         tbss3.connect([
-                       (binmaskstd, outputnode, [('out_file', 'groupmask')]),
-                       (maskstd, outputnode, [('out_file', 'meanfa_file')]),
-                       (maskgroup2, outputnode, [('out_file', 'mergefa_file')])
-                       ])
+            (binmaskstd, outputnode, [('out_file', 'groupmask')]),
+            (maskstd, outputnode, [('out_file', 'meanfa_file')]),
+            (maskgroup2, outputnode, [('out_file', 'mergefa_file')])
+        ])
     return tbss3

@@ -329,8 +329,8 @@ def create_tbss_4_prestats(name='tbss_4_prestats'):

     # Mask the skeleton at the threshold
     skeletonmask = pe.Node(fsl.ImageMaths(
-        suffix="_mask"),
-        name="skeletonmask")
+                           suffix="_mask"),
+                           name="skeletonmask")

     # Invert the brainmask then add in the tract skeleton
     invertmask = pe.Node(fsl.ImageMaths(suffix="_inv",
@@ -360,7 +360,7 @@ def create_tbss_4_prestats(name='tbss_4_prestats'):
         (skeletonmask, invertmask, [("out_file", "in_file2")]),
         (invertmask, distancemap, [("out_file", "in_file")]),
         (distancemap, projectfa, [("distance_map", "distance_map")]),
-        ])
+    ])

     # Create the outputnode
     outputnode = pe.Node(interface=util.IdentityInterface(fields=['projectedfa_file',
@@ -370,12 +370,12 @@ def create_tbss_4_prestats(name='tbss_4_prestats'):
                          name='outputnode')

     tbss4.connect([
-                   (projectfa, outputnode, [('projected_data', 'projectedfa_file'),
-                                            ('skeleton_file', 'skeleton_file')
-                                            ]),
-                   (distancemap, outputnode, [('distance_map', 'distance_map')]),
-                   (skeletonmask, outputnode, [('out_file', 'skeleton_mask')])
-                   ])
+        (projectfa, outputnode, [('projected_data', 'projectedfa_file'),
+                                 ('skeleton_file', 'skeleton_file')
+                                 ]),
+        (distancemap, outputnode, [('distance_map', 'distance_map')]),
+        (skeletonmask, outputnode, [('out_file', 'skeleton_mask')])
+    ])

     return tbss4

@@ -420,20 +420,20 @@ def create_tbss_all(name='tbss_all', estimate_skeleton=True):

     tbss_all = pe.Workflow(name=name)
     tbss_all.connect([
-                      (inputnode, tbss1, [('fa_list', 'inputnode.fa_list')]),
-                      (inputnode, tbss4, [('skeleton_thresh', 'inputnode.skeleton_thresh')]),
-
-                      (tbss1, tbss2, [('outputnode.fa_list', 'inputnode.fa_list'),
-                                      ('outputnode.mask_list', 'inputnode.mask_list')]),
-                      (tbss1, tbss3, [('outputnode.fa_list', 'inputnode.fa_list')]),
-                      (tbss2, tbss3, [('outputnode.field_list', 'inputnode.field_list')]),
-                      (tbss3, tbss4, [
-                          ('outputnode.groupmask', 'inputnode.groupmask'),
-                          ('outputnode.skeleton_file', 'inputnode.skeleton_file'),
-                          ('outputnode.meanfa_file', 'inputnode.meanfa_file'),
-                          ('outputnode.mergefa_file', 'inputnode.mergefa_file')
-                          ])
-                      ])
+        (inputnode, tbss1, [('fa_list', 'inputnode.fa_list')]),
+        (inputnode, tbss4, [('skeleton_thresh', 'inputnode.skeleton_thresh')]),
+
+        (tbss1, tbss2, [('outputnode.fa_list', 'inputnode.fa_list'),
+                        ('outputnode.mask_list', 'inputnode.mask_list')]),
+        (tbss1, tbss3, [('outputnode.fa_list', 'inputnode.fa_list')]),
+        (tbss2, tbss3, [('outputnode.field_list', 'inputnode.field_list')]),
+        (tbss3, tbss4, [
+            ('outputnode.groupmask', 'inputnode.groupmask'),
+            ('outputnode.skeleton_file', 'inputnode.skeleton_file'),
+            ('outputnode.meanfa_file', 'inputnode.meanfa_file'),
+            ('outputnode.mergefa_file', 'inputnode.mergefa_file')
+        ])
+    ])

     # Define the outputnode
     outputnode = pe.Node(interface=util.IdentityInterface(fields=['groupmask',
@@ -446,47 +446,47 @@ def create_tbss_all(name='tbss_all', estimate_skeleton=True):
                                                                   'distance_map']),
                          name='outputnode')
     outputall_node = pe.Node(interface=util.IdentityInterface(
-                             fields=['fa_list1',
-                                     'mask_list1',
-                                     'field_list2',
-                                     'groupmask3',
-                                     'skeleton_file3',
-                                     'meanfa_file3',
-                                     'mergefa_file3',
-                                     'projectedfa_file4',
-                                     'skeleton_mask4',
-                                     'distance_map4']),
-                             name='outputall_node')
+        fields=['fa_list1',
+                'mask_list1',
+                'field_list2',
+                'groupmask3',
+                'skeleton_file3',
+                'meanfa_file3',
+                'mergefa_file3',
+                'projectedfa_file4',
+                'skeleton_mask4',
+                'distance_map4']),
+        name='outputall_node')

     tbss_all.connect([
-                      (tbss3, outputnode, [('outputnode.meanfa_file', 'meanfa_file'),
-                                           ('outputnode.mergefa_file', 'mergefa_file'),
-                                           ('outputnode.groupmask', 'groupmask'),
-                                           ('outputnode.skeleton_file', 'skeleton_file3'),
-                                           ]),
-                      (tbss4, outputnode, [('outputnode.projectedfa_file', 'projectedfa_file'),
-                                           ('outputnode.skeleton_file', 'skeleton_file4'),
-                                           ('outputnode.skeleton_mask', 'skeleton_mask'),
-                                           ('outputnode.distance_map', 'distance_map'),
-                                           ]),
-
-                      (tbss1, outputall_node, [('outputnode.fa_list', 'fa_list1'),
-                                               ('outputnode.mask_list', 'mask_list1'),
-                                               ]),
-                      (tbss2, outputall_node, [('outputnode.field_list', 'field_list2'),
-                                               ]),
-                      (tbss3, outputall_node, [
-                          ('outputnode.meanfa_file', 'meanfa_file3'),
-                          ('outputnode.mergefa_file', 'mergefa_file3'),
-                          ('outputnode.groupmask', 'groupmask3'),
-                          ('outputnode.skeleton_file', 'skeleton_file3'),
-                          ]),
-                      (tbss4, outputall_node, [
-                          ('outputnode.projectedfa_file', 'projectedfa_file4'),
-                          ('outputnode.skeleton_mask', 'skeleton_mask4'),
-                          ('outputnode.distance_map', 'distance_map4'),
-                          ]),
-                      ])
+        (tbss3, outputnode, [('outputnode.meanfa_file', 'meanfa_file'),
+                             ('outputnode.mergefa_file', 'mergefa_file'),
+                             ('outputnode.groupmask', 'groupmask'),
+                             ('outputnode.skeleton_file', 'skeleton_file3'),
+                             ]),
+        (tbss4, outputnode, [('outputnode.projectedfa_file', 'projectedfa_file'),
+                             ('outputnode.skeleton_file', 'skeleton_file4'),
+                             ('outputnode.skeleton_mask', 'skeleton_mask'),
+                             ('outputnode.distance_map', 'distance_map'),
+                             ]),
+
+        (tbss1, outputall_node, [('outputnode.fa_list', 'fa_list1'),
+                                 ('outputnode.mask_list', 'mask_list1'),
+                                 ]),
+        (tbss2, outputall_node, [('outputnode.field_list', 'field_list2'),
+                                 ]),
+        (tbss3, outputall_node, [
+            ('outputnode.meanfa_file', 'meanfa_file3'),
+            ('outputnode.mergefa_file', 'mergefa_file3'),
+            ('outputnode.groupmask', 'groupmask3'),
+            ('outputnode.skeleton_file', 'skeleton_file3'),
+        ]),
+        (tbss4, outputall_node, [
+            ('outputnode.projectedfa_file', 'projectedfa_file4'),
+            ('outputnode.skeleton_mask', 'skeleton_mask4'),
+            ('outputnode.distance_map', 'distance_map4'),
+        ]),
+    ])

     return tbss_all

@@ -555,29 +555,29 @@ def create_tbss_non_FA(name='tbss_non_FA'):

     tbss_non_FA = pe.Workflow(name=name)
     tbss_non_FA.connect([
-                         (inputnode, applywarp, [('file_list', 'in_file'),
-                                                 ('field_list', 'field_file'),
-                                                 ]),
-                         (applywarp, merge, [("out_file", "in_files")]),
+        (inputnode, applywarp, [('file_list', 'in_file'),
+                                ('field_list', 'field_file'),
+                                ]),
+        (applywarp, merge, [("out_file", "in_files")]),

-                         (merge, maskgroup, [("merged_file", "in_file")]),
+        (merge, maskgroup, [("merged_file", "in_file")]),

-                         (inputnode, maskgroup, [('groupmask', 'in_file2')]),
+        (inputnode, maskgroup, [('groupmask', 'in_file2')]),

-                         (maskgroup, projectfa, [('out_file', 'alt_data_file')]),
-                         (inputnode, projectfa, [('skeleton_thresh', 'threshold'),
-                                                 ("meanfa_file", "in_file"),
-                                                 ("distance_map", "distance_map"),
-                                                 ("all_FA_file", 'data_file')
-                                                 ]),
-                         ])
+        (maskgroup, projectfa, [('out_file', 'alt_data_file')]),
+        (inputnode, projectfa, [('skeleton_thresh', 'threshold'),
+                                ("meanfa_file", "in_file"),
+                                ("distance_map", "distance_map"),
+                                ("all_FA_file", 'data_file')
+                                ]),
+    ])

     # Define the outputnode
     outputnode = pe.Node(interface=util.IdentityInterface(
-        fields=['projected_nonFA_file']),
-        name='outputnode')
+                         fields=['projected_nonFA_file']),
+                         name='outputnode')
     tbss_non_FA.connect([
-                         (projectfa, outputnode, [('projected_data', 'projected_nonFA_file'),
-                                                  ]),
-                         ])
+        (projectfa, outputnode, [('projected_data', 'projected_nonFA_file'),
+                                 ]),
+    ])
     return tbss_non_FA
diff --git a/nipype/workflows/dmri/fsl/tests/test_dti.py b/nipype/workflows/dmri/fsl/tests/test_dti.py
index c012c1b3b6..346ac5c23a 100644
--- a/nipype/workflows/dmri/fsl/tests/test_dti.py
+++ b/nipype/workflows/dmri/fsl/tests/test_dti.py
@@ -84,4 +84,3 @@ def test_create_bedpostx_pipeline():

     pipeline.run(plugin='Linear')
     shutil.rmtree(pipeline.base_dir)
-
diff --git a/nipype/workflows/dmri/fsl/tests/test_epi.py b/nipype/workflows/dmri/fsl/tests/test_epi.py
index c814b85d71..dbac8f2db8 100644
--- a/nipype/workflows/dmri/fsl/tests/test_epi.py
+++ b/nipype/workflows/dmri/fsl/tests/test_epi.py
@@ -45,4 +45,3 @@ def test_create_eddy_correct_pipeline():

     pipeline.run(plugin='Linear')
     shutil.rmtree(pipeline.base_dir)
-
diff --git a/nipype/workflows/dmri/fsl/utils.py b/nipype/workflows/dmri/fsl/utils.py
index e89696418f..c7cff21521 100644
--- a/nipype/workflows/dmri/fsl/utils.py
+++ b/nipype/workflows/dmri/fsl/utils.py
@@ -36,18 +36,18 @@ def cleanup_edge_pipeline(name='Cleanup'):

     wf = pe.Workflow(name=name)
     wf.connect([
-                (inputnode, fugue, [('in_file', 'fmap_in_file'),
-                                    ('in_mask', 'mask_file')]),
-                (inputnode, erode, [('in_mask', 'in_file')]),
-                (inputnode, newmsk, [('in_mask', 'in_file')]),
-                (erode, newmsk, [('out_file', 'operand_files')]),
-                (fugue, applymsk, [('fmap_out_file', 'in_file')]),
-                (newmsk, applymsk, [('out_file', 'mask_file')]),
-                (erode, join, [('out_file', 'in1')]),
-                (applymsk, join, [('out_file', 'in2')]),
-                (inputnode, addedge, [('in_file', 'in_file')]),
-                (join, addedge, [('out', 'operand_files')]),
-                (addedge, outputnode, [('out_file', 'out_file')])
+        (inputnode, fugue, [('in_file', 'fmap_in_file'),
+                            ('in_mask', 'mask_file')]),
+        (inputnode, erode, [('in_mask', 'in_file')]),
+        (inputnode, newmsk, [('in_mask', 'in_file')]),
+        (erode, newmsk, [('out_file', 'operand_files')]),
+        (fugue, applymsk, [('fmap_out_file', 'in_file')]),
+        (newmsk, applymsk, [('out_file', 'mask_file')]),
+        (erode, join, [('out_file', 'in1')]),
+        (applymsk, join, [('out_file', 'in2')]),
+        (inputnode, addedge, [('in_file', 'in_file')]),
+        (join, addedge, [('out', 'operand_files')]),
+        (addedge, outputnode, [('out_file', 'out_file')])
     ])
     return wf

@@ -69,14 +69,14 @@ def vsm2warp(name='Shiftmap2Warping'):

     wf = pe.Workflow(name=name)
     wf.connect([
-                (inputnode, fixhdr, [('in_vsm', 'in_file'),
-                                     ('in_ref', 'in_file_hdr')]),
-                (inputnode, vsm, [('scaling', 'operand_value')]),
-                (fixhdr, vsm, [('out_file', 'in_file')]),
-                (vsm, vsm2dfm, [('out_file', 'shift_in_file')]),
-                (inputnode, vsm2dfm, [('in_ref', 'reference'),
-                                      ('enc_dir', 'shift_direction')]),
-                (vsm2dfm, outputnode, [('out_file', 'out_warp')])
+        (inputnode, fixhdr, [('in_vsm', 'in_file'),
+                             ('in_ref', 'in_file_hdr')]),
+        (inputnode, vsm, [('scaling', 'operand_value')]),
+        (fixhdr, vsm, [('out_file', 'in_file')]),
+        (vsm, vsm2dfm, [('out_file', 'shift_in_file')]),
+        (inputnode, vsm2dfm, [('in_ref', 'reference'),
+                              ('enc_dir', 'shift_direction')]),
+        (vsm2dfm, outputnode, [('out_file', 'out_warp')])
     ])
     return wf

@@ -115,25 +115,25 @@ def dwi_flirt(name='DWICoregistration', excl_nodiff=False,
                          fields=['out_file', 'out_xfms']), name='outputnode')
     wf = pe.Workflow(name=name)
     wf.connect([
-                (inputnode, split, [('in_file', 'in_file')]),
-                (inputnode, dilate, [('ref_mask', 'in_file')]),
-                (inputnode, enhb0, [('ref_mask', 'in_mask')]),
-                (inputnode, initmat, [('in_xfms', 'in_xfms'),
-                                      ('in_bval', 'in_bval')]),
-                (inputnode, n4, [('reference', 'input_image'),
-                                 ('ref_mask', 'mask_image')]),
-                (dilate, flirt, [('out_file', 'ref_weight'),
-                                 ('out_file', 'in_weight')]),
-                (n4, enhb0, [('output_image', 'in_file')]),
-                (split, enhdw, [('out_files', 'in_file')]),
-                (dilate, enhdw, [('out_file', 'in_mask')]),
-                (enhb0, flirt, [('out_file', 'reference')]),
-                (enhdw, flirt, [('out_file', 'in_file')]),
-                (initmat, flirt, [('init_xfms', 'in_matrix_file')]),
-                (flirt, thres, [('out_file', 'in_file')]),
-                (thres, merge, [('out_file', 'in_files')]),
-                (merge, outputnode, [('merged_file', 'out_file')]),
-                (flirt, outputnode, [('out_matrix_file', 'out_xfms')])
+        (inputnode, split, [('in_file', 'in_file')]),
+        (inputnode, dilate, [('ref_mask', 'in_file')]),
+        (inputnode, enhb0, [('ref_mask', 'in_mask')]),
+        (inputnode, initmat, [('in_xfms', 'in_xfms'),
+                              ('in_bval', 'in_bval')]),
+        (inputnode, n4, [('reference', 'input_image'),
+                         ('ref_mask', 'mask_image')]),
+        (dilate, flirt, [('out_file', 'ref_weight'),
+                         ('out_file', 'in_weight')]),
+        (n4, enhb0, [('output_image', 'in_file')]),
+        (split, enhdw, [('out_files', 'in_file')]),
+        (dilate, enhdw, [('out_file', 'in_mask')]),
+        (enhb0, flirt, [('out_file', 'reference')]),
+        (enhdw, flirt, [('out_file', 'in_file')]),
+        (initmat, flirt, [('init_xfms', 'in_matrix_file')]),
+        (flirt, thres, [('out_file', 'in_file')]),
+        (thres, merge, [('out_file', 'in_files')]),
+        (merge, outputnode, [('merged_file', 'out_file')]),
+        (flirt, outputnode, [('out_matrix_file', 'out_xfms')])
     ])
     return wf

@@ -175,27 +175,27 @@ def apply_all_corrections(name='UnwarpArtifacts'):

     wf = pe.Workflow(name=name)
     wf.connect([
-                (inputnode, warps, [('in_sdc', 'warp1'),
-                                    ('in_hmc', 'premat'),
-                                    ('in_ecc', 'postmat'),
-                                    ('in_dwi', 'reference')]),
-                (inputnode, split, [('in_dwi', 'in_file')]),
-                (split, selref, [('out_files', 'inlist')]),
-                (warps, unwarp, [('out_file', 'field_file')]),
-                (split, unwarp, [('out_files', 'in_file')]),
-                (selref, unwarp, [('out', 'ref_file')]),
-                (selref, coeffs, [('out', 'reference')]),
-                (warps, coeffs, [('out_file', 'in_file')]),
-                (selref, jacobian, [('out', 'reference')]),
-                (coeffs, jacobian, [('out_file', 'in_file')]),
-                (unwarp, jacmult, [('out_file', 'in_file')]),
-                (jacobian, jacmult, [('out_jacobian', 'operand_files')]),
-                (jacmult, thres, [('out_file', 'in_file')]),
-                (thres, merge, [('out_file', 'in_files')]),
-                (warps, outputnode, [('out_file', 'out_warp')]),
-                (coeffs, outputnode, [('out_file', 'out_coeff')]),
-                (jacobian, outputnode, [('out_jacobian', 'out_jacobian')]),
-                (merge, outputnode, [('merged_file', 'out_file')])
+        (inputnode, warps, [('in_sdc', 'warp1'),
+                            ('in_hmc', 'premat'),
+                            ('in_ecc', 'postmat'),
+                            ('in_dwi', 'reference')]),
+        (inputnode, split, [('in_dwi', 'in_file')]),
+        (split, selref, [('out_files', 'inlist')]),
+        (warps, unwarp, [('out_file', 'field_file')]),
+        (split, unwarp, [('out_files', 'in_file')]),
+        (selref, unwarp, [('out', 'ref_file')]),
+        (selref, coeffs, [('out', 'reference')]),
+        (warps, coeffs, [('out_file', 'in_file')]),
+        (selref, jacobian, [('out', 'reference')]),
+        (coeffs, jacobian, [('out_file', 'in_file')]),
+        (unwarp, jacmult, [('out_file', 'in_file')]),
+        (jacobian, jacmult, [('out_jacobian', 'operand_files')]),
+        (jacmult, thres, [('out_file', 'in_file')]),
+        (thres, merge, [('out_file', 'in_files')]),
+        (warps, outputnode, [('out_file', 'out_warp')]),
+        (coeffs, outputnode, [('out_file', 'out_coeff')]),
+        (jacobian, outputnode, [('out_jacobian', 'out_jacobian')]),
+        (merge, outputnode, [('merged_file', 'out_file')])
     ])
     return wf
diff --git a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py b/nipype/workflows/dmri/mrtrix/connectivity_mapping.py
index 63a40fd013..1bf1d53cc6 100644
--- a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py
+++ b/nipype/workflows/dmri/mrtrix/connectivity_mapping.py
@@ -516,25 +516,25 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50
     inputnode = pe.Node(interface=util.IdentityInterface(fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir"]), name="inputnode")

     outputnode = pe.Node(interface=util.IdentityInterface(fields=["fa",
-                                                                  "struct",
-                                                                  "tracts",
-                                                                  "tracks2prob",
-                                                                  "connectome",
-                                                                  "nxstatscff",
-                                                                  "nxmatlab",
-                                                                  "nxcsv",
-                                                                  "fiber_csv",
-                                                                  "cmatrices_csv",
-                                                                  "nxmergedcsv",
-                                                                  "cmatrix",
-                                                                  "networks",
-                                                                  "filtered_tracts",
-                                                                  "rois",
-                                                                  "odfs",
-                                                                  "tdi",
-                                                                  "mean_fiber_length",
-                                                                  "median_fiber_length",
-                                                                  "fiber_length_std"]),
+                                                                 "struct",
+                                                                 "tracts",
+                                                                 "tracks2prob",
+                                                                 "connectome",
+                                                                 "nxstatscff",
+                                                                 "nxmatlab",
+                                                                 "nxcsv",
+                                                                 "fiber_csv",
+                                                                 "cmatrices_csv",
+                                                                 "nxmergedcsv",
+                                                                 "cmatrix",
+                                                                 "networks",
+                                                                 "filtered_tracts",
+                                                                 "rois",
+                                                                 "odfs",
+                                                                 "tdi",
+                                                                 "mean_fiber_length",
+                                                                 "median_fiber_length",
+                                                                 "fiber_length_std"]),
                         name="outputnode")

     connectivity = pe.Workflow(name="connectivity")
@@ -564,7 +564,7 @@ def create_connectivity_pipeline(name="connectivity", parcellation_name='scale50
                                               ("mri_convert_Brain.out_file", "struct"),
                                               ("MRconvert_fa.converted", "fa"),
                                               ("MRconvert_tracks2prob.converted", "tracks2prob")])
-        ])
+    ])

     connectivity.connect([(cmats_to_csv, outputnode, [("outputnode.csv_file", "cmatrices_csv")])])
     connectivity.connect([(networkx, outputnode, [("outputnode.csv_files", "nxcsv")])])
diff --git a/nipype/workflows/dmri/mrtrix/diffusion.py b/nipype/workflows/dmri/mrtrix/diffusion.py
index 52ac4692bd..86d098011b 100644
--- a/nipype/workflows/dmri/mrtrix/diffusion.py
+++ b/nipype/workflows/dmri/mrtrix/diffusion.py
@@ -36,8 +36,8 @@ def create_mrtrix_dti_pipeline(name="dtiproc", tractography_type='probabilistic'
     """

     inputnode = pe.Node(interface=util.IdentityInterface(fields=["dwi",
-                                                                  "bvecs",
-                                                                  "bvals"]),
+                                                                 "bvecs",
+                                                                 "bvals"]),
                         name="inputnode")

     bet = pe.Node(interface=fsl.BET(),
name="bet") diff --git a/nipype/workflows/dmri/mrtrix/group_connectivity.py b/nipype/workflows/dmri/mrtrix/group_connectivity.py index c4048c969a..b308e17137 100644 --- a/nipype/workflows/dmri/mrtrix/group_connectivity.py +++ b/nipype/workflows/dmri/mrtrix/group_connectivity.py @@ -94,7 +94,7 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ datasink.inputs.base_directory = output_dir datasink.inputs.container = group_id - l1pipeline = pe.Workflow(name="l1pipeline_"+group_id) + l1pipeline = pe.Workflow(name="l1pipeline_" + group_id) l1pipeline.base_dir = output_dir l1pipeline.base_output_dir = group_id l1pipeline.connect([(subj_infosource, conmapper, [('subject_id', 'inputnode.subject_id')])]) @@ -102,7 +102,7 @@ def create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_ l1pipeline.connect([(datasource, conmapper, [("dwi", "inputnode.dwi"), ("bvals", "inputnode.bvals"), ("bvecs", "inputnode.bvecs"), - ])]) + ])]) l1pipeline.connect([(conmapper, datasink, [("outputnode.connectome", "@l1output.cff"), ("outputnode.nxstatscff", "@l1output.nxstatscff"), ("outputnode.nxmatlab", "@l1output.nxmatlab"), diff --git a/nipype/workflows/fmri/fsl/estimate.py b/nipype/workflows/fmri/fsl/estimate.py index 79e14a68d3..ef2a3c44d0 100644 --- a/nipype/workflows/fmri/fsl/estimate.py +++ b/nipype/workflows/fmri/fsl/estimate.py @@ -132,7 +132,7 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): (merge_contrasts, outputspec, [('out', 'zfiles')]), (modelestimate, outputspec, [('param_estimates', 'parameter_estimates'), ('dof_file', 'dof_file')]), - ]) + ]) if version < 507: modelfit.connect([ (modelgen, conestimate, [('con_file', 'tcon_file'), @@ -145,7 +145,7 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): ('zfstats', 'in2')]), (conestimate, outputspec, [('copes', 'copes'), ('varcopes', 'varcopes')]), - ]) + ]) else: modelfit.connect([ (modelgen, modelestimate, [('con_file', 'tcon_file'), @@ -154,7 +154,7 @@ def create_modelfit_workflow(name='modelfit', f_contrasts=False): ('zfstats', 'in2')]), (modelestimate, outputspec, [('copes', 'copes'), ('varcopes', 'varcopes')]), - ]) + ]) return modelfit diff --git a/nipype/workflows/fmri/fsl/preprocess.py b/nipype/workflows/fmri/fsl/preprocess.py index 2d8cbea2f3..6ac1d7b86d 100644 --- a/nipype/workflows/fmri/fsl/preprocess.py +++ b/nipype/workflows/fmri/fsl/preprocess.py @@ -14,7 +14,7 @@ def getthreshop(thresh): - return ['-thr %.10f -Tmin -bin' %(0.1*val[1]) for val in thresh] + return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh] def pickfirst(files): @@ -41,14 +41,14 @@ def pickvol(filenames, fileidx, which): elif which.lower() == 'middle': idx = int(np.ceil(load(filenames[fileidx]).get_shape()[3] / 2)) elif which.lower() == 'last': - idx = load(filenames[fileidx]).get_shape()[3]-1 + idx = load(filenames[fileidx]).get_shape()[3] - 1 else: raise Exception('unknown value for volume selection : %s' % which) return idx def getbtthresh(medianvals): - return [0.75*val for val in medianvals] + return [0.75 * val for val in medianvals] def chooseindex(fwhm): @@ -66,7 +66,7 @@ def getusans(x): return [[tuple([val[0], 0.75 * val[1]])] for val in x] tolist = lambda x: [x] -highpass_operand = lambda x: '-bptf %.10f -1' %x +highpass_operand = lambda x: '-bptf %.10f -1' % x def create_parallelfeat_preproc(name='featpreproc', highpass=True): @@ -1241,4 +1241,3 @@ def create_reg_workflow(name='registration'): outputnode, 'anat2target_transform') return register - diff --git 
index e89a009cf0..8ddf878d12 100644
--- a/nipype/workflows/fmri/spm/__init__.py
+++ b/nipype/workflows/fmri/spm/__init__.py
@@ -1,2 +1,2 @@
 from .preprocess import (create_spm_preproc, create_vbm_preproc,
-                         create_DARTEL_template)
\ No newline at end of file
+                         create_DARTEL_template)
diff --git a/nipype/workflows/fmri/spm/preprocess.py b/nipype/workflows/fmri/spm/preprocess.py
index 1bcfcc39c8..34060fc6d3 100644
--- a/nipype/workflows/fmri/spm/preprocess.py
+++ b/nipype/workflows/fmri/spm/preprocess.py
@@ -95,8 +95,8 @@ def create_spm_preproc(name='preproc'):
                                            save_plot=True),
                          name='artdetect')
     workflow.connect([(inputnode, artdetect, [('norm_threshold', 'norm_threshold'),
-                                              ('zintensity_threshold',
-                                              'zintensity_threshold')])])
+                                              ('zintensity_threshold',
+                                               'zintensity_threshold')])])
     workflow.connect([(realign, artdetect, [('realigned_files', 'realigned_files'),
                                             ('realignment_parameters', 'realignment_parameters')])])
@@ -117,15 +117,15 @@ def create_spm_preproc(name='preproc'):
                                             ]),
                      name="outputspec")
     workflow.connect([
-                      (maskflow, outputnode, [("outputspec.reg_file", "reg_file")]),
-                      (maskflow, outputnode, [("outputspec.reg_cost", "reg_cost")]),
-                      (maskflow, outputnode, [(("outputspec.mask_file", poplist), "mask_file")]),
-                      (realign, outputnode, [('realignment_parameters', 'realignment_parameters')]),
-                      (smooth, outputnode, [('smoothed_files', 'smoothed_files')]),
-                      (artdetect, outputnode, [('outlier_files', 'outlier_files'),
-                                               ('statistic_files', 'outlier_stats'),
-                                               ('plot_files', 'outlier_plots')])
-                      ])
+        (maskflow, outputnode, [("outputspec.reg_file", "reg_file")]),
+        (maskflow, outputnode, [("outputspec.reg_cost", "reg_cost")]),
+        (maskflow, outputnode, [(("outputspec.mask_file", poplist), "mask_file")]),
+        (realign, outputnode, [('realignment_parameters', 'realignment_parameters')]),
+        (smooth, outputnode, [('smoothed_files', 'smoothed_files')]),
+        (artdetect, outputnode, [('outlier_files', 'outlier_files'),
+                                 ('statistic_files', 'outlier_stats'),
+                                 ('plot_files', 'outlier_plots')])
+    ])
     return workflow

@@ -195,7 +195,7 @@ def compute_icv(class_images):
         img = load(session[0][0]).get_data() + \
             load(session[1][0]).get_data() + \
             load(session[2][0]).get_data()
-        img_icv = (img > 0.5).astype(int).sum()*voxel_volume*1e-3
+        img_icv = (img > 0.5).astype(int).sum() * voxel_volume * 1e-3
         icv.append(img_icv)
     return icv

@@ -303,8 +303,8 @@ def get2classes(dartel_files):
                                             ]),
                      name="outputspec")
     workflow.connect([
-                      (dartel, outputnode, [('final_template_file', 'template_file'),
-                                            ('dartel_flow_fields', 'flow_fields')]),
-                      ])
+        (dartel, outputnode, [('final_template_file', 'template_file'),
+                              ('dartel_flow_fields', 'flow_fields')]),
+    ])
     return workflow
diff --git a/nipype/workflows/graph/__init__.py b/nipype/workflows/graph/__init__.py
index 8b13789179..e69de29bb2 100644
--- a/nipype/workflows/graph/__init__.py
+++ b/nipype/workflows/graph/__init__.py
@@ -1 +0,0 @@
-
diff --git a/nipype/workflows/misc/__init__.py b/nipype/workflows/misc/__init__.py
index 8b13789179..e69de29bb2 100644
--- a/nipype/workflows/misc/__init__.py
+++ b/nipype/workflows/misc/__init__.py
@@ -1 +0,0 @@
-
diff --git a/nipype/workflows/rsfmri/fsl/resting.py b/nipype/workflows/rsfmri/fsl/resting.py
index a8e3f70cb5..8826a7ed9d 100644
--- a/nipype/workflows/rsfmri/fsl/resting.py
+++ b/nipype/workflows/rsfmri/fsl/resting.py
@@ -48,7 +48,7 @@ def select_volume(filename, which):
     elif which.lower() == 'middle':
         idx = int(np.ceil(load(filename).get_shape()[3] / 2))
     else:
-        raise Exception('unknown value for volume selection : %s' %which)
+        raise Exception('unknown value for volume selection : %s' % which)
     return idx

@@ -71,9 +71,9 @@ def create_realign_flow(name='realign'):
                                             ]),
                         name='inputspec')
     outputnode = pe.Node(interface=util.IdentityInterface(fields=[
-                                                          'realigned_file',
-                                                          ]),
-                         name='outputspec')
+        'realigned_file',
+    ]),
+        name='outputspec')
     realigner = pe.Node(fsl.MCFLIRT(save_mats=True, stats_imgs=True),
                         name='realigner')
     splitter = pe.Node(fsl.Split(dimension='t'), name='splitter')
@@ -136,10 +136,10 @@ def create_resting_preproc(name='restpreproc'):
                                             ]),
                     name='inputspec')
     outputnode = pe.Node(interface=util.IdentityInterface(fields=[
-                                                          'noise_mask_file',
-                                                          'filtered_file',
-                                                          ]),
-                         name='outputspec')
+        'noise_mask_file',
+        'filtered_file',
+    ]),
+        name='outputspec')
     slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer')
     realigner = create_realign_flow()
     tsnr = pe.Node(TSNR(regress_poly=2), name='tsnr')
diff --git a/nipype/workflows/smri/__init__.py b/nipype/workflows/smri/__init__.py
index 98dd34812f..06a1e16e33 100644
--- a/nipype/workflows/smri/__init__.py
+++ b/nipype/workflows/smri/__init__.py
@@ -1,2 +1,2 @@
 from . import freesurfer
-from . import ants
\ No newline at end of file
+from . import ants
diff --git a/nipype/workflows/smri/ants/ANTSBuildTemplate.py b/nipype/workflows/smri/ants/ANTSBuildTemplate.py
index 79b9918867..11bcbb9809 100644
--- a/nipype/workflows/smri/ants/ANTSBuildTemplate.py
+++ b/nipype/workflows/smri/ants/ANTSBuildTemplate.py
@@ -53,9 +53,9 @@ def RenestDeformedPassiveImages(deformedPassiveImages, flattened_image_nametypes
         image_dictionary_of_lists[curr_name].append(curr_file)
     for image_type, image_list in list(image_dictionary_of_lists.items()):
         nested_imagetype_list.append(image_list)
-        outputAverageImageName_list.append('AVG_'+image_type+'.nii.gz')
-        image_type_list.append('WARP_AVG_'+image_type)
-    print("\n"*10)
+        outputAverageImageName_list.append('AVG_' + image_type + '.nii.gz')
+        image_type_list.append('WARP_AVG_' + image_type)
+    print("\n" * 10)
     print("HACK: ", nested_imagetype_list)
     print("HACK: ", outputAverageImageName_list)
     print("HACK: ", image_type_list)
@@ -120,7 +120,7 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     outputspec.passive_deformed_templates :
     """
-    TemplateBuildSingleIterationWF = pe.Workflow(name='ANTSTemplateBuildSingleIterationWF_'+str(str(iterationPhasePrefix)))
+    TemplateBuildSingleIterationWF = pe.Workflow(name='ANTSTemplateBuildSingleIterationWF_' + str(str(iterationPhasePrefix)))

     inputSpec = pe.Node(interface=util.IdentityInterface(fields=['images',
                                                                  'fixed_image',
                                                                  'ListOfPassiveImagesDictionaries']),
                         run_without_submitting=True,
                         name='inputspec')
@@ -139,7 +139,7 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     # NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template
     BeginANTS = pe.MapNode(interface=ANTS(), name='BeginANTS', iterfield=['moving_image'])
     BeginANTS.inputs.dimension = 3
-    BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix)+'_tfm'
+    BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix) + '_tfm'
     BeginANTS.inputs.metric = ['CC']
     BeginANTS.inputs.metric_weight = [1.0]
     BeginANTS.inputs.radius = [5]
@@ -175,20 +175,20 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     # Now Average All input_images deformed images together to create an updated template average
     AvgDeformedImages = pe.Node(interface=AverageImages(), name='AvgDeformedImages')
     AvgDeformedImages.inputs.dimension = 3
-    AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix)+'.nii.gz'
+    AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix) + '.nii.gz'
     AvgDeformedImages.inputs.normalize = True
     TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", AvgDeformedImages, 'images')

     # Now average all affine transforms together
     AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name='AvgAffineTransform')
     AvgAffineTransform.inputs.dimension = 3
-    AvgAffineTransform.inputs.output_affine_transform = 'Avererage_'+str(iterationPhasePrefix)+'_Affine.mat'
+    AvgAffineTransform.inputs.output_affine_transform = 'Avererage_' + str(iterationPhasePrefix) + '_Affine.mat'
     TemplateBuildSingleIterationWF.connect(BeginANTS, 'affine_transform', AvgAffineTransform, 'transforms')

     # Now average the warp fields togther
     AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages')
     AvgWarpImages.inputs.dimension = 3
-    AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix)+'warp.nii.gz'
+    AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix) + 'warp.nii.gz'
     AvgWarpImages.inputs.normalize = True
     TemplateBuildSingleIterationWF.connect(BeginANTS, 'warp_transform', AvgWarpImages, 'images')

@@ -198,7 +198,7 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     GradientStepWarpImage = pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage')
     GradientStepWarpImage.inputs.dimension = 3
     GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep
-    GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_'+str(iterationPhasePrefix)+'_warp.nii.gz'
+    GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_' + str(iterationPhasePrefix) + '_warp.nii.gz'
     TemplateBuildSingleIterationWF.connect(AvgWarpImages, 'output_average_image', GradientStepWarpImage, 'first_input')

     # Now create the new template shape based on the average of all deformed images
@@ -235,28 +235,28 @@ def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     ##############################################
     # Now warp all the ListOfPassiveImagesDictionaries images
     FlattenTransformAndImagesListNode = pe.Node(Function(function=FlattenTransformAndImagesList,
-                                                input_names=['ListOfPassiveImagesDictionaries', 'transformation_series'],
-                                                output_names=['flattened_images', 'flattened_transforms', 'flattened_image_nametypes']),
-                                                run_without_submitting=True, name="99_FlattenTransformAndImagesList")
+                                                         input_names=['ListOfPassiveImagesDictionaries', 'transformation_series'],
+                                                         output_names=['flattened_images', 'flattened_transforms', 'flattened_image_nametypes']),
+                                                run_without_submitting=True, name="99_FlattenTransformAndImagesList")
     TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries')
     TemplateBuildSingleIterationWF.connect(MakeTransformsLists, 'out', FlattenTransformAndImagesListNode, 'transformation_series')

     wimtPassivedeformed = pe.MapNode(interface=WarpImageMultiTransform(),
                                      iterfield=['transformation_series', 'input_image'],
                                      name='wimtPassivedeformed')
     TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', wimtPassivedeformed, 'reference_image')
-    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image') 
+    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image')
     TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_transforms', wimtPassivedeformed, 'transformation_series')

     RenestDeformedPassiveImagesNode = pe.Node(Function(function=RenestDeformedPassiveImages,
-                                              input_names=['deformedPassiveImages', 'flattened_image_nametypes'],
-                                              output_names=['nested_imagetype_list', 'outputAverageImageName_list', 'image_type_list']),
-                                              run_without_submitting=True, name="99_RenestDeformedPassiveImages")
+                                                       input_names=['deformedPassiveImages', 'flattened_image_nametypes'],
+                                                       output_names=['nested_imagetype_list', 'outputAverageImageName_list', 'image_type_list']),
+                                              run_without_submitting=True, name="99_RenestDeformedPassiveImages")
     TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', RenestDeformedPassiveImagesNode, 'deformedPassiveImages')
     TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_image_nametypes', RenestDeformedPassiveImagesNode, 'flattened_image_nametypes')

     # Now Average All passive input_images deformed images together to create an updated template average
     AvgDeformedPassiveImages = pe.MapNode(interface=AverageImages(),
-                                        iterfield=['images', 'output_average_image'],
-                                        name='AvgDeformedPassiveImages')
+                                          iterfield=['images', 'output_average_image'],
+                                          name='AvgDeformedPassiveImages')
     AvgDeformedPassiveImages.inputs.dimension = 3
     AvgDeformedPassiveImages.inputs.normalize = False
     TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "nested_imagetype_list", AvgDeformedPassiveImages, 'images')
diff --git a/nipype/workflows/smri/ants/__init__.py b/nipype/workflows/smri/ants/__init__.py
index 5cfe6259a2..f22640eb1e 100644
--- a/nipype/workflows/smri/ants/__init__.py
+++ b/nipype/workflows/smri/ants/__init__.py
@@ -1,2 +1,2 @@
 from .ANTSBuildTemplate import ANTSTemplateBuildSingleIterationWF
-from .antsRegistrationBuildTemplate import antsRegistrationTemplateBuildSingleIterationWF
\ No newline at end of file
+from .antsRegistrationBuildTemplate import antsRegistrationTemplateBuildSingleIterationWF
diff --git a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py b/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py
index 9a5c9c7e8e..e58b62177d 100644
--- a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py
+++ b/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py
@@ -17,10 +17,10 @@
 from ....interfaces.utility import Function

 from ....interfaces.ants import (
-                                 Registration,
-                                 ApplyTransforms,
-                                 AverageImages, MultiplyImages,
-                                 AverageAffineTransform)
+    Registration,
+    ApplyTransforms,
+    AverageImages, MultiplyImages,
+    AverageAffineTransform)


 def makeListOfOneElement(inputFile):
@@ -58,13 +58,13 @@ def RenestDeformedPassiveImages(deformedPassiveImages, flattened_image_nametypes
         image_dictionary_of_lists[curr_name].append(curr_file)
     for image_type, image_list in list(image_dictionary_of_lists.items()):
         nested_imagetype_list.append(image_list)
-        outputAverageImageName_list.append('AVG_'+image_type+'.nii.gz')
-        image_type_list.append('WARP_AVG_'+image_type)
+        outputAverageImageName_list.append('AVG_' + image_type + '.nii.gz')
+        image_type_list.append('WARP_AVG_' + image_type)
         if image_type in interpolationMapping:
             nested_interpolation_type.append(interpolationMapping[image_type])
         else:
             nested_interpolation_type.append('Linear')  # Linear is the default.
-    print("\n"*10)
+    print("\n" * 10)
     print("HACK: ", nested_imagetype_list)
     print("HACK: ", outputAverageImageName_list)
     print("HACK: ", image_type_list)
@@ -176,13 +176,13 @@ def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     outputspec.transforms_list :
     outputspec.passive_deformed_templates :
     """
-    TemplateBuildSingleIterationWF = pe.Workflow(name='antsRegistrationTemplateBuildSingleIterationWF_'+str(iterationPhasePrefix))
+    TemplateBuildSingleIterationWF = pe.Workflow(name='antsRegistrationTemplateBuildSingleIterationWF_' + str(iterationPhasePrefix))

     inputSpec = pe.Node(interface=util.IdentityInterface(fields=[
-        'ListOfImagesDictionaries', 'registrationImageTypes',
-        'interpolationMapping', 'fixed_image']),
-        run_without_submitting=True,
-        name='inputspec')
+                        'ListOfImagesDictionaries', 'registrationImageTypes',
+                        'interpolationMapping', 'fixed_image']),
+                        run_without_submitting=True,
+                        name='inputspec')
     # HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that
     #             they do not change due to re-indenting.  Otherwise re-indenting for flow control will trigger
     #             their hash to change.
@@ -196,18 +196,18 @@ def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     # NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template
     BeginANTS = pe.MapNode(interface=Registration(), name='BeginANTS', iterfield=['moving_image'])
     BeginANTS.inputs.dimension = 3
-    BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix)+'_tfm'
-    BeginANTS.inputs.transforms = ["Affine", "SyN"]
-    BeginANTS.inputs.transform_parameters = [[0.9], [0.25, 3.0, 0.0]]
-    BeginANTS.inputs.metric = ['Mattes', 'CC']
-    BeginANTS.inputs.metric_weight = [1.0, 1.0]
-    BeginANTS.inputs.radius_or_number_of_bins = [32, 5]
+    BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix) + '_tfm'
+    BeginANTS.inputs.transforms = ["Affine", "SyN"]
+    BeginANTS.inputs.transform_parameters = [[0.9], [0.25, 3.0, 0.0]]
+    BeginANTS.inputs.metric = ['Mattes', 'CC']
+    BeginANTS.inputs.metric_weight = [1.0, 1.0]
+    BeginANTS.inputs.radius_or_number_of_bins = [32, 5]
     BeginANTS.inputs.number_of_iterations = [[1000, 1000, 1000], [50, 35, 15]]
-    BeginANTS.inputs.use_histogram_matching = [True, True]
-    BeginANTS.inputs.use_estimate_learning_rate_once = [False, False]
-    BeginANTS.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1]]
-    BeginANTS.inputs.smoothing_sigmas = [[3, 2, 0], [3, 2, 0]]
-    BeginANTS.inputs.sigma_units = ["vox"]*2
+    BeginANTS.inputs.use_histogram_matching = [True, True]
+    BeginANTS.inputs.use_estimate_learning_rate_once = [False, False]
+    BeginANTS.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1]]
+    BeginANTS.inputs.smoothing_sigmas = [[3, 2, 0], [3, 2, 0]]
+    BeginANTS.inputs.sigma_units = ["vox"] * 2

     GetMovingImagesNode = pe.Node(interface=util.Function(function=GetMovingImages,
                                                           input_names=['ListOfImagesDictionaries', 'registrationImageTypes',
                                                                        'interpolationMapping'],
@@ -237,14 +237,14 @@ def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     # Now Average All input_images deformed images together to create an updated template average
     AvgDeformedImages = pe.Node(interface=AverageImages(), name='AvgDeformedImages')
     AvgDeformedImages.inputs.dimension = 3
-    AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix)+'.nii.gz'
+    AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix) + '.nii.gz'
     AvgDeformedImages.inputs.normalize = True
     TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", AvgDeformedImages, 'images')

     # Now average all affine transforms together
     AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name='AvgAffineTransform')
     AvgAffineTransform.inputs.dimension = 3
-    AvgAffineTransform.inputs.output_affine_transform = 'Avererage_'+str(iterationPhasePrefix)+'_Affine.mat'
+    AvgAffineTransform.inputs.output_affine_transform = 'Avererage_' + str(iterationPhasePrefix) + '_Affine.mat'

     SplitAffineAndWarpsNode = pe.Node(interface=util.Function(function=SplitAffineAndWarpComponents,
                                                               input_names=['list_of_transforms_lists'],
                                                               output_names=['affine_component_list', 'warp_component_list']),
                                       run_without_submitting=True, name='99_SplitAffineAndWarpsNode')
@@ -257,7 +257,7 @@ def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     # Now average the warp fields togther
     AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages')
     AvgWarpImages.inputs.dimension = 3
-    AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix)+'warp.nii.gz'
+    AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix) + 'warp.nii.gz'
     AvgWarpImages.inputs.normalize = True
     TemplateBuildSingleIterationWF.connect(SplitAffineAndWarpsNode, 'warp_component_list', AvgWarpImages, 'images')

@@ -267,7 +267,7 @@
     GradientStepWarpImage = pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage')
     GradientStepWarpImage.inputs.dimension = 3
     GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep
-    GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_'+str(iterationPhasePrefix)+'_warp.nii.gz'
+    GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_' + str(iterationPhasePrefix) + '_warp.nii.gz'
     TemplateBuildSingleIterationWF.connect(AvgWarpImages, 'output_average_image', GradientStepWarpImage, 'first_input')

     # Now create the new template shape based on the average of all deformed images
@@ -309,11 +309,11 @@ def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     ##############################################
     # Now warp all the ListOfPassiveImagesDictionaries images
     FlattenTransformAndImagesListNode = pe.Node(Function(function=FlattenTransformAndImagesList,
-                                                input_names=['ListOfPassiveImagesDictionaries', 'transforms',
-                                                             'invert_transform_flags', 'interpolationMapping'],
-                                                output_names=['flattened_images', 'flattened_transforms', 'flattened_invert_transform_flags',
-                                                              'flattened_image_nametypes', 'flattened_interpolation_type']),
-                                                run_without_submitting=True, name="99_FlattenTransformAndImagesList")
+                                                         input_names=['ListOfPassiveImagesDictionaries', 'transforms',
+                                                                      'invert_transform_flags', 'interpolationMapping'],
+                                                         output_names=['flattened_images', 'flattened_transforms', 'flattened_invert_transform_flags',
+                                                                       'flattened_image_nametypes', 'flattened_interpolation_type']),
+                                                         run_without_submitting=True, name="99_FlattenTransformAndImagesList")

     GetPassiveImagesNode = pe.Node(interface=util.Function(function=GetPassiveImages,
                                                            input_names=['ListOfImagesDictionaries', 'registrationImageTypes'],
@@ -333,22 +333,22 @@ def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''):
     wimtPassivedeformed.default_value = 0
     TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', wimtPassivedeformed, 'reference_image')
     TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_interpolation_type', wimtPassivedeformed, 'interpolation')
-    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image') 
+    TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image')
     TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_transforms', wimtPassivedeformed, 'transforms')
     TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_invert_transform_flags', wimtPassivedeformed, 'invert_transform_flags')

     RenestDeformedPassiveImagesNode = pe.Node(Function(function=RenestDeformedPassiveImages,
-                                              input_names=['deformedPassiveImages', 'flattened_image_nametypes', 'interpolationMapping'],
-                                              output_names=['nested_imagetype_list', 'outputAverageImageName_list',
-                                                            'image_type_list', 'nested_interpolation_type']),
-                                              run_without_submitting=True, name="99_RenestDeformedPassiveImages")
+                                                       input_names=['deformedPassiveImages', 'flattened_image_nametypes', 'interpolationMapping'],
+                                                       output_names=['nested_imagetype_list', 'outputAverageImageName_list',
+                                                                     'image_type_list', 'nested_interpolation_type']),
+                                                       run_without_submitting=True, name="99_RenestDeformedPassiveImages")
     TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', RenestDeformedPassiveImagesNode, 'interpolationMapping')
     TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', RenestDeformedPassiveImagesNode, 'deformedPassiveImages')
     TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_image_nametypes', RenestDeformedPassiveImagesNode, 'flattened_image_nametypes')

     # Now Average All passive input_images deformed images together to create an updated template average
     AvgDeformedPassiveImages = pe.MapNode(interface=AverageImages(),
-                                        iterfield=['images', 'output_average_image'],
-                                        name='AvgDeformedPassiveImages')
+                                          iterfield=['images', 'output_average_image'],
+                                          name='AvgDeformedPassiveImages')
     AvgDeformedPassiveImages.inputs.dimension = 3
     AvgDeformedPassiveImages.inputs.normalize = False
     TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "nested_imagetype_list", AvgDeformedPassiveImages, 'images')
diff --git a/nipype/workflows/smri/freesurfer/bem.py b/nipype/workflows/smri/freesurfer/bem.py
index 46c7b2f239..a9b0a0a325 100644
--- a/nipype/workflows/smri/freesurfer/bem.py
+++ b/nipype/workflows/smri/freesurfer/bem.py
@@ -66,10 +66,10 @@ def create_bem_flow(name='bem', out_format='stl'):
     """

     bemflow.connect([
-                     (inputnode, watershed_bem, [('subject_id', 'subject_id'),
-                                                 ('subjects_dir', 'subjects_dir')]),
-                     (watershed_bem, surfconvert, [('mesh_files', 'in_file')]),
-                     ])
+        (inputnode, watershed_bem, [('subject_id', 'subject_id'),
+                                    ('subjects_dir', 'subjects_dir')]),
+        (watershed_bem, surfconvert, [('mesh_files', 'in_file')]),
+    ])

     """
     Setup an outputnode that defines relevant inputs of the workflow.
@@ -78,6 +78,6 @@ def create_bem_flow(name='bem', out_format='stl'):
     outputnode = pe.Node(niu.IdentityInterface(fields=["meshes"]),
                          name="outputspec")
     bemflow.connect([
-                     (surfconvert, outputnode, [("converted", "meshes")]),
-                     ])
+        (surfconvert, outputnode, [("converted", "meshes")]),
+    ])
     return bemflow
diff --git a/nipype/workflows/smri/freesurfer/utils.py b/nipype/workflows/smri/freesurfer/utils.py
index d3abf46160..d225929d4d 100644
--- a/nipype/workflows/smri/freesurfer/utils.py
+++ b/nipype/workflows/smri/freesurfer/utils.py
@@ -221,11 +221,11 @@ def switch_labels(inverse, transform_output, source_file, label_file):
         return label_file, transform_output

     chooser = pe.MapNode(niu.Function(input_names=['inverse',
-                                                    'transform_output',
-                                                    'source_file',
-                                                    'label_file'],
+                                                   'transform_output',
+                                                   'source_file',
+                                                   'label_file'],
                                       output_names=['label_file',
-                                                     'source_file'],
+                                                    'source_file'],
                                       function=switch_labels),
                          iterfield=['transform_output', 'source_file'],
                          name='chooser')
@@ -247,8 +247,8 @@ def switch_labels(inverse, transform_output, source_file, label_file):
                                             ]),
                      name="outputspec")
     getstats.connect([
-                      (statnode, outputnode, [("summary_file", "stats_file")]),
-                      ])
+        (statnode, outputnode, [("summary_file", "stats_file")]),
+    ])
     return getstats

@@ -339,18 +339,18 @@ def create_tessellation_flow(name='tessellate', out_format='stl'):
     """

     tessflow.connect([
-                      (inputnode, fssource, [('subject_id', 'subject_id'),
-                                             ('subjects_dir', 'subjects_dir')]),
-                      (fssource, volconvert, [('aseg', 'in_file')]),
-                      (volconvert, region_list_from_volume_node, [('out_file', 'in_file')]),
-                      (region_list_from_volume_node, tessellate, [('region_list', 'label_value')]),
-                      (region_list_from_volume_node, id_list_from_lookup_table_node, [('region_list', 'region_list')]),
-                      (inputnode, id_list_from_lookup_table_node, [('lookup_file', 'lookup_file')]),
-                      (id_list_from_lookup_table_node, tessellate, [('id_list', 'out_file')]),
-                      (fssource, tessellate, [('aseg', 'in_file')]),
-                      (tessellate, surfconvert, [('surface', 'in_file')]),
-                      (surfconvert, smoother, [('converted', 'in_file1')]),
-                      ])
+        (inputnode, fssource, [('subject_id', 'subject_id'),
+                               ('subjects_dir', 'subjects_dir')]),
+        (fssource, volconvert, [('aseg', 'in_file')]),
+        (volconvert, region_list_from_volume_node, [('out_file', 'in_file')]),
+        (region_list_from_volume_node, tessellate, [('region_list', 'label_value')]),
+        (region_list_from_volume_node, id_list_from_lookup_table_node, [('region_list', 'region_list')]),
+        (inputnode, id_list_from_lookup_table_node, [('lookup_file', 'lookup_file')]),
+        (id_list_from_lookup_table_node, tessellate, [('id_list', 'out_file')]),
+        (fssource, tessellate, [('aseg', 'in_file')]),
+        (tessellate, surfconvert, [('surface', 'in_file')]),
+        (surfconvert, smoother, [('converted', 'in_file1')]),
+    ])

     """
     Setup an outputnode that defines relevant inputs of the workflow.
@@ -365,9 +365,9 @@ def create_tessellation_flow(name='tessellate', out_format='stl'):
                         ])
         tessflow.connect([
             (stl_to_gifti, outputnode, [("converted", "meshes")]),
-            ])
+        ])
     else:
         tessflow.connect([
             (smoother, outputnode, [("mesh_file", "meshes")]),
-            ])
+        ])
     return tessflow
diff --git a/nipype/workflows/warp/__init__.py b/nipype/workflows/warp/__init__.py
index 8b13789179..e69de29bb2 100644
--- a/nipype/workflows/warp/__init__.py
+++ b/nipype/workflows/warp/__init__.py
@@ -1 +0,0 @@
-
diff --git a/setup.py b/setup.py
index 7f282eb7e2..3d81cfe495 100755
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,8 @@

 # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
 # update it when the contents of directories change.
-if os.path.exists('MANIFEST'): os.remove('MANIFEST')
+if os.path.exists('MANIFEST'):
+    os.remove('MANIFEST')

 # For some commands, use setuptools.
 if len(set(('develop', 'bdist_egg', 'bdist_rpm', 'bdist', 'bdist_dumb',
@@ -171,7 +172,7 @@ def package_check(pkg_name, version=None,
     dependencies. If dict fill key values ``install_requires`` and
     ``extras_require`` for non-optional and optional dependencies.
     '''
-    setuptools_mode = not setuptools_args is None
+    setuptools_mode = setuptools_args is not None
     optional_tf = bool(optional)
     if version_getter is None:
         def version_getter(pkg_name):
@@ -180,11 +181,11 @@ def version_getter(pkg_name):
     if messages is None:
         messages = {}
     msgs = {
-         'missing': 'Cannot import package "%s" - is it installed?',
-         'missing opt': 'Missing optional package "%s"',
-         'opt suffix': '; you may get run-time errors',
-         'version too old': 'You have version %s of package "%s"'
-                            ' but we need version >= %s', }
+        'missing': 'Cannot import package "%s" - is it installed?',
+        'missing opt': 'Missing optional package "%s"',
+        'opt suffix': '; you may get run-time errors',
+        'version too old': 'You have version %s of package "%s"'
+                           ' but we need version >= %s', }
     msgs.update(messages)

     status, have_version = _package_status(pkg_name,
                                            version,
@@ -208,8 +209,8 @@ def version_getter(pkg_name):
                                                version))
         log.warn(msgs['version too old'] % (have_version,
                                             pkg_name,
-                                            version)
-                 + msgs['opt suffix'])
+                                            version) +
+                 msgs['opt suffix'])
         return
     # setuptools mode
     if optional_tf and not isinstance(optional, string_types):
@@ -218,7 +219,7 @@ def version_getter(pkg_name):
         if version:
             dependency += '>=' + version
     if optional_tf:
-        if not 'extras_require' in setuptools_args:
+        if 'extras_require' not in setuptools_args:
             setuptools_args['extras_require'] = {}
         _add_append_key(setuptools_args['extras_require'],
                         optional,
@@ -297,119 +298,119 @@ def main(**extra_args):
           install_requires=REQUIRES,
           provides=PROVIDES,
           packages=['nipype',
-                     'nipype.algorithms',
-                     'nipype.algorithms.tests',
-                     'nipype.caching',
-                     'nipype.caching.tests',
-                     'nipype.external',
-                     'nipype.fixes',
-                     'nipype.fixes.numpy',
-                     'nipype.fixes.numpy.testing',
-                     'nipype.interfaces',
-                     'nipype.interfaces.afni',
-                     'nipype.interfaces.afni.tests',
-                     'nipype.interfaces.ants',
-                     'nipype.interfaces.ants.tests',
-                     'nipype.interfaces.camino',
-                     'nipype.interfaces.camino.tests',
-                     'nipype.interfaces.camino2trackvis',
-                     'nipype.interfaces.camino2trackvis.tests',
-                     'nipype.interfaces.cmtk',
-                     'nipype.interfaces.cmtk.tests',
-                     'nipype.interfaces.diffusion_toolkit',
-                     'nipype.interfaces.diffusion_toolkit.tests',
-                     'nipype.interfaces.dipy',
-                     'nipype.interfaces.dipy.tests',
-                     'nipype.interfaces.elastix',
-                     'nipype.interfaces.elastix.tests',
-                     'nipype.interfaces.freesurfer',
-                     'nipype.interfaces.freesurfer.tests',
-                     'nipype.interfaces.fsl',
-                     'nipype.interfaces.fsl.tests',
-                     'nipype.interfaces.mipav',
-                     'nipype.interfaces.mipav.tests',
-                     'nipype.interfaces.mne',
-                     'nipype.interfaces.mne.tests',
-                     'nipype.interfaces.mrtrix',
-                     'nipype.interfaces.mrtrix.tests',
-                     'nipype.interfaces.nipy',
-                     'nipype.interfaces.nipy.tests',
-                     'nipype.interfaces.nitime',
-                     'nipype.interfaces.nitime.tests',
-                     'nipype.interfaces.script_templates',
-                     'nipype.interfaces.semtools',
-                     'nipype.interfaces.semtools.brains',
-                     'nipype.interfaces.semtools.brains.tests',
-                     'nipype.interfaces.semtools.diffusion',
-                     'nipype.interfaces.semtools.diffusion.tests',
-                     'nipype.interfaces.semtools.diffusion.tractography',
-                     'nipype.interfaces.semtools.diffusion.tractography.tests',
-                     'nipype.interfaces.semtools.filtering',
-                     'nipype.interfaces.semtools.filtering.tests',
-                     'nipype.interfaces.semtools.legacy',
-                     'nipype.interfaces.semtools.legacy.tests',
-                     'nipype.interfaces.semtools.registration',
-                     'nipype.interfaces.semtools.registration.tests',
-                     'nipype.interfaces.semtools.segmentation',
-                     'nipype.interfaces.semtools.segmentation.tests',
-                     'nipype.interfaces.semtools.testing',
-                     'nipype.interfaces.semtools.tests',
-                     'nipype.interfaces.semtools.utilities',
-                     'nipype.interfaces.semtools.utilities.tests',
-                     'nipype.interfaces.slicer',
-                     'nipype.interfaces.slicer.diffusion',
-                     'nipype.interfaces.slicer.diffusion.tests',
-                     'nipype.interfaces.slicer.filtering',
-                     'nipype.interfaces.slicer.filtering.tests',
-                     'nipype.interfaces.slicer.legacy',
-                     'nipype.interfaces.slicer.legacy.diffusion',
-                     'nipype.interfaces.slicer.legacy.diffusion.tests',
-                     'nipype.interfaces.slicer.legacy.tests',
-                     'nipype.interfaces.slicer.quantification',
-                     'nipype.interfaces.slicer.quantification.tests',
-                     'nipype.interfaces.slicer.registration',
-                     'nipype.interfaces.slicer.registration.tests',
-                     'nipype.interfaces.slicer.segmentation',
-                     'nipype.interfaces.slicer.segmentation.tests',
-                     'nipype.interfaces.slicer.tests',
-                     'nipype.interfaces.spm',
-                     'nipype.interfaces.spm.tests',
-                     'nipype.interfaces.tests',
-                     'nipype.interfaces.vista',
-                     'nipype.interfaces.vista.tests',
-                     'nipype.pipeline',
-                     'nipype.pipeline.plugins',
-                     'nipype.pipeline.plugins.tests',
-                     'nipype.pipeline.tests',
-                     'nipype.testing',
-                     'nipype.testing.data',
-                     'nipype.testing.data.bedpostxout',
-                     'nipype.testing.data.dicomdir',
-                     'nipype.testing.data.tbss_dir',
-                     'nipype.utils',
-                     'nipype.utils.tests',
-                     'nipype.workflows',
-                     'nipype.workflows.data',
-                     'nipype.workflows.dmri',
-                     'nipype.workflows.dmri.camino',
-                     'nipype.workflows.dmri.connectivity',
-                     'nipype.workflows.dmri.dipy',
-                     'nipype.workflows.dmri.fsl',
-                     'nipype.workflows.dmri.fsl.tests',
-                     'nipype.workflows.dmri.mrtrix',
-                     'nipype.workflows.fmri',
-                     'nipype.workflows.fmri.fsl',
-                     'nipype.workflows.fmri.fsl.tests',
-                     'nipype.workflows.fmri.spm',
-                     'nipype.workflows.fmri.spm.tests',
-                     'nipype.workflows.graph',
-                     'nipype.workflows.misc',
-                     'nipype.workflows.rsfmri',
-                     'nipype.workflows.rsfmri.fsl',
-                     'nipype.workflows.smri',
-                     'nipype.workflows.smri.ants',
-                     'nipype.workflows.smri.freesurfer',
-                     'nipype.workflows.warp'],
+                    'nipype.algorithms',
+                    'nipype.algorithms.tests',
+                    'nipype.caching',
+                    'nipype.caching.tests',
+                    'nipype.external',
+                    'nipype.fixes',
+                    'nipype.fixes.numpy',
+                    'nipype.fixes.numpy.testing',
+                    'nipype.interfaces',
+                    'nipype.interfaces.afni',
+                    'nipype.interfaces.afni.tests',
+                    'nipype.interfaces.ants',
+                    'nipype.interfaces.ants.tests',
+                    'nipype.interfaces.camino',
+                    'nipype.interfaces.camino.tests',
+                    'nipype.interfaces.camino2trackvis',
+                    'nipype.interfaces.camino2trackvis.tests',
+                    'nipype.interfaces.cmtk',
+                    'nipype.interfaces.cmtk.tests',
+                    'nipype.interfaces.diffusion_toolkit',
+                    'nipype.interfaces.diffusion_toolkit.tests',
+                    'nipype.interfaces.dipy',
+                    'nipype.interfaces.dipy.tests',
+                    'nipype.interfaces.elastix',
+                    'nipype.interfaces.elastix.tests',
+                    'nipype.interfaces.freesurfer',
+                    'nipype.interfaces.freesurfer.tests',
+                    'nipype.interfaces.fsl',
+                    'nipype.interfaces.fsl.tests',
+                    'nipype.interfaces.mipav',
+                    'nipype.interfaces.mipav.tests',
+                    'nipype.interfaces.mne',
+                    'nipype.interfaces.mne.tests',
+                    'nipype.interfaces.mrtrix',
+                    'nipype.interfaces.mrtrix.tests',
+                    'nipype.interfaces.nipy',
+                    'nipype.interfaces.nipy.tests',
+                    'nipype.interfaces.nitime',
+                    'nipype.interfaces.nitime.tests',
+                    'nipype.interfaces.script_templates',
+                    'nipype.interfaces.semtools',
+                    'nipype.interfaces.semtools.brains',
+                    'nipype.interfaces.semtools.brains.tests',
+                    'nipype.interfaces.semtools.diffusion',
+                    'nipype.interfaces.semtools.diffusion.tests',
+                    'nipype.interfaces.semtools.diffusion.tractography',
+                    'nipype.interfaces.semtools.diffusion.tractography.tests',
+                    'nipype.interfaces.semtools.filtering',
+                    'nipype.interfaces.semtools.filtering.tests',
+                    'nipype.interfaces.semtools.legacy',
+                    'nipype.interfaces.semtools.legacy.tests',
+                    'nipype.interfaces.semtools.registration',
+                    'nipype.interfaces.semtools.registration.tests',
+                    'nipype.interfaces.semtools.segmentation',
+                    'nipype.interfaces.semtools.segmentation.tests',
+                    'nipype.interfaces.semtools.testing',
+                    'nipype.interfaces.semtools.tests',
+                    'nipype.interfaces.semtools.utilities',
+                    'nipype.interfaces.semtools.utilities.tests',
+                    'nipype.interfaces.slicer',
+                    'nipype.interfaces.slicer.diffusion',
+                    'nipype.interfaces.slicer.diffusion.tests',
+                    'nipype.interfaces.slicer.filtering',
+                    'nipype.interfaces.slicer.filtering.tests',
+                    'nipype.interfaces.slicer.legacy',
+                    'nipype.interfaces.slicer.legacy.diffusion',
+                    'nipype.interfaces.slicer.legacy.diffusion.tests',
+                    'nipype.interfaces.slicer.legacy.tests',
+                    'nipype.interfaces.slicer.quantification',
+                    'nipype.interfaces.slicer.quantification.tests',
+                    'nipype.interfaces.slicer.registration',
+                    'nipype.interfaces.slicer.registration.tests',
+                    'nipype.interfaces.slicer.segmentation',
+                    'nipype.interfaces.slicer.segmentation.tests',
+                    'nipype.interfaces.slicer.tests',
+                    'nipype.interfaces.spm',
+                    'nipype.interfaces.spm.tests',
+                    'nipype.interfaces.tests',
+                    'nipype.interfaces.vista',
+                    'nipype.interfaces.vista.tests',
+                    'nipype.pipeline',
+                    'nipype.pipeline.plugins',
+                    'nipype.pipeline.plugins.tests',
+                    'nipype.pipeline.tests',
+                    'nipype.testing',
+                    'nipype.testing.data',
+                    'nipype.testing.data.bedpostxout',
+                    'nipype.testing.data.dicomdir',
+                    'nipype.testing.data.tbss_dir',
+                    'nipype.utils',
+                    'nipype.utils.tests',
+                    'nipype.workflows',
+                    'nipype.workflows.data',
+                    'nipype.workflows.dmri',
+                    'nipype.workflows.dmri.camino',
+                    'nipype.workflows.dmri.connectivity',
+                    'nipype.workflows.dmri.dipy',
+                    'nipype.workflows.dmri.fsl',
+                    'nipype.workflows.dmri.fsl.tests',
+                    'nipype.workflows.dmri.mrtrix',
+                    'nipype.workflows.fmri',
+                    'nipype.workflows.fmri.fsl',
+                    'nipype.workflows.fmri.fsl.tests',
+                    'nipype.workflows.fmri.spm',
+                    'nipype.workflows.fmri.spm.tests',
+                    'nipype.workflows.graph',
+                    'nipype.workflows.misc',
+                    'nipype.workflows.rsfmri',
+                    'nipype.workflows.rsfmri.fsl',
+                    'nipype.workflows.smri',
+                    'nipype.workflows.smri.ants',
+                    'nipype.workflows.smri.freesurfer',
+                    'nipype.workflows.warp'],
           # The package_data spec has no effect for me (on python 2.6) -- even
           # changing to data_files doesn't get this stuff included in the source
           # distribution -- not sure if it has something to do with the magic
@@ -417,16 +418,16 @@ def main(**extra_args):
          # python -- duplicating things into MANIFEST.in but this is admittedly
          # only a workaround to get things started -- not a solution
          package_data={'nipype':
-                       [pjoin('testing', 'data', '*'),
-                        pjoin('testing', 'data', 'dicomdir', '*'),
-                        pjoin('testing', 'data', 'bedpostxout', '*'),
-                        pjoin('testing', 'data', 'tbss_dir', '*'),
-                        pjoin('workflows', 'data', '*'),
-                        pjoin('pipeline', 'report_template.html'),
-                        pjoin('external', 'd3.js'),
-                        pjoin('interfaces', 'script_templates', '*'),
-                        pjoin('interfaces', 'tests', 'realign_json.json')
-                        ]},
+                        [pjoin('testing', 'data', '*'),
+                         pjoin('testing', 'data', 'dicomdir', '*'),
+                         pjoin('testing', 'data', 'bedpostxout', '*'),
+                         pjoin('testing', 'data', 'tbss_dir', '*'),
+                         pjoin('workflows', 'data', '*'),
+                         pjoin('pipeline', 'report_template.html'),
+                         pjoin('external', 'd3.js'),
+                         pjoin('interfaces', 'script_templates', '*'),
+                         pjoin('interfaces', 'tests', 'realign_json.json')
diff --git a/tools/apigen.py b/tools/apigen.py
index 348e0e0677..dba2ce0a37 100644
--- a/tools/apigen.py
+++ b/tools/apigen.py
@@ -221,8 +221,8 @@ def generate_api_doc(self, uri):
         ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'

         chap_title = uri_short
-        ad += (chap_title + '\n' + self.rst_section_levels[1] * len(chap_title)
-               + '\n\n')
+        ad += (chap_title + '\n' +
+               self.rst_section_levels[1] * len(chap_title) + '\n\n')

         # Set the chapter title to read 'module' for all modules except for the
         # main packages
diff --git a/tools/github.py b/tools/github.py
index 3f966383ee..cc6c78e1a8 100644
--- a/tools/github.py
+++ b/tools/github.py
@@ -94,5 +94,5 @@ def get_file_url(object):
     shortfile = os.path.join('nipype', filename.split('nipype/')[-1])
     uri = 'http://github.com/nipy/nipype/tree/%s/%s#L%d' % \
         (info['commit_hash'],
-                              shortfile, lines[1])
+         shortfile, lines[1])
     return uri
diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py
index 3cd930066b..8803786c8c 100755
--- a/tools/gitwash_dumper.py
+++ b/tools/gitwash_dumper.py
@@ -116,8 +116,8 @@ def make_link_targets(proj_name,
     .. _`proj_name` mailing list: url
     """
     link_contents = open(known_link_fname, 'rt').readlines()
-    have_url = not url is None
-    have_ml_url = not ml_url is None
+    have_url = url is not None
+    have_ml_url = ml_url is not None
     have_gh_url = None
     for line in link_contents:
         if not have_url:
@@ -136,12 +136,12 @@ def make_link_targets(proj_name,
         raise RuntimeError('Need command line or known project '
                            'and / or mailing list URLs')
     lines = []
-    if not url is None:
+    if url is not None:
         lines.append('.. _%s: %s\n' % (proj_name, url))
     if not have_gh_url:
         gh_url = 'http://github.com/%s/%s\n' % (user_name, repo_name)
         lines.append('.. _`%s github`: %s\n' % (proj_name, gh_url))
-    if not ml_url is None:
+    if ml_url is not None:
         lines.append('.. _`%s mailing list`: %s\n' % (proj_name, ml_url))
     if len(lines) == 0:
         # Nothing to do
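Review note on the gitwash_dumper.py hunks above: `is` binds more tightly than `not`, so `not url is None` already parses as `not (url is None)`; the rewrite to `url is not None` is behavior-preserving and just silences pycodestyle's E714. A minimal self-contained check (the sample values are illustrative only, not taken from the patch):

    for url in (None, 'http://github.com/nipy/nipype'):
        # Both spellings are the same comparison; PEP 8 prefers `is not`
        # because it reads as a single operator.
        assert (not url is None) == (url is not None)
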
diff --git a/tools/interfacedocgen.py b/tools/interfacedocgen.py
index bc49e6f78b..eda0c6a8b5 100644
--- a/tools/interfacedocgen.py
+++ b/tools/interfacedocgen.py
@@ -275,8 +275,8 @@ def generate_api_doc(self, uri):
         ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n'

         chap_title = uri_short
-        ad += (chap_title+'\n' + self.rst_section_levels[1] * len(chap_title)
-               + '\n\n')
+        ad += (chap_title + '\n' +
+               self.rst_section_levels[1] * len(chap_title) + '\n\n')

         # Set the chapter title to read 'module' for all modules except for the
         # main packages
@@ -491,7 +491,7 @@ def write_index(self, outdir, froot='gen', relative_to=None):
         if self.written_modules is None:
             raise ValueError('No modules written')
         # Get full filename path
-        path = os.path.join(outdir, froot+self.rst_extension)
+        path = os.path.join(outdir, froot + self.rst_extension)
         # Path written into index is relative to rootpath
         if relative_to is not None:
             relpath = outdir.replace(relative_to + os.path.sep, '')
diff --git a/tools/make_examples.py b/tools/make_examples.py
index 20481427a4..605746cc7a 100755
--- a/tools/make_examples.py
+++ b/tools/make_examples.py
@@ -56,7 +56,7 @@ def show():
     allfm = Gcf.get_all_fig_managers()
     for fcount, fm in enumerate(allfm):
         fm.canvas.figure.savefig('%s_%02i.png' %
-                                 (figure_basename, fcount+1))
+                                 (figure_basename, fcount + 1))

 _mpl_show = plt.show
 plt.show = show
@@ -97,4 +97,3 @@ def show():
     figure_basename = pjoin('fig', os.path.splitext(script)[0])
     execfile(script)
     plt.close('all')
-
diff --git a/tools/nipype_nightly.py b/tools/nipype_nightly.py
index 363fcc1988..5206c8afe9 100644
--- a/tools/nipype_nightly.py
+++ b/tools/nipype_nightly.py
@@ -88,6 +88,3 @@ def setup_paths():
     build_docs()
     # push_to_sf()
     os.chdir(prev_dir)
-
-
-
diff --git a/tools/report_coverage.py b/tools/report_coverage.py
index 61b8ced640..0009ccc20d 100644
--- a/tools/report_coverage.py
+++ b/tools/report_coverage.py
@@ -36,8 +36,8 @@ def grab_coverage(output):
         if line.startswith('Ran '):
             tcount = line

     covout.insert(0, header)
-    covout.insert(1, '-'*70)
-    covout.append('-'*70)
+    covout.insert(1, '-' * 70)
+    covout.append('-' * 70)
     covout.append(tcount)
     return '\n'.join(covout)
diff --git a/tools/run_examples.py b/tools/run_examples.py
index cd75067ed1..a7903c9276 100644
--- a/tools/run_examples.py
+++ b/tools/run_examples.py
@@ -5,7 +5,7 @@


 def run_examples(example, pipelines, plugin):
-    print('running example: %s with plugin: %s' %(example, plugin))
+    print('running example: %s with plugin: %s' % (example, plugin))
     from nipype import config
     config.enable_debug_mode()
     from nipype.interfaces.base import CommandLine
diff --git a/tools/setup.py b/tools/setup.py
index 1b427eaba5..fba34de078 100644
--- a/tools/setup.py
+++ b/tools/setup.py
@@ -10,4 +10,3 @@
       url='http://nipy.sourceforge.net',
       scripts=['./nipype_nightly.py', './report_coverage.py']
       )
-
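Closing note on the reflowed concatenations in tools/apigen.py and tools/interfacedocgen.py above: the expression builds a reST chapter heading, and moving the `+` to the line end (the W503/W504 line-break style choice) leaves the result byte-for-byte identical. A sketch with assumed values -- `rst_section_levels` follows the usual apigen-style default and is not stated in this patch:

    chap_title = 'nipype.interfaces'
    rst_section_levels = ['*', '=', '-', '~', '^']  # assumed default

    # Title, a newline, then an underline of '=' as long as the title.
    ad = (chap_title + '\n' +
          rst_section_levels[1] * len(chap_title) + '\n\n')

    print(ad)
    # nipype.interfaces
    # =================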